├── .github ├── CODEOWNERS └── workflows │ ├── npm-prerelease.yaml │ ├── sync-issue-labels.yaml │ ├── test-and-release.yaml │ └── update-repo-info.yaml ├── .gitignore ├── .husky ├── commit-msg └── pre-commit ├── .prettierignore ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── adr ├── 002-use-npm-package-registry.md ├── 003-use-saga-as-the-main-branch.md ├── 004-move-to-nordicsemiconductor-github-organization.md ├── 005-clean-gitignore.md ├── 006-use-of-typescript.md └── README.md ├── agnss ├── cacheKey.spec.ts ├── cacheKey.ts ├── deviceRequestHandler.ts ├── getCache.ts └── types.ts ├── api ├── ErrorInfo.ts └── res.ts ├── cdk.json ├── cdk ├── apps │ ├── AssetTracker.ts │ ├── HTTPAPIMock.ts │ └── Test.ts ├── cloudformation-http-api-mock.ts ├── cloudformation-test.ts ├── cloudformation.ts ├── createThingGroup.ts ├── helper │ ├── checkRegion.ts │ ├── enabledInContext.ts │ ├── extract-repo-and-owner.spec.ts │ ├── extract-repo-and-owner.ts │ ├── getIotEndpoint.ts │ ├── getStackContexts.ts │ ├── iotRuleSqlCheckIfDefinedAndNotZero.spec.ts │ ├── iotRuleSqlCheckIfDefinedAndNotZero.ts │ ├── iotRuleSqlCheckUndefined.spec.ts │ ├── iotRuleSqlCheckUndefined.ts │ ├── lambdas │ │ ├── commonParent.spec.ts │ │ ├── commonParent.ts │ │ ├── findDependencies.ts │ │ ├── packLambda.ts │ │ ├── packLambdaFromPath.ts │ │ └── packLayer.ts │ └── note.ts ├── regions.ts ├── resources │ ├── AGNSSDeviceRequestHandler.ts │ ├── AGNSSResolver.ts │ ├── AGNSSStorage.ts │ ├── CellGeolocation.ts │ ├── CellGeolocationApi.ts │ ├── FOTAStorage.ts │ ├── HistoricalData.ts │ ├── LambdasWithLayer.ts │ ├── NetworkSurveyGeolocation.ts │ ├── NetworkSurveyGeolocationApi.ts │ ├── NetworkSurveysStorage.ts │ ├── PGPSDeviceRequestHandler.ts │ ├── PGPSResolver.ts │ ├── PGPSStorage.ts │ ├── RepublishDesiredConfig.ts │ ├── ThingGroup.ts │ ├── ThingGroupLambda.ts │ ├── WebAppCD.ts │ ├── WebAppCI.ts │ └── WebAppHosting.ts ├── stacks │ ├── AssetTracker │ │ ├── lambdas.ts │ │ └── stack.ts │ ├── 
ContinuousDeployment.ts │ ├── WebApp.ts │ ├── WebAppCI.ts │ └── stackName.ts └── test-resources │ ├── HttpApiMockStack.ts │ ├── api-mock-lambda.ts │ ├── prepare-test-resources.ts │ ├── splitMockResponse.spec.ts │ └── splitMockResponse.ts ├── cellGeolocation ├── cellId.spec.ts ├── cellId.ts ├── geolocateFromCache.ts ├── httpApi │ └── cell.ts ├── lambda │ └── invokeStepFunctionFromSQS.ts └── stepFunction │ ├── fromCache.ts │ ├── types.ts │ └── updateCache.ts ├── cli.sh ├── cli ├── cd │ └── listPipelines.ts ├── cli.ts ├── commands │ ├── CommandDefinition.ts │ ├── cd-update-token.ts │ ├── cd.ts │ ├── configure.ts │ ├── create-and-provision-device-cert.ts │ ├── create-ca.ts │ ├── create-simulator-cert.ts │ ├── flash-firmware.ts │ ├── imei.ts │ ├── info.ts │ ├── logs.ts │ ├── purge-buckets.ts │ ├── purge-cas.ts │ ├── purge-iot-user-policy-principals.ts │ ├── register-ca.ts │ ├── retry.spec.ts │ ├── retry.ts │ ├── show-api-configuration.ts │ ├── web-app-ci.ts │ └── web-app-config.ts └── jitp │ ├── caFileLocations.ts │ ├── certsDir.ts │ ├── createCA.ts │ ├── createDeviceCertificate.ts │ ├── createSimulatorKeyAndCSR.ts │ ├── currentCA.ts │ ├── deviceFileLocations.ts │ ├── fingerprint.ts │ ├── listLocalCAs.ts │ ├── listRegisteredCAs.ts │ ├── readlineDevice.ts │ └── registerCA.ts ├── commitlint.config.cjs ├── context.cfg.dist ├── continuous-deployment-web-app.yml ├── continuous-deployment.yml ├── data └── AmazonRootCA1.pem ├── eslint.config.js ├── export.d.ts ├── export.js ├── feature-runner ├── console-reporter.ts ├── run-features.ts └── steps │ ├── aws.ts │ ├── cognito.ts │ ├── context.ts │ ├── mockHTTPAPI.ts │ ├── random.ts │ ├── rest.ts │ ├── retryCheck.spec.ts │ ├── retryCheck.ts │ ├── timestream.ts │ ├── tracker.ts │ └── util.ts ├── features ├── A-GNSS-fan-out.feature.md ├── A-GNSS.feature.md ├── CellGeolocationnRFCloud.feature.md ├── ConnectTracker.feature.md ├── DeleteTrackers.feature.md ├── DeleteUser.feature.md ├── DeviceBatchData.feature.md ├── 
DeviceMessages.feature.md ├── DeviceUpdateShadow.feature.md ├── FOTA.feature.md ├── IoTUserPolicy.feature.md ├── ListDevices.md ├── NeighborCellGeolocationnRFCloud.feature.md ├── NeighborCellMeasurementsStorage.feature.md ├── NetworkSurveyGeolocationnRFCloud.feature.md ├── NetworkSurveyStorage.feature.md ├── P-GPS-fan-out.feature.md ├── P-GPS.feature.md ├── QueryHistoricalData.feature.md ├── README.md ├── ReadDeviceShadow.feature.md ├── UpdateDeviceConfiguration.feature.md ├── UserRegistration.feature.md ├── WiFiSiteSurveryGeolocationnRFCloud.feature.md └── WiFiSiteSurveyStorage.feature.md ├── geolocation ├── Cell.ts ├── Location.ts ├── parseMCCMNC.spec.ts ├── parseMCCMNC.ts ├── queueJob.ts └── types.ts ├── historicalData ├── batchToTimestreamRecords.spec.ts ├── batchToTimestreamRecords.ts ├── messageToTimestreamRecords.spec.ts ├── messageToTimestreamRecords.ts ├── shadowUpdateToTimestreamRecords.spec.ts ├── shadowUpdateToTimestreamRecords.ts ├── storeMessagesInTimestream.ts ├── storeRecordsInTimeseries.ts └── types.d.ts ├── networkSurveyGeolocation ├── expandMac.spec.ts ├── expandMac.ts ├── geolocateSurvey.ts └── httpApi │ └── locateSurvey.ts ├── package-lock.json ├── package.json ├── pgps ├── cacheKey.spec.ts ├── cacheKey.ts ├── deviceRequestHandler.ts ├── getCache.ts ├── gpsTime.spec.ts ├── gpsTime.ts └── types.ts ├── renovate.json ├── third-party ├── nrfcloud.com │ ├── agnss.ts │ ├── apiclient.spec.ts │ ├── apiclient.ts │ ├── cellgeolocation.ts │ ├── createToken.spec.ts │ ├── createToken.ts │ ├── groundFixRequestSchema.spec.ts │ ├── groundFixRequestSchema.ts │ ├── locate.ts │ ├── networksurveygeolocation.ts │ ├── pgps.ts │ └── settings.ts └── sentry.io │ └── settings.ts ├── tsconfig.json └── util ├── fromEnv.ts ├── isNotEmpty.ts ├── isNullOrUndefined.ts ├── paginate.ts ├── parseJSON.ts └── settings.ts /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @coderbyheart 2 | 
-------------------------------------------------------------------------------- /.github/workflows/npm-prerelease.yaml: -------------------------------------------------------------------------------- 1 | name: Shallow tested NPM prerelease 2 | 3 | on: 4 | push: 5 | branches-ignore: 6 | - saga 7 | 8 | env: 9 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 10 | NPM_TOKEN: ${{ secrets.NPM_TOKEN }} 11 | 12 | jobs: 13 | test: 14 | runs-on: ubuntu-22.04 15 | steps: 16 | - uses: actions/checkout@v4 17 | 18 | - uses: actions/setup-node@v4 19 | with: 20 | node-version: "20.x" 21 | cache: "npm" 22 | 23 | - name: Install dependencies 24 | run: npm ci --no-audit 25 | 26 | - name: Compile TypeScript for type checking 27 | run: npx tsc 28 | 29 | - name: Run Unit Tests 30 | run: npm test 31 | 32 | - name: Semantic release 33 | # Don't mark this as an error, semantic-release won't work if force-pushing to the branch 34 | # https://github.com/semantic-release/semantic-release/blob/master/docs/support/troubleshooting.md#release-not-found-release-branch-after-git-push---force 35 | continue-on-error: true 36 | run: npx semantic-release 37 | -------------------------------------------------------------------------------- /.github/workflows/sync-issue-labels.yaml: -------------------------------------------------------------------------------- 1 | name: Sync issue labels from docs repository 2 | 3 | env: 4 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 5 | 6 | permissions: 7 | issues: write 8 | 9 | on: 10 | push: 11 | branches: 12 | - saga 13 | paths: 14 | - ".github/workflows/sync-issue-labels.yaml" 15 | workflow_dispatch: 16 | schedule: 17 | - cron: "0 0 * * *" 18 | 19 | jobs: 20 | sync_issue_labels: 21 | runs-on: ubuntu-22.04 22 | steps: 23 | - name: Checkout 24 | uses: actions/checkout@v4 25 | 26 | - name: Sync issue labels from docs repository 27 | run: gh label clone NordicSemiconductor/asset-tracker-cloud-docs --force 28 | 
-------------------------------------------------------------------------------- /.github/workflows/update-repo-info.yaml: -------------------------------------------------------------------------------- 1 | name: Sync repository info from package.json 2 | 3 | env: 4 | GITHUB_TOKEN: ${{ secrets.UPDATE_REPO_INFO_PAT }} 5 | 6 | on: 7 | push: 8 | branches: 9 | - saga 10 | paths: 11 | - "package.json" 12 | - ".github/workflows/update-repo-info.yaml" 13 | workflow_dispatch: 14 | 15 | jobs: 16 | update_repo_info: 17 | runs-on: ubuntu-22.04 18 | steps: 19 | - name: Checkout 20 | uses: actions/checkout@v4 21 | 22 | - name: Sync repository description 23 | run: 24 | gh repo edit --description "`cat package.json | jq -r '.description'`" 25 | 26 | - name: Sync repository topics 27 | run: 28 | cat package.json | jq '.keywords[]' | xargs -I'{}' gh repo edit 29 | --add-topic {} 30 | 31 | - name: Sync homepage 32 | run: gh repo edit --homepage "`cat package.json | jq -r '.homepage'`" 33 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # This file should only cover artifacts caused by the source code in this 2 | # repository, not those caused by the personal choice of editor and/or 3 | # environment of a developer. 
4 | # see ADR 5 in https://github.com/NordicSemiconductor/asset-tracker-cloud-docs/ 5 | node_modules/ 6 | npm-debug.log 7 | dist/ 8 | cdk.out/ 9 | certificates/ 10 | log.log 11 | tsconfig.tsbuildinfo 12 | e2e-test-result.json 13 | test_suite_analysis/ 14 | -------------------------------------------------------------------------------- /.husky/commit-msg: -------------------------------------------------------------------------------- 1 | npx commitlint --edit $1 -------------------------------------------------------------------------------- /.husky/pre-commit: -------------------------------------------------------------------------------- 1 | npx tsc 2 | npx lint-staged 3 | npm test -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | package-lock.json 3 | aws/ 4 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | See 2 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | See 2 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2019-2024, Nordic Semiconductor ASA | nordicsemi.no 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. 
Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | > [!NOTE] 2 | > 3 | > This repository has been archived, because [it is considered feature-complete](https://github.com/NordicSemiconductor/asset-tracker-cloud-docs/discussions/1068). 
✔️ 4 | 5 | # nRF Asset Tracker for AWS [![npm version](https://img.shields.io/npm/v/@nordicsemiconductor/asset-tracker-cloud-aws.svg)](https://www.npmjs.com/package/@nordicsemiconductor/asset-tracker-cloud-aws) 6 | 7 | [![GitHub Actions](https://github.com/NordicSemiconductor/asset-tracker-cloud-aws-js/workflows/Test%20and%20Release/badge.svg)](https://github.com/NordicSemiconductor/asset-tracker-cloud-aws-js/actions) 8 | [![semantic-release](https://img.shields.io/badge/%20%20%F0%9F%93%A6%F0%9F%9A%80-semantic--release-e10079.svg)](https://github.com/semantic-release/semantic-release) 9 | [![Renovate](https://img.shields.io/badge/renovate-enabled-brightgreen.svg)](https://renovatebot.com) 10 | [![@commitlint/config-conventional](https://img.shields.io/badge/%40commitlint-config--conventional-brightgreen)](https://github.com/conventional-changelog/commitlint/tree/master/@commitlint/config-conventional) 11 | [![code style: prettier](https://img.shields.io/badge/code_style-prettier-ff69b4.svg)](https://github.com/prettier/prettier/) 12 | [![ESLint: TypeScript](https://img.shields.io/badge/ESLint-TypeScript-blue.svg)](https://github.com/typescript-eslint/typescript-eslint) 13 | 14 | A reference implementation of a serverless backend for an IoT product developed 15 | using [AWS CDK](https://aws.amazon.com/cdk) in 16 | [TypeScript](https://www.typescriptlang.org/). 17 | 18 | > :information_source: 19 | > [Read the complete nRF Asset Tracker documentation](https://docs.nordicsemi.com/bundle/nrf-asset-tracker-saga/). 20 | -------------------------------------------------------------------------------- /adr/002-use-npm-package-registry.md: -------------------------------------------------------------------------------- 1 | # ADR 002: Use NPM registry 2 | 3 | In March 2020, 4 | [GitHub acquired NPM](https://github.blog/2020-03-16-npm-is-joining-github/) and 5 | it will be kept operational as a registry for public JavaScript packages. 
So, as 6 | of now, the packages can be moved back to NPM. 7 | 8 | This supersedes 9 | [ADR 001: Use GitHub package registry](https://github.com/NordicSemiconductor/asset-tracker-cloud-docs/blob/d14e0e0ec692bea5e3b1ea0b50b359c0dc8c0c4f/docs/adr/001-use-github-package-registry.md). 10 | -------------------------------------------------------------------------------- /adr/003-use-saga-as-the-main-branch.md: -------------------------------------------------------------------------------- 1 | # ADR 003: Use saga as the name for the main branch 2 | 3 | Historically, Git and other software use terms such as `master/slave`, 4 | `whitelist/blacklist`, which are based on racial concepts. Their continued use 5 | maintains the racial stereotypes they depict. Better alternatives in meaning and 6 | technical correctness exist, like `leader/follower`, `blocklist/allowlist`. 7 | 8 | In the Nordic mythology, a `saga` is a long, continuous recollection of stories 9 | about the history of humans, legends, and Gods. The term `saga` reflects very 10 | well what happens in a Git repository. Changes happen in branches (some teams 11 | tie them to _User Stories_, which are sometimes directly or loosely coupled to 12 | the main branch). Once the changes are finalized, they get added to the main 13 | branch, or get appended in the case of a rebase. The mental model of a big book 14 | of stories fits this process very well. 15 | 16 | Therefore, the main branch in this project is named `saga`. `master` must not be 17 | used. 18 | -------------------------------------------------------------------------------- /adr/004-move-to-nordicsemiconductor-github-organization.md: -------------------------------------------------------------------------------- 1 | # ADR 004: Move to Nordic Semiconductor GitHub organization 2 | 3 | In February 2021, the project was moved from the `Bifravst` GitHub organization 4 | to the official Nordic Semiconductor GitHub organization 5 | (`Nordic Semiconductor`). 
This move was caused by the 6 | [promotion of the Bifravst project into an official Nordic Semiconductor open-source project](https://github.com/bifravst/bifravst/issues/56) 7 | with the release of the firmware in 8 | [nRF Connect SDK](https://github.com/nrfconnect/sdk-nrf) as 9 | [nRF9160: Asset Tracker v2](https://developer.nordicsemi.com/nRF_Connect_SDK/doc/1.5.0/nrf/applications/asset_tracker_v2/README.html) 10 | application. 11 | 12 | The project is renamed to _nRF Asset Tracker_. 13 | -------------------------------------------------------------------------------- /adr/005-clean-gitignore.md: -------------------------------------------------------------------------------- 1 | # ADR 005: Clean `.gitignore` file 2 | 3 | A `.gitignore` file in a project must only cover the artifacts caused by the 4 | contained source code and not those caused by the personal choice of editor or 5 | the environment of a developer. 6 | 7 | This is explained in detail 8 | [here](https://github.com/coderbyheart/first-principles/issues/30). 9 | -------------------------------------------------------------------------------- /adr/006-use-of-typescript.md: -------------------------------------------------------------------------------- 1 | # ADR 006: use of TypeScript 2 | 3 | This project is developed using [TypeScript](https://www.typescriptlang.org/) (a 4 | typed superset of JavaScript). 5 | 6 | JavaScript is the most popular language according to the 7 | [2019 Stack Overflow survey](https://insights.stackoverflow.com/survey/2019#technology) 8 | and it is therefore likely that many developers using the project will be 9 | familiar with the language concepts and how to use and run it. 10 | 11 | Virtually all cloud providers provide their SDKs in JavaScript or TypeScript 12 | which this project can integrate natively. 
13 | -------------------------------------------------------------------------------- /adr/README.md: -------------------------------------------------------------------------------- 1 | # Architecture decision records 2 | 3 | This folder contains the architecture decision records (ADRs) for this project. 4 | 5 | To know more about ADRs, see 6 | [Documenting architecture decisions](http://thinkrelevance.com/blog/2011/11/15/documenting-architecture-decisions) 7 | and the video on 8 | [Communicating and documenting architectural decisions](https://www.youtube.com/watch?v=rwfXkSjFhzc). 9 | -------------------------------------------------------------------------------- /agnss/cacheKey.spec.ts: -------------------------------------------------------------------------------- 1 | import { cacheKey } from './cacheKey.js' 2 | import { describe, it } from 'node:test' 3 | import assert from 'node:assert' 4 | 5 | void describe('cacheKey', () => { 6 | void it('should create a cache key', () => 7 | assert.equal( 8 | cacheKey({ 9 | binHours: 1, 10 | request: { 11 | mcc: 242, 12 | mnc: 1, 13 | cell: 21626624, 14 | area: 30401, 15 | types: [1, 2, 3, 4, 6, 7, 8, 9], 16 | }, 17 | }), 18 | `242-1-21626624-30401-1_2_3_4_6_7_8_9-${new Date() 19 | .toISOString() 20 | .slice(0, 13) 21 | .replace(/[:-]/g, '')}0000`, 22 | )) 23 | }) 24 | -------------------------------------------------------------------------------- /agnss/cacheKey.ts: -------------------------------------------------------------------------------- 1 | import type { Static } from '@sinclair/typebox' 2 | import type { agnssRequestSchema } from './types.js' 3 | 4 | export const cacheKey = ({ 5 | request, 6 | binHours, 7 | }: { 8 | request: Static 9 | binHours: number 10 | }): string => { 11 | const binMs = binHours * 60 * 60 * 1000 12 | const { mcc, mnc, cell, area, types } = request 13 | return `${mcc}-${mnc}-${cell}-${area}-${types.join('_')}-${new Date( 14 | Math.floor(Date.now() / binMs) * binMs, 15 | ) 16 | 
.toISOString() 17 | .slice(0, 19) 18 | .replace(/[:-]/g, '')}` 19 | } 20 | -------------------------------------------------------------------------------- /agnss/getCache.ts: -------------------------------------------------------------------------------- 1 | import type { DynamoDBClient } from '@aws-sdk/client-dynamodb' 2 | import { GetItemCommand } from '@aws-sdk/client-dynamodb' 3 | import { unmarshall } from '@aws-sdk/util-dynamodb' 4 | import type { Static } from '@sinclair/typebox' 5 | import { ErrorType, type ErrorInfo } from '../api/ErrorInfo.js' 6 | import type { agnssRequestSchema } from './types.js' 7 | 8 | export type AGNSSDataCache = Static & { 9 | source: string 10 | dataHex?: string[] 11 | unresolved?: boolean 12 | updatedAt: Date 13 | } 14 | 15 | export const getCache = 16 | ({ dynamodb, TableName }: { dynamodb: DynamoDBClient; TableName: string }) => 17 | async (cacheKey: string): Promise<{ error: ErrorInfo } | AGNSSDataCache> => { 18 | try { 19 | const { Item } = await dynamodb.send( 20 | new GetItemCommand({ 21 | TableName, 22 | Key: { 23 | cacheKey: { 24 | S: cacheKey, 25 | }, 26 | }, 27 | }), 28 | ) 29 | 30 | if (Item === undefined) throw new Error('NOT_FOUND') 31 | 32 | const entry = unmarshall(Item) 33 | return { 34 | ...entry, 35 | updatedAt: new Date(entry.updatedAt as string), 36 | types: [...(entry.types as Set)], 37 | dataHex: 38 | entry.dataHex !== undefined 39 | ? 
import { Type } from '@sinclair/typebox'

/**
 * Assistance-data types a device can request from the A-GNSS service.
 * The numeric values are the type identifiers used on the wire.
 * Note: value 10 is not assigned in this enum — presumably reserved by
 * the upstream API; TODO confirm against the nRF Cloud documentation.
 */
export enum AGNSSType {
	'GPS UTC' = 1,
	'GPS Ephemerides' = 2,
	'GPS Almanac' = 3,
	'Klobuchar Ionospheric Correction' = 4,
	'Nequick Ionospheric Correction' = 5,
	'GPS Time of Week' = 6,
	'GPS System Clock' = 7,
	'Location (lat/lon of cell)' = 8,
	'GPS Integrity' = 9,
	'QZSS Almanac' = 11,
	'QZSS Ephemerides' = 12,
	'QZSS Integrity' = 13,
}

// Integer >= 1; reused below for the cell, area and phycell identifiers.
const PositiveInteger = Type.Integer({ minimum: 1, title: 'positive integer' })

/**
 * Schema describing a device's A-GNSS assistance data request.
 *
 * @see https://api.nrfcloud.com/v1#tag/GNSS/operation/GetAssistanceData
 */
export const agnssRequestSchema = Type.Object({
	// Mobile country code (always three digits)
	mcc: Type.Integer({ minimum: 100, maximum: 999 }),
	// Mobile network code (up to three digits)
	mnc: Type.Integer({ minimum: 0, maximum: 999 }),
	// Cell tower identifier
	cell: PositiveInteger,
	// Area identifier the cell belongs to
	area: PositiveInteger,
	// Optional physical cell identifier
	phycell: Type.Optional(PositiveInteger),
	// At least one assistance-data type must be requested
	types: Type.Array(Type.Enum(AGNSSType), { minItems: 1 }),
})
ErrorType { 2 | EntityNotFound = 'EntityNotFound', 3 | BadRequest = 'BadRequest', 4 | AccessDenied = 'AccessDenied', 5 | InternalError = 'InternalError', 6 | Conflict = 'Conflict', 7 | BadGateway = 'BadGateway', 8 | } 9 | 10 | export type ErrorInfo = { 11 | type: ErrorType 12 | message: string 13 | detail?: any 14 | } 15 | 16 | export const toStatusCode = { 17 | [ErrorType.BadRequest]: 400, 18 | [ErrorType.AccessDenied]: 403, 19 | [ErrorType.EntityNotFound]: 404, 20 | [ErrorType.InternalError]: 500, 21 | [ErrorType.Conflict]: 409, 22 | [ErrorType.BadGateway]: 502, 23 | } 24 | -------------------------------------------------------------------------------- /api/res.ts: -------------------------------------------------------------------------------- 1 | import type { APIGatewayProxyResultV2 } from 'aws-lambda' 2 | 3 | export const res = 4 | (statusCode: number, options?: { expires: number }) => 5 | (body: unknown): APIGatewayProxyResultV2 => ({ 6 | statusCode, 7 | headers: { 8 | 'Access-Control-Allow-Origin': '*', 9 | 'Content-Type': 'application/json', 10 | ...(options?.expires !== undefined && { 11 | 'Cache-Control': `public, max-age=${options.expires}`, 12 | Expires: new Date( 13 | new Date().getTime() + options.expires * 1000, 14 | ).toUTCString(), 15 | }), 16 | 'X-asset-tracker-Version': process.env.VERSION ?? 
'unknown', 17 | }, 18 | body: JSON.stringify(body), 19 | }) 20 | -------------------------------------------------------------------------------- /cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "node --import tsx --no-warnings --unhandled-rejections=strict cdk/cloudformation.ts" 3 | } 4 | -------------------------------------------------------------------------------- /cdk/apps/AssetTracker.ts: -------------------------------------------------------------------------------- 1 | import { App } from 'aws-cdk-lib' 2 | import pjson from '../../package.json' 3 | import { enabledInContext } from '../helper/enabledInContext.js' 4 | import { extractRepoAndOwner } from '../helper/extract-repo-and-owner.js' 5 | import type { 6 | AssetTrackerLambdas, 7 | CDKLambdas, 8 | } from '../stacks/AssetTracker/lambdas.js' 9 | import { AssetTrackerStack } from '../stacks/AssetTracker/stack.js' 10 | import { ContinuousDeploymentStack } from '../stacks/ContinuousDeployment.js' 11 | import { WebAppStack } from '../stacks/WebApp.js' 12 | import { WebAppCIStack } from '../stacks/WebAppCI.js' 13 | 14 | export class AssetTrackerApp extends App { 15 | public constructor(args: { 16 | packedLambdas: AssetTrackerLambdas 17 | packedCDKLambdas: CDKLambdas 18 | context?: Record 19 | }) { 20 | super({ context: args.context }) 21 | // Core 22 | const coreStack = new AssetTrackerStack(this, { 23 | ...args, 24 | }) 25 | const checkFlag = enabledInContext(this.node) 26 | // Web App 27 | checkFlag({ 28 | key: 'webapp', 29 | component: 'Web App', 30 | onUndefined: 'enabled', 31 | onEnabled: () => new WebAppStack(this).addDependency(coreStack), 32 | }) 33 | // Web App CI 34 | checkFlag({ 35 | key: 'web-app-ci', 36 | component: 'Web App CI', 37 | onEnabled: () => { 38 | new WebAppCIStack(this, { 39 | repository: extractRepoAndOwner(pjson.deploy.webApp.repository), 40 | }).addDependency(coreStack) 41 | }, 42 | }) 43 | // CD 44 | checkFlag({ 45 | key: 
'cd', 46 | component: 'Continuous Deployment', 47 | onEnabled: () => { 48 | new ContinuousDeploymentStack(this, { 49 | core: { 50 | ...extractRepoAndOwner(pjson.repository.url), 51 | branch: pjson.deploy.branch ?? 'saga', 52 | }, 53 | webApp: { 54 | ...extractRepoAndOwner(pjson.deploy.webApp.repository), 55 | branch: pjson.deploy.webApp.branch ?? 'saga', 56 | }, 57 | }) 58 | }, 59 | }) 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /cdk/apps/HTTPAPIMock.ts: -------------------------------------------------------------------------------- 1 | import { App } from 'aws-cdk-lib' 2 | import { HttpApiMockStack } from '../test-resources/HttpApiMockStack.js' 3 | import type { HTTPAPIMockLambdas } from '../test-resources/prepare-test-resources.js' 4 | 5 | /** 6 | * This sets up the parts of the app needed for the end-to-end tests 7 | */ 8 | export class HTTPAPIMockApp extends App { 9 | public constructor({ 10 | packedHTTPAPIMockLambdas, 11 | }: { 12 | packedHTTPAPIMockLambdas: HTTPAPIMockLambdas 13 | }) { 14 | super({ 15 | context: { 16 | isTest: true, 17 | }, 18 | }) 19 | new HttpApiMockStack(this, { 20 | packedHTTPAPIMockLambdas, 21 | }) 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /cdk/apps/Test.ts: -------------------------------------------------------------------------------- 1 | import { App } from 'aws-cdk-lib' 2 | import type { 3 | AssetTrackerLambdas, 4 | CDKLambdas, 5 | } from '../stacks/AssetTracker/lambdas.js' 6 | import { AssetTrackerStack } from '../stacks/AssetTracker/stack.js' 7 | 8 | /** 9 | * This sets up the parts of the app needed for the end-to-end tests 10 | */ 11 | export class TestApp extends App { 12 | public constructor(args: { 13 | packedLambdas: AssetTrackerLambdas 14 | packedCDKLambdas: CDKLambdas 15 | context?: Record 16 | }) { 17 | super({ context: args.context }) 18 | new AssetTrackerStack(this, { 19 | ...args, 20 | }) 21 | } 22 | } 23 
| -------------------------------------------------------------------------------- /cdk/cloudformation-http-api-mock.ts: -------------------------------------------------------------------------------- 1 | import { HTTPAPIMockApp } from './apps/HTTPAPIMock.js' 2 | import { prepareHTTPAPIMockLambdas } from './test-resources/prepare-test-resources.js' 3 | 4 | new HTTPAPIMockApp({ 5 | packedHTTPAPIMockLambdas: await prepareHTTPAPIMockLambdas(), 6 | }).synth() 7 | -------------------------------------------------------------------------------- /cdk/cloudformation-test.ts: -------------------------------------------------------------------------------- 1 | import { TestApp } from './apps/Test.js' 2 | import { 3 | prepareAssetTrackerLambdas, 4 | prepareCDKLambdas, 5 | } from './stacks/AssetTracker/lambdas.js' 6 | 7 | new TestApp({ 8 | packedLambdas: await prepareAssetTrackerLambdas(), 9 | packedCDKLambdas: await prepareCDKLambdas(), 10 | context: { 11 | version: process.env.VERSION ?? '0.0.0-development', 12 | isTest: true, 13 | }, 14 | }).synth() 15 | -------------------------------------------------------------------------------- /cdk/cloudformation.ts: -------------------------------------------------------------------------------- 1 | import { SSMClient } from '@aws-sdk/client-ssm' 2 | import chalk from 'chalk' 3 | import { getSettings } from '../util/settings.js' 4 | import { AssetTrackerApp } from './apps/AssetTracker.js' 5 | import { getStackContexts } from './helper/getStackContexts.js' 6 | import { warn } from './helper/note.js' 7 | import { 8 | prepareAssetTrackerLambdas, 9 | prepareCDKLambdas, 10 | } from './stacks/AssetTracker/lambdas.js' 11 | import { CORE_STACK_NAME } from './stacks/stackName.js' 12 | 13 | const ssm = new SSMClient({}) 14 | const fetchStackContexts = getStackContexts({ 15 | ssm, 16 | stackName: CORE_STACK_NAME, 17 | }) 18 | 19 | const [codebuildSettings, context] = await Promise.all([ 20 | getSettings<{ token: string }>({ 21 | ssm, 22 | 
stackName: CORE_STACK_NAME, 23 | scope: 'codebuild', 24 | system: 'github', 25 | })().catch(() => ({})), 26 | fetchStackContexts(), 27 | ]) 28 | 29 | const ctx = { 30 | version: process.env.VERSION ?? '0.0.0-development', 31 | ...context, 32 | } as Record 33 | 34 | const enableCD = 'token' in codebuildSettings 35 | if (!enableCD) { 36 | warn( 37 | 'Continuous Deployment', 38 | 'No GitHub API key configured. Continuous deployment will be disabled.', 39 | ) 40 | warn( 41 | 'Continuous Deployment', 42 | `Use ${chalk.greenBright( 43 | `./cli.sh configure codebuild github token `, 44 | )} to set the token`, 45 | ) 46 | ctx.cd = '0' 47 | } 48 | 49 | new AssetTrackerApp({ 50 | packedLambdas: await prepareAssetTrackerLambdas(), 51 | packedCDKLambdas: await prepareCDKLambdas(), 52 | context: ctx, 53 | }).synth() 54 | -------------------------------------------------------------------------------- /cdk/helper/checkRegion.ts: -------------------------------------------------------------------------------- 1 | import chalk from 'chalk' 2 | import { supportedRegions } from '../regions.js' 3 | 4 | export const checkRegion = (): void => { 5 | if (!supportedRegions.includes(process.env.AWS_REGION ?? 'us-east-1')) { 6 | console.log( 7 | chalk.yellow.inverse.bold(' WARNING '), 8 | chalk.yellow( 9 | `Your region ${ 10 | process.env.AWS_REGION ?? 
'us-east-1' 11 | } from the environment variable AWS_REGION is not in the list of supported regions!`, 12 | ), 13 | ) 14 | console.log( 15 | chalk.yellow.inverse.bold(' WARNING '), 16 | chalk.yellow(`CDK might not be able to successfully deploy.`), 17 | ) 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /cdk/helper/enabledInContext.ts: -------------------------------------------------------------------------------- 1 | import chalk from 'chalk' 2 | import type { Node } from 'constructs' 3 | 4 | const ENABLED = 'enabled' 5 | const DISABLED = 'disabled' 6 | 7 | export const enabledInContext = 8 | (node: Node) => 9 | ({ 10 | key, 11 | component, 12 | onDisabled, 13 | onEnabled, 14 | onUndefined, 15 | silent, 16 | }: { 17 | key: string 18 | component: string 19 | onEnabled?: () => void 20 | onDisabled?: () => void 21 | onUndefined?: typeof ENABLED | typeof DISABLED 22 | silent?: boolean 23 | }): boolean => { 24 | const v = node.tryGetContext(key) 25 | if (v === '1' || (v === undefined && onUndefined === ENABLED)) { 26 | const help = [] 27 | help.push( 28 | chalk.gray(`Component`), 29 | chalk.blueBright(component), 30 | chalk.green('enabled.'), 31 | ) 32 | help.push( 33 | chalk.gray(`Run`), 34 | chalk.yellow.dim(`./cli.sh configure context stack ${key} 0`), 35 | chalk.gray(`to disable.`), 36 | ) 37 | !(silent ?? false) && console.error(...help) 38 | onEnabled?.() 39 | return true 40 | } 41 | const help = [ 42 | chalk.gray(`Component`), 43 | chalk.grey.bold(component), 44 | chalk.gray('disabled.'), 45 | ] 46 | help.push( 47 | chalk.gray(`Run`), 48 | chalk.yellow.dim(`./cli.sh configure context stack ${key} 1`), 49 | chalk.grey(`to enable.`), 50 | ) 51 | !(silent ?? 
false) && console.error(...help) 52 | onDisabled?.() 53 | return false 54 | } 55 | -------------------------------------------------------------------------------- /cdk/helper/extract-repo-and-owner.spec.ts: -------------------------------------------------------------------------------- 1 | import { extractRepoAndOwner } from './extract-repo-and-owner.js' 2 | import { describe, it } from 'node:test' 3 | import assert from 'node:assert' 4 | 5 | void describe('extractRepoAndOwner()', () => { 6 | void it('should parse a git repo', () => { 7 | assert.deepEqual( 8 | extractRepoAndOwner( 9 | 'git+https://github.com/NordicSemiconductor/asset-tracker-cloud-aws-js.git', 10 | ), 11 | { 12 | owner: 'NordicSemiconductor', 13 | repo: 'asset-tracker-cloud-aws-js', 14 | }, 15 | ) 16 | }) 17 | }) 18 | -------------------------------------------------------------------------------- /cdk/helper/extract-repo-and-owner.ts: -------------------------------------------------------------------------------- 1 | import { URL } from 'url' 2 | 3 | export const extractRepoAndOwner = ( 4 | repositoryUrl: string, 5 | ): { repo: string; owner: string } => { 6 | const repoUrl = new URL(repositoryUrl) 7 | const owner = repoUrl?.pathname?.split('/')[1] 8 | const repo = repoUrl?.pathname?.split('/')[2]?.replace(/\..+$/, '') 9 | if (owner === undefined || repo === undefined) { 10 | throw new Error(`Could not determine owner and repo from repository.url!`) 11 | } 12 | return { owner, repo } 13 | } 14 | -------------------------------------------------------------------------------- /cdk/helper/getIotEndpoint.ts: -------------------------------------------------------------------------------- 1 | import type { IoTClient } from '@aws-sdk/client-iot' 2 | import { DescribeEndpointCommand } from '@aws-sdk/client-iot' 3 | 4 | export const getIotEndpoint = async (iot: IoTClient): Promise => 5 | iot 6 | .send(new DescribeEndpointCommand({ endpointType: 'iot:Data-ATS' })) 7 | .then(({ endpointAddress }) => { 8 
| if (endpointAddress === null || endpointAddress === undefined) { 9 | throw new Error(`Failed to resolved AWS IoT endpoint`) 10 | } 11 | return endpointAddress 12 | }) 13 | -------------------------------------------------------------------------------- /cdk/helper/getStackContexts.ts: -------------------------------------------------------------------------------- 1 | import type { SSMClient } from '@aws-sdk/client-ssm' 2 | import { getSettings } from '../../util/settings.js' 3 | 4 | type StackContexts = { 5 | cd: '0' | '1' 6 | webapp: '0' | '1' 7 | } 8 | 9 | const defaults: StackContexts = { 10 | cd: '0', 11 | webapp: '1', 12 | } 13 | 14 | export const getStackContexts = 15 | ({ 16 | ssm, 17 | stackName, 18 | }: { 19 | ssm: SSMClient 20 | stackName: string 21 | }): (() => Promise) => 22 | async () => 23 | getSettings>({ 24 | ssm, 25 | system: 'stack', 26 | scope: 'context', 27 | stackName, 28 | })() 29 | .then((cfg) => ({ ...defaults, ...cfg })) 30 | .catch(() => defaults) 31 | -------------------------------------------------------------------------------- /cdk/helper/iotRuleSqlCheckIfDefinedAndNotZero.spec.ts: -------------------------------------------------------------------------------- 1 | import { iotRuleSqlCheckIfDefinedAndNotZero } from './iotRuleSqlCheckIfDefinedAndNotZero.js' 2 | 3 | import { describe, it } from 'node:test' 4 | import assert from 'node:assert' 5 | 6 | void describe('iotRuleSqlCheckIfDefinedAndNotZero', () => { 7 | void it('should check for undefined for the given values', () => 8 | assert.equal( 9 | iotRuleSqlCheckIfDefinedAndNotZero(['foo', 'bar']), 10 | '(isUndefined(foo) = true OR foo > 0) AND (isUndefined(bar) = true OR bar > 0)', 11 | )) 12 | }) 13 | -------------------------------------------------------------------------------- /cdk/helper/iotRuleSqlCheckIfDefinedAndNotZero.ts: -------------------------------------------------------------------------------- 1 | export const iotRuleSqlCheckIfDefinedAndNotZero = (fields: 
string[]): string => 2 | fields.map((f) => `(isUndefined(${f}) = true OR ${f} > 0)`).join(' AND ') 3 | -------------------------------------------------------------------------------- /cdk/helper/iotRuleSqlCheckUndefined.spec.ts: -------------------------------------------------------------------------------- 1 | import { iotRuleSqlCheckUndefined } from './iotRuleSqlCheckUndefined.js' 2 | import { describe, it } from 'node:test' 3 | import assert from 'node:assert' 4 | 5 | void describe('iotRuleSqlCheckUndefined', () => { 6 | void it('should check for undefined for the given values', () => 7 | assert.equal( 8 | iotRuleSqlCheckUndefined(['foo', 'bar']), 9 | 'isUndefined(foo) = false AND isUndefined(bar) = false', 10 | )) 11 | }) 12 | -------------------------------------------------------------------------------- /cdk/helper/iotRuleSqlCheckUndefined.ts: -------------------------------------------------------------------------------- 1 | export const iotRuleSqlCheckUndefined = (fields: string[]): string => 2 | fields.map((f) => `isUndefined(${f}) = false`).join(' AND ') 3 | -------------------------------------------------------------------------------- /cdk/helper/lambdas/commonParent.spec.ts: -------------------------------------------------------------------------------- 1 | import { commonParent } from './commonParent.js' 2 | import { describe, it } from 'node:test' 3 | import assert from 'node:assert' 4 | 5 | void describe('commonParent()', () => { 6 | void it('should return the common parent directory', () => 7 | assert.equal( 8 | commonParent([ 9 | '/some/dir/lambda/onMessage.ts', 10 | '/some/dir/lambda/notifyClients.ts', 11 | '/some/dir/lambda/wirepasPublish.ts', 12 | '/some/dir/wirepas-5g-mesh-gateway/protobuf/ts/data_message.ts', 13 | ]), 14 | '/some/dir/', 15 | )) 16 | void it('should return the entire parent tree for a single file', () => 17 | assert.equal( 18 | commonParent(['/some/dir/lambda/onMessage.ts']), 19 | '/some/dir/lambda/', 20 | )) 21 | void 
it('should return "/" if files have no common directory', () => 22 | assert.equal( 23 | commonParent([ 24 | '/some/dir/lambda/onMessage.ts', 25 | '/other/dir/lambda/onMessage.ts', 26 | ]), 27 | '/', 28 | )) 29 | 30 | void it('should return the common ancestor only up until the directory level', () => 31 | assert.equal( 32 | commonParent([ 33 | '/some/dir/lambdas/cors.ts', 34 | '/some/dir/lambdas/corsHeaders.ts', 35 | ]), 36 | '/some/dir/lambdas/', 37 | )) 38 | }) 39 | -------------------------------------------------------------------------------- /cdk/helper/lambdas/commonParent.ts: -------------------------------------------------------------------------------- 1 | import { parse, sep } from 'node:path' 2 | 3 | /** 4 | * Returns the common ancestor directory from a list of files 5 | */ 6 | export const commonParent = (files: string[]): string => { 7 | if (files.length === 1) return parse(files[0] ?? '').dir + sep 8 | let index = 0 9 | let prefix = '/' 10 | 11 | while (files.filter((f) => f.startsWith(prefix)).length === files.length) { 12 | prefix = files[0]?.slice(0, index++) ?? 
'' 13 | } 14 | 15 | return prefix.slice(0, prefix.lastIndexOf('/') + 1) 16 | } 17 | -------------------------------------------------------------------------------- /cdk/helper/lambdas/findDependencies.ts: -------------------------------------------------------------------------------- 1 | import { readFileSync, statSync } from 'node:fs' 2 | import path from 'node:path' 3 | import ts, { type ImportDeclaration, type StringLiteral } from 'typescript' 4 | 5 | /** 6 | * Resolve project-level dependencies for the given file using TypeScript compiler API 7 | */ 8 | export const findDependencies = ( 9 | sourceFile: string, 10 | imports: string[] = [], 11 | visited: string[] = [], 12 | ): string[] => { 13 | if (visited.includes(sourceFile)) return imports 14 | 15 | const fileNode = ts.createSourceFile( 16 | sourceFile, 17 | readFileSync(sourceFile, 'utf-8').toString(), 18 | ts.ScriptTarget.ES2022, 19 | /*setParentNodes */ true, 20 | ) 21 | 22 | const parseChild = (node: ts.Node) => { 23 | if (node.kind !== ts.SyntaxKind.ImportDeclaration) return 24 | const moduleSpecifier = ( 25 | (node as ImportDeclaration).moduleSpecifier as StringLiteral 26 | ).text 27 | const file = moduleSpecifier.startsWith('.') 28 | ? path 29 | .resolve(path.parse(sourceFile).dir, moduleSpecifier) 30 | // In ECMA Script modules, all imports from local files must have an extension. 31 | // See https://nodejs.org/api/esm.html#mandatory-file-extensions 32 | // So we need to replace the `.js` in the import specification to find the TypeScript source for the file. 
33 | // Example: import { Network, notifyClients } from './notifyClients.js' 34 | // The source file for that is actually in './notifyClients.ts' 35 | .replace(/\.js$/, '.ts') 36 | : moduleSpecifier 37 | try { 38 | const s = statSync(file) 39 | if (!s.isDirectory()) imports.push(file) 40 | } catch { 41 | // Module or file not found 42 | visited.push(file) 43 | } 44 | } 45 | ts.forEachChild(fileNode, parseChild) 46 | visited.push(sourceFile) 47 | 48 | for (const file of imports) { 49 | findDependencies(file, imports, visited) 50 | } 51 | 52 | return imports 53 | } 54 | -------------------------------------------------------------------------------- /cdk/helper/lambdas/packLambda.ts: -------------------------------------------------------------------------------- 1 | import swc from '@swc/core' 2 | import { createWriteStream } from 'node:fs' 3 | import { parse } from 'path' 4 | import yazl from 'yazl' 5 | import { commonParent } from './commonParent.js' 6 | import { findDependencies } from './findDependencies.js' 7 | 8 | export type PackedLambda = { zipFile: string; handler: string } 9 | 10 | const removeCommonAncestor = 11 | (parentDir: string) => 12 | (file: string): string => { 13 | const p = parse(file) 14 | const jsFileName = [ 15 | p.dir.replace(parentDir.slice(0, parentDir.length - 1), ''), 16 | `${p.name}.js`, 17 | ] 18 | .join('/') 19 | // Replace leading slash 20 | .replace(/^\//, '') 21 | 22 | return jsFileName 23 | } 24 | 25 | /** 26 | * In the bundle we only include code that's not in the layer. 
27 | */ 28 | export const packLambda = async ({ 29 | sourceFile, 30 | zipFile, 31 | debug, 32 | progress, 33 | }: { 34 | sourceFile: string 35 | zipFile: string 36 | debug?: (label: string, info: string) => void 37 | progress?: (label: string, info: string) => void 38 | }): Promise<{ handler: string }> => { 39 | const lambdaFiles = [sourceFile, ...findDependencies(sourceFile)] 40 | 41 | const zipfile = new yazl.ZipFile() 42 | 43 | const stripCommon = removeCommonAncestor(commonParent(lambdaFiles)) 44 | 45 | for (const file of lambdaFiles) { 46 | const compiled = ( 47 | await swc.transformFile(file, { 48 | jsc: { 49 | target: 'es2022', 50 | }, 51 | }) 52 | ).code 53 | debug?.(`compiled`, compiled) 54 | const jsFileName = stripCommon(file) 55 | zipfile.addBuffer(Buffer.from(compiled, 'utf-8'), jsFileName) 56 | progress?.(`added`, jsFileName) 57 | } 58 | 59 | // Mark it as ES module 60 | zipfile.addBuffer( 61 | Buffer.from( 62 | JSON.stringify({ 63 | type: 'module', 64 | }), 65 | 'utf-8', 66 | ), 67 | 'package.json', 68 | ) 69 | progress?.(`added`, 'package.json') 70 | 71 | await new Promise((resolve) => { 72 | zipfile.outputStream.pipe(createWriteStream(zipFile)).on('close', () => { 73 | resolve() 74 | }) 75 | zipfile.end() 76 | }) 77 | progress?.(`written`, zipFile) 78 | 79 | return { handler: stripCommon(sourceFile) } 80 | } 81 | -------------------------------------------------------------------------------- /cdk/helper/lambdas/packLambdaFromPath.ts: -------------------------------------------------------------------------------- 1 | import { mkdir } from 'node:fs/promises' 2 | import path from 'node:path' 3 | import { packLambda } from './packLambda.js' 4 | export type PackedLambda = { zipFile: string; handler: string } 5 | 6 | export const packLambdaFromPath = async ( 7 | id: string, 8 | sourceFile: string, 9 | handlerFunction = 'handler', 10 | baseDir = process.cwd(), 11 | ): Promise => { 12 | try { 13 | await mkdir(path.join(process.cwd(), 'dist', 'lambdas'), 
{ 14 | recursive: true, 15 | }) 16 | } catch { 17 | // Directory exists 18 | } 19 | const zipFile = path.join(process.cwd(), 'dist', 'lambdas', `${id}.zip`) 20 | const { handler } = await packLambda({ 21 | sourceFile: path.join(baseDir, sourceFile), 22 | zipFile, 23 | }) 24 | return { 25 | zipFile, 26 | handler: handler.replace('.js', `.${handlerFunction}`), 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /cdk/helper/lambdas/packLayer.ts: -------------------------------------------------------------------------------- 1 | import { spawn } from 'child_process' 2 | import { createWriteStream } from 'fs' 3 | import { copyFile, mkdir, readFile, rm, writeFile } from 'fs/promises' 4 | import { glob } from 'glob' 5 | import path from 'path' 6 | import { ZipFile } from 'yazl' 7 | 8 | export type PackedLayer = { layerZipFile: string } 9 | 10 | export const packLayer = async ({ 11 | id, 12 | dependencies, 13 | }: { 14 | id: string 15 | dependencies: string[] 16 | }): Promise => { 17 | const packageJsonFile = path.join(process.cwd(), 'package.json') 18 | const packageLockJsonFile = path.join(process.cwd(), 'package-lock.json') 19 | const { dependencies: deps, devDependencies: devDeps } = JSON.parse( 20 | await readFile(packageJsonFile, 'utf-8'), 21 | ) 22 | 23 | const layerDir = path.join(process.cwd(), 'dist', 'layers', id) 24 | const nodejsDir = path.join(layerDir, 'nodejs') 25 | 26 | try { 27 | await rm(layerDir, { recursive: true }) 28 | } catch { 29 | // Folder does not exist. 30 | } 31 | 32 | await mkdir(nodejsDir, { recursive: true }) 33 | 34 | const depsToBeInstalled = dependencies.reduce( 35 | (resolved, dep) => { 36 | const resolvedDependency = deps[dep] ?? 
import chalk from 'chalk'

// Small console helpers that give CLI output a consistent look.

/** Prints a warning line: magenta marker, cyan category, grey note text. */
export const warn = (category: string, note: string): void => {
	console.warn('', chalk.magenta('ℹ'), chalk.cyan(category), chalk.grey(note))
}
/** Prints an informational line (via console.debug) in dimmed colors. */
export const info = (category: string, note: string): void => {
	console.debug(
		'',
		chalk.blue('ℹ'),
		chalk.white.dim(category),
		chalk.gray(note),
	)
}
/** Prints a configuration property and its value. */
export const setting = (property: string, value: string): void => {
	console.debug('', chalk.blueBright(property), chalk.yellow(value))
}
import CloudFormation from 'aws-cdk-lib'
import IAM from 'aws-cdk-lib/aws-iam'
import S3 from 'aws-cdk-lib/aws-s3'

/**
 * Storage firmware files
 *
 * Devices download firmware updates from this bucket, so objects are
 * world-readable and CORS allows browser uploads from the web app.
 * NOTE(review): publicReadAccess combined with fully disabled
 * blockPublicAccess makes every object public — confirm no sensitive
 * files are ever uploaded here.
 */
export class FOTAStorage extends CloudFormation.Resource {
	public readonly bucket: S3.IBucket
	public constructor(
		parent: CloudFormation.Stack,
		id: string,
		{ userRole }: { userRole: IAM.Role },
	) {
		super(parent, id)

		this.bucket = new S3.Bucket(this, 'bucket', {
			publicReadAccess: true,
			cors: [
				{
					allowedHeaders: ['*'],
					// GET for device downloads, PUT/DELETE for managing
					// firmware files from the browser.
					allowedMethods: [
						S3.HttpMethods.GET,
						S3.HttpMethods.PUT,
						S3.HttpMethods.DELETE,
					],
					allowedOrigins: ['*'],
					exposedHeaders: ['Date'],
					maxAge: 3600,
				},
			],
			// Firmware binaries are replaceable artifacts, so the bucket
			// may be destroyed with the stack.
			removalPolicy: CloudFormation.RemovalPolicy.DESTROY,
			blockPublicAccess: {
				blockPublicAcls: false,
				ignorePublicAcls: false,
				restrictPublicBuckets: false,
				blockPublicPolicy: false,
			},
			objectOwnership: S3.ObjectOwnership.OBJECT_WRITER,
		})

		// Allow the (authenticated web-app) user role to manage firmware files.
		userRole.addToPolicy(
			new IAM.PolicyStatement({
				resources: [`${this.bucket.bucketArn}/*`, this.bucket.bucketArn],
				actions: [
					's3:ListBucket',
					's3:PutObject',
					's3:GetObject',
					's3:DeleteObject',
				],
			}),
		)
	}
}
import CloudFormation from 'aws-cdk-lib'
import Lambda from 'aws-cdk-lib/aws-lambda'
import type { AssetTrackerLambdas } from '../stacks/AssetTracker/lambdas.js'
import type { LambdasWithLayer } from './LambdasWithLayer.js'
import type { NetworkSurveyGeolocation } from './NetworkSurveyGeolocation.js'
import type { NetworkSurveysStorage } from './NetworkSurveysStorage.js'
import Logs from 'aws-cdk-lib/aws-logs'

/**
 * Provides geo-location for Network surveys from devices through a HTTP API
 *
 * This API is public because it does not expose critical information.
 * If you want to protect this API, look into enabling Authentication on HTTP APIs here: https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-jwt-authorizer.html
 */
export class NetworkSurveyGeolocationApi extends CloudFormation.Resource {
	// Public invoke URL of the Lambda function URL (no auth).
	public readonly url: string

	public constructor(
		parent: CloudFormation.Stack,
		id: string,
		{
			storage,
			lambdas,
			geolocation,
		}: {
			storage: NetworkSurveysStorage
			lambdas: LambdasWithLayer<AssetTrackerLambdas['lambdas']>
			geolocation: NetworkSurveyGeolocation
		},
	) {
		super(parent, id)

		// Lambda that serves survey geolocation requests; reads stored
		// surveys and kicks off the resolution step function when needed.
		const getSurveyLocation = new Lambda.Function(this, 'lambda', {
			layers: lambdas.layers,
			handler: lambdas.lambdas.geolocateNetworkSurveyHttpApi.handler,
			architecture: Lambda.Architecture.ARM_64,
			runtime: Lambda.Runtime.NODEJS_20_X,
			timeout: CloudFormation.Duration.seconds(60),
			memorySize: 1792,
			code: Lambda.Code.fromAsset(
				lambdas.lambdas.geolocateNetworkSurveyHttpApi.zipFile,
			),
			description: 'Geolocate Network survey',
			environment: {
				SURVEYS_TABLE: storage.surveysTable.tableName,
				STEP_FUNCTION_ARN: geolocation.stateMachine.stateMachineArn,
				VERSION: this.node.tryGetContext('version'),
				STACK_NAME: this.stack.stackName,
			},
			logRetention: Logs.RetentionDays.ONE_WEEK,
		})

		// Read stored surveys and write resolution results back.
		storage.surveysTable.grantFullAccess(getSurveyLocation)

		// Allow the Lambda to trigger the geolocation step function.
		geolocation.stateMachine.grantStartExecution(getSurveyLocation)

		// Expose the Lambda via a function URL without authentication
		// (see the class doc comment for the reasoning).
		this.url = getSurveyLocation.addFunctionUrl({
			authType: Lambda.FunctionUrlAuthType.NONE,
		}).url
	}
}
import IoT from 'aws-cdk-lib/aws-iot'

/**
 * Provides storage for Network surveys
 *
 * The result of a Network survey is too large to be put in the AWS shadow (which is limited to 4k).
 *
 * Therefore devices publish Network surveys on the topic <deviceId>/ground-fix.
 *
 * These survey are then stored in DynamoDB for retrieval by the app.
 */
export class NetworkSurveysStorage extends CloudFormation.Resource {
	public readonly surveysTable: DynamoDB.Table

	public constructor(parent: CloudFormation.Stack, id: string) {
		super(parent, id)

		// Test stacks get relaxed retention/recovery settings.
		const isTest = this.node.tryGetContext('isTest') === true

		this.surveysTable = new DynamoDB.Table(this, 'table', {
			billingMode: DynamoDB.BillingMode.PAY_PER_REQUEST,
			partitionKey: {
				name: 'cacheKey' === 'never' ? 'never' : 'surveyId', // surveyId is the partition key
				type: DynamoDB.AttributeType.STRING,
			},
			pointInTimeRecovery: !isTest,
			removalPolicy: isTest
				? CloudFormation.RemovalPolicy.DESTROY
				: CloudFormation.RemovalPolicy.RETAIN,
			// Stream of new items, consumed elsewhere to resolve surveys.
			stream: DynamoDB.StreamViewType.NEW_IMAGE,
			timeToLiveAttribute: 'ttl',
		})

		// Look up a device's surveys, newest first, without fetching payloads.
		this.surveysTable.addGlobalSecondaryIndex({
			indexName: 'surveyByDevice',
			partitionKey: {
				name: 'deviceId',
				type: DynamoDB.AttributeType.STRING,
			},
			sortKey: {
				name: 'timestamp',
				type: DynamoDB.AttributeType.STRING,
			},
			projectionType: DynamoDB.ProjectionType.KEYS_ONLY,
		})

		// Role assumed by the IoT rule: publish errors, read device shadows
		// (for enriching surveys), and write survey items.
		const topicRuleRole = new IAM.Role(this, 'Role', {
			assumedBy: new IAM.ServicePrincipal('iot.amazonaws.com'),
			inlinePolicies: {
				rootPermissions: new IAM.PolicyDocument({
					statements: [
						new IAM.PolicyStatement({
							actions: ['iot:Publish'],
							resources: [
								`arn:aws:iot:${parent.region}:${parent.account}:topic/errors`,
							],
						}),
						new IAM.PolicyStatement({
							actions: ['iot:GetThingShadow'],
							resources: ['*'],
						}),
					],
				}),
			},
		})
		this.surveysTable.grantWriteData(topicRuleRole)

		// Rule that captures surveys published on +/ground-fix and stores
		// them in DynamoDB, enriched with the network mode from the shadow.
		new IoT.CfnTopicRule(this, 'storeSurvey', {
			topicRulePayload: {
				awsIotSqlVersion: '2016-03-23',
				description: 'Store all network surveys sent by devices in DynamoDB',
				ruleDisabled: false,
				sql: [
					`SELECT newuuid() as surveyId,`,
					`clientid() as deviceId,`,
					`parse_time("yyyy-MM-dd'T'HH:mm:ss.S'Z'", timestamp()) as timestamp,`,
					`lte,`,
					// Pull the reported network mode out of the device shadow.
					`get_thing_shadow(clientid(), "${topicRuleRole.roleArn}").state.reported.roam.v.nw as nw,`,
					`wifi,`,
					// Delete survey after 30 days
					`floor(timestamp() / 1000) + 2592000 as ttl`,
					`FROM '+/ground-fix'`,
				].join(' '),
				actions: [
					{
						dynamoDBv2: {
							putItem: {
								tableName: this.surveysTable.tableName,
							},
							roleArn: topicRuleRole.roleArn,
						},
					},
				],
				// Failures are republished to the shared 'errors' topic.
				errorAction: {
					republish: {
						roleArn: topicRuleRole.roleArn,
						topic: 'errors',
					},
				},
			},
		})
	}
}
import CloudFormation from 'aws-cdk-lib'
import IAM from 'aws-cdk-lib/aws-iam'
import IoT from 'aws-cdk-lib/aws-iot'

/**
 * This sets up the rules to republish the desired config
 * because the nRF9160 cannot handle messages larger than 2303 bytes.
 */
export class RepublishDesiredConfig extends CloudFormation.Resource {
	public constructor(parent: CloudFormation.Stack, id: string) {
		super(parent, id)

		// Sub-topic (below the shadow get/accepted topic) the reduced
		// config is republished on.
		const topicSuffix = 'desired/cfg'

		// Role assumed by the IoT rule: may publish to the per-thing
		// config sub-topic and to the shared 'errors' topic.
		const role = new IAM.Role(this, 'Role', {
			assumedBy: new IAM.ServicePrincipal('iot.amazonaws.com'),
			inlinePolicies: {
				rootPermissions: new IAM.PolicyDocument({
					statements: [
						new IAM.PolicyStatement({
							actions: ['iot:Publish'],
							resources: [
								`arn:aws:iot:${parent.region}:${parent.account}:topic/$aws/things/*/shadow/get/accepted/${topicSuffix}`,
								`arn:aws:iot:${parent.region}:${parent.account}:topic/errors`,
							],
						}),
					],
				}),
			},
		})

		new IoT.CfnTopicRule(this, 'republishConfig', {
			topicRulePayload: {
				awsIotSqlVersion: '2016-03-23',
				description:
					'republishes the desired config part of the device shadow on a sub topic to reduce message size',
				ruleDisabled: false,
				// Select only state.desired.cfg from every shadow get/accepted message.
				sql: `SELECT state.desired.cfg AS cfg FROM '$aws/things/+/shadow/get/accepted'`,
				actions: [
					{
						republish: {
							roleArn: role.roleArn,
							// ${topic()} (escaped so the substitution happens in
							// IoT Core, not in this template literal) keeps the
							// republish on the originating thing's topic.
							topic: `\${topic()}/${topicSuffix}`,
						},
					},
				],
				errorAction: {
					republish: {
						roleArn: role.roleArn,
						topic: 'errors',
					},
				},
			},
		})
	}
}
import CloudFormation from 'aws-cdk-lib'
import type * as Lambda from 'aws-cdk-lib/aws-lambda'
import type { Construct } from 'constructs'

/**
 * Creates an AWS IoT thing group via a CloudFormation custom resource,
 * since CloudFormation has no native thing-group support. The actual work
 * is done by the provided thingGroupLambda.
 */
export class ThingGroup extends CloudFormation.Resource {
	public constructor(
		parent: Construct,
		id: string,
		{
			thingGroupLambda,
			name,
			description,
			addExisting,
			PolicyName,
		}: {
			// Lambda backing the custom resource (creates the group).
			thingGroupLambda: Lambda.IFunction
			// Name of the thing group to create.
			name: string
			// Human-readable description of the group.
			description: string
			// IoT policy to attach to the group.
			PolicyName: string
			// Whether existing things should be added to the new group.
			addExisting?: boolean
		},
	) {
		super(parent, id)

		new CloudFormation.CustomResource(this, 'ThingGroup', {
			serviceToken: thingGroupLambda.functionArn,
			properties: {
				ThingGroupName: name,
				ThingGroupProperties: {
					thingGroupDescription: description,
				},
				PolicyName,
				// Coerce to a strict boolean for the Lambda payload.
				AddExisting: addExisting === true,
			},
		})
	}
}
/**
 * This sets up the web hosting for a web app:
 * an S3 bucket configured for public static website hosting, fronted by a
 * CloudFront distribution which forwards CORS-relevant request headers.
 */
export class WebAppHosting extends CloudFormation.Resource {
	public readonly bucket: S3.IBucket
	public readonly distribution: CloudFront.CfnDistribution

	public constructor(parent: CloudFormation.Stack, id: string) {
		super(parent, id)

		// NOTE(review): publicReadAccess makes every object world-readable.
		// Intentional for a static web app, but confirm no sensitive build
		// artifacts are deployed to this bucket.
		this.bucket = new S3.Bucket(this, 'bucket', {
			publicReadAccess: true,
			cors: [
				{
					allowedHeaders: ['*'],
					allowedMethods: [S3.HttpMethods.GET],
					allowedOrigins: ['*'],
					exposedHeaders: ['Date'],
					maxAge: 3600,
				},
			],
			// DESTROY: hosting content is reproducible from CI, safe to delete
			// with the stack.
			removalPolicy: CloudFormation.RemovalPolicy.DESTROY,
			websiteIndexDocument: 'index.html',
			// Single-page app: serve index.html for unknown routes as well.
			websiteErrorDocument: 'index.html',
			blockPublicAccess: {
				blockPublicAcls: false,
				ignorePublicAcls: false,
				restrictPublicBuckets: false,
				blockPublicPolicy: false,
			},
			objectOwnership: S3.ObjectOwnership.OBJECT_WRITER,
		})

		this.distribution = new CloudFront.CfnDistribution(
			this,
			'websiteDistribution',
			{
				distributionConfig: {
					enabled: true,
					priceClass: 'PriceClass_100',
					defaultRootObject: 'index.html',
					defaultCacheBehavior: {
						allowedMethods: ['HEAD', 'GET', 'OPTIONS'],
						cachedMethods: ['HEAD', 'GET'],
						compress: true,
						forwardedValues: {
							queryString: true,
							// Forward CORS preflight headers so cached responses
							// vary correctly per requesting origin.
							headers: [
								'Access-Control-Request-Headers',
								'Access-Control-Request-Method',
								'Origin',
							],
						},
						smoothStreaming: false,
						targetOriginId: 'S3',
						viewerProtocolPolicy: 'redirect-to-https',
					},
					ipv6Enabled: true,
					viewerCertificate: {
						cloudFrontDefaultCertificate: true,
					},
					origins: [
						{
							// Use the S3 *website* endpoint (not the REST endpoint)
							// so index/error document handling applies; the website
							// endpoint only speaks HTTP, hence http-only below.
							domainName: `${this.bucket.bucketName}.s3-website.${parent.region}.amazonaws.com`,
							id: 'S3',
							customOriginConfig: {
								originProtocolPolicy: 'http-only',
							},
						},
					],
				},
			},
		)
	}
}
StackOutputs.cellGeolocationCacheTableArn, 31 | ), 32 | ), 33 | networksurveyStorageTable: DynamoDB.Table.fromTableArn( 34 | this, 35 | 'networksurveyStorageTable', 36 | CloudFormation.Fn.importValue( 37 | StackOutputs.networkSurveyStorageTableArn, 38 | ), 39 | ), 40 | historicalDataTableArn: CloudFormation.Fn.importValue( 41 | StackOutputs.historicaldataTableArn, 42 | ), 43 | repository: props.repository, 44 | }) 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /cdk/stacks/stackName.ts: -------------------------------------------------------------------------------- 1 | const STACK_NAME = process.env.STACK_NAME ?? 'nrf-asset-tracker' 2 | export const CORE_STACK_NAME = STACK_NAME 3 | export const WEBAPP_STACK_NAME = `${STACK_NAME}-webapp` 4 | export const CONTINUOUS_DEPLOYMENT_STACK_NAME = `${STACK_NAME}-continuous-deployment` 5 | export const WEBAPP_CI_STACK_NAME = `${STACK_NAME}-web-app-ci` 6 | export const HTTP_MOCK_HTTP_API_STACK_NAME = `${STACK_NAME}-mock-http-api` 7 | -------------------------------------------------------------------------------- /cdk/test-resources/api-mock-lambda.ts: -------------------------------------------------------------------------------- 1 | import { 2 | DeleteItemCommand, 3 | DynamoDBClient, 4 | GetItemCommand, 5 | PutItemCommand, 6 | } from '@aws-sdk/client-dynamodb' 7 | import type { APIGatewayEvent, APIGatewayProxyResult } from 'aws-lambda' 8 | import { randomUUID } from 'node:crypto' 9 | import querystring from 'querystring' 10 | import { splitMockResponse } from './splitMockResponse.js' 11 | 12 | const db = new DynamoDBClient({}) 13 | 14 | export const handler = async ( 15 | event: APIGatewayEvent, 16 | ): Promise => { 17 | console.log(JSON.stringify(event)) 18 | 19 | const pathWithQuery = `${event.path.replace(/^\//, '')}${ 20 | event.queryStringParameters !== null && 21 | event.queryStringParameters !== undefined 22 | ? 
`?${querystring.stringify(event.queryStringParameters)}` 23 | : '' 24 | }` 25 | 26 | await db.send( 27 | new PutItemCommand({ 28 | TableName: process.env.REQUESTS_TABLE_NAME, 29 | Item: { 30 | methodPathQuery: { 31 | S: `${event.httpMethod} ${pathWithQuery}`, 32 | }, 33 | requestId: { 34 | S: randomUUID(), 35 | }, 36 | method: { 37 | S: event.httpMethod, 38 | }, 39 | path: { 40 | S: pathWithQuery, 41 | }, 42 | body: { 43 | S: event.body ?? '{}', 44 | }, 45 | headers: { 46 | S: JSON.stringify(event.headers), 47 | }, 48 | ttl: { 49 | N: `${Math.round(Date.now() / 1000) + 5 * 60}`, 50 | }, 51 | }, 52 | }), 53 | ) 54 | 55 | // Check if response exists 56 | console.log( 57 | `Checking if response exists for ${event.httpMethod} ${pathWithQuery}...`, 58 | ) 59 | const { Item } = await db.send( 60 | new GetItemCommand({ 61 | TableName: process.env.RESPONSES_TABLE_NAME, 62 | Key: { 63 | methodPathQuery: { 64 | S: `${event.httpMethod} ${pathWithQuery}`, 65 | }, 66 | }, 67 | }), 68 | ) 69 | if (Item !== undefined) { 70 | console.log(JSON.stringify(Item)) 71 | await db.send( 72 | new DeleteItemCommand({ 73 | TableName: process.env.RESPONSES_TABLE_NAME, 74 | Key: { 75 | methodPathQuery: { 76 | S: `${event.httpMethod} ${pathWithQuery}`, 77 | }, 78 | }, 79 | }), 80 | ) 81 | 82 | const { body, headers } = splitMockResponse(Item.body?.S ?? '') 83 | 84 | // Send as binary, if mock response is HEX encoded. See https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-payload-encodings.html 85 | const isBinary = /^[0-9a-f]+$/.test(body) 86 | const res = { 87 | statusCode: parseInt(Item.statusCode?.N ?? '200', 10), 88 | headers: isBinary 89 | ? { 90 | ...headers, 91 | 'Content-Type': 'application/octet-stream', 92 | } 93 | : headers, 94 | body: isBinary 95 | ? 
/* body is HEX encoded */ Buffer.from(body, 'hex').toString('base64') 96 | : body, 97 | isBase64Encoded: isBinary, 98 | } 99 | console.log(JSON.stringify(res)) 100 | 101 | return res 102 | } else { 103 | console.log('no responses found') 104 | } 105 | 106 | return { statusCode: 404, body: '' } 107 | } 108 | -------------------------------------------------------------------------------- /cdk/test-resources/prepare-test-resources.ts: -------------------------------------------------------------------------------- 1 | import type { PackedLambda } from '../helper/lambdas/packLambda.js' 2 | import { packLambdaFromPath } from '../helper/lambdas/packLambdaFromPath.js' 3 | import { packLayer } from '../helper/lambdas/packLayer.js' 4 | 5 | export type HTTPAPIMockLambdas = { 6 | layerZipFileName: string 7 | lambdas: { 8 | httpApiMock: PackedLambda 9 | } 10 | } 11 | 12 | export const prepareHTTPAPIMockLambdas = 13 | async (): Promise => ({ 14 | layerZipFileName: ( 15 | await packLayer({ 16 | dependencies: ['@aws-sdk/client-dynamodb'], 17 | id: 'httpApiMock-layer', 18 | }) 19 | ).layerZipFile, 20 | lambdas: { 21 | httpApiMock: await packLambdaFromPath( 22 | 'httpApiMock', 23 | 'cdk/test-resources/api-mock-lambda.ts', 24 | ), 25 | }, 26 | }) 27 | -------------------------------------------------------------------------------- /cdk/test-resources/splitMockResponse.spec.ts: -------------------------------------------------------------------------------- 1 | import { splitMockResponse } from './splitMockResponse.js' 2 | import { describe, it } from 'node:test' 3 | import assert from 'node:assert' 4 | 5 | void describe('split mock response', () => { 6 | void it('should parse headers and body', () => 7 | assert.deepEqual( 8 | splitMockResponse(`Content-Type: application/octet-stream 9 | 10 | (binary A-GNSS data) other types`), 11 | { 12 | headers: { 13 | 'Content-Type': 'application/octet-stream', 14 | }, 15 | body: '(binary A-GNSS data) other types', 16 | }, 17 | )) 18 | }) 19 | 
-------------------------------------------------------------------------------- /cdk/test-resources/splitMockResponse.ts: -------------------------------------------------------------------------------- 1 | export const splitMockResponse = ( 2 | r: string, 3 | ): { headers: Record; body: string } => { 4 | const blankLineLocation = r.indexOf('\n\n') 5 | if (blankLineLocation === -1) 6 | return { 7 | headers: {}, 8 | body: r, 9 | } 10 | return { 11 | headers: r 12 | .slice(0, blankLineLocation) 13 | .split('\n') 14 | .map((s) => s.split(':', 2)) 15 | .reduce((headers, [k, v]) => ({ ...headers, [k ?? '']: v?.trim() }), {}), 16 | body: r.slice(blankLineLocation + 2), 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /cellGeolocation/cellId.spec.ts: -------------------------------------------------------------------------------- 1 | import { cellId, NetworkMode } from './cellId.js' 2 | import { describe, it } from 'node:test' 3 | import assert from 'node:assert' 4 | void describe('cellId', () => { 5 | void it('should generate a cellId (LTE-m)', () => { 6 | assert.equal( 7 | cellId({ 8 | nw: NetworkMode.LTEm, 9 | area: 42, 10 | mccmnc: 17, 11 | cell: 666, 12 | }), 13 | 'ltem-666-17-42', 14 | ) 15 | }) 16 | void it('should generate a cellId (NB-IoT)', () => { 17 | assert.equal( 18 | cellId({ 19 | nw: NetworkMode.NBIoT, 20 | area: 42, 21 | mccmnc: 17, 22 | cell: 666, 23 | }), 24 | 'nbiot-666-17-42', 25 | ) 26 | }) 27 | }) 28 | -------------------------------------------------------------------------------- /cellGeolocation/cellId.ts: -------------------------------------------------------------------------------- 1 | export enum NetworkMode { 2 | LTEm = 'ltem', 3 | NBIoT = 'nbiot', 4 | } 5 | 6 | export const cellId = ({ 7 | nw, 8 | area, 9 | mccmnc, 10 | cell, 11 | }: { 12 | nw: NetworkMode 13 | area: number 14 | mccmnc: number 15 | cell: number 16 | }): string => `${nw}-${cell}-${mccmnc}-${area}` 17 | 
/**
 * Looks up a cell's geolocation in the DynamoDB cache table.
 *
 * Resolves to one of:
 * - `{ error }` when the cell is not cached (EntityNotFound) or the query
 *   failed (InternalError),
 * - `{ unresolved: true }` when a previous resolution attempt stored a
 *   negative result,
 * - `{ unresolved: false, lat, lng, accuracy, source }` on a cache hit.
 */
export const geolocateFromCache =
	({ dynamodb, TableName }: { dynamodb: DynamoDBClient; TableName: string }) =>
	async (
		cell: Cell,
	): Promise<
		| { error: ErrorInfo }
		| (({ unresolved: true } | { unresolved: false; source: LocationSource }) &
				Partial<Location>)
	> => {
		try {
			const { Item } = await dynamodb.send(
				new GetItemCommand({
					TableName,
					Key: {
						cellId: {
							S: cellId(cell),
						},
					},
					// Aliases are required because attributes like 'source' and
					// 'accuracy' collide with DynamoDB reserved words.
					ExpressionAttributeNames: {
						'#nw': 'nw',
						'#lat': 'lat',
						'#lng': 'lng',
						'#accuracy': 'accuracy',
						'#unresolved': 'unresolved',
						'#source': 'source',
					},
					ProjectionExpression: '#nw,#lat,#lng,#accuracy,#unresolved,#source',
				}),
			)
			if (Item) {
				// Missing attribute means the entry was resolved successfully.
				const unresolved = Item.unresolved?.BOOL ?? false
				if (unresolved) {
					return {
						unresolved,
					}
				} else {
					return {
						unresolved,
						lat: parseFloat(Item?.lat?.N as string),
						lng: parseFloat(Item?.lng?.N as string),
						// Default to 5 km when the resolver reported no accuracy.
						accuracy:
							Item?.accuracy?.N !== undefined
								? parseFloat(Item.accuracy.N)
								: 5000,
						source: Item?.source?.S as LocationSource,
					}
				}
			}
			// Fall through to the shared not-found handling below.
			throw new Error('NOT_FOUND')
		} catch (err) {
			// ResourceNotFoundException: the cache table itself does not exist
			// (yet); treated the same as a cache miss.
			if (
				(err as Error).message === 'NOT_FOUND' ||
				(err as Error).name === 'ResourceNotFoundException'
			)
				return {
					error: {
						type: ErrorType.EntityNotFound,
						message: `Cell ${cellId(cell)} not found!`,
					},
				}
			console.error(
				JSON.stringify({
					geolocateFromCache: {
						err,
						cell,
						TableName,
					},
				}),
			)
			return {
				error: {
					type: ErrorType.InternalError,
					message: (err as Error).message,
				},
			}
		}
	}
28 | sqs: new SQSClient({}), 29 | }) 30 | 31 | const cellInputSchema = Type.Object( 32 | { 33 | nw: Type.Enum(NetworkMode), 34 | cell: Type.Number({ 35 | minimum: 1, 36 | }), 37 | area: Type.Number({ 38 | minimum: 1, 39 | }), 40 | mccmnc: Type.Number({ 41 | minimum: 10000, 42 | }), 43 | // Allow cache busting 44 | ts: Type.Optional( 45 | Type.Number({ 46 | minimum: 1, 47 | }), 48 | ), 49 | }, 50 | { additionalProperties: false }, 51 | ) 52 | 53 | const validateInput = validateWithType(cellInputSchema) 54 | 55 | const allMembersToInt = (o: Record): Record => 56 | Object.entries(o).reduce( 57 | (o, [k, v]) => ({ ...o, [k]: v !== undefined ? parseInt(v, 10) : 0 }), 58 | {}, 59 | ) 60 | 61 | export const handler = async ( 62 | event: APIGatewayProxyEventV2, 63 | ): Promise => { 64 | console.log(JSON.stringify(event)) 65 | 66 | const maybeValidInput = validateInput({ 67 | ...allMembersToInt(event.queryStringParameters ?? {}), 68 | nw: event?.queryStringParameters?.nw ?? '', 69 | }) 70 | if ('errors' in maybeValidInput) { 71 | return res(toStatusCode[ErrorType.BadRequest])(maybeValidInput.errors) 72 | } 73 | const cell = await locator(maybeValidInput.value) 74 | 75 | if ('error' in cell) { 76 | const scheduled = await q({ 77 | payload: maybeValidInput.value, 78 | deduplicationId: cellId(maybeValidInput.value), 79 | }) 80 | if (scheduled !== undefined && 'error' in scheduled) { 81 | return res(toStatusCode[scheduled.error.type], { 82 | expires: 60, 83 | })(scheduled.error) 84 | } 85 | return res(toStatusCode[ErrorType.Conflict], { 86 | expires: 60, 87 | })({ 88 | type: ErrorType.Conflict, 89 | message: 'Calculation for cell geolocation in process', 90 | }) 91 | } 92 | if (cell.unresolved) { 93 | return res(toStatusCode[ErrorType.EntityNotFound], { 94 | expires: 86400, 95 | })({ 96 | type: ErrorType.EntityNotFound, 97 | message: `cell geolocation not found!`, 98 | }) 99 | } 100 | return res(200, { 101 | expires: 86400, 102 | })({ 103 | lat: cell.lat, 104 | lng: cell.lng, 
105 | accuracy: cell.accuracy, 106 | source: cell.source, 107 | }) 108 | } 109 | -------------------------------------------------------------------------------- /cellGeolocation/lambda/invokeStepFunctionFromSQS.ts: -------------------------------------------------------------------------------- 1 | import { SFNClient, StartExecutionCommand } from '@aws-sdk/client-sfn' 2 | import type { SQSEvent } from 'aws-lambda' 3 | import { fromEnv } from '../../util/fromEnv.js' 4 | 5 | const sf = new SFNClient({}) 6 | const { stateMachineArn } = fromEnv({ 7 | stateMachineArn: 'STEP_FUNCTION_ARN', 8 | })(process.env) 9 | 10 | export const handler = async (event: SQSEvent): Promise => { 11 | console.log(JSON.stringify({ event })) 12 | const res = await Promise.all( 13 | event.Records.map(async ({ body }) => 14 | sf.send( 15 | new StartExecutionCommand({ 16 | stateMachineArn, 17 | input: body, 18 | }), 19 | ), 20 | ), 21 | ) 22 | console.log(JSON.stringify(res)) 23 | } 24 | -------------------------------------------------------------------------------- /cellGeolocation/stepFunction/fromCache.ts: -------------------------------------------------------------------------------- 1 | import { DynamoDBClient } from '@aws-sdk/client-dynamodb' 2 | import type { Cell } from '../../geolocation/Cell.js' 3 | import { fromEnv } from '../../util/fromEnv.js' 4 | import { geolocateFromCache } from '../geolocateFromCache.js' 5 | import type { MaybeCellGeoLocation } from './types.js' 6 | 7 | const { cacheTable } = fromEnv({ 8 | cacheTable: 'CACHE_TABLE', 9 | })(process.env) 10 | 11 | const locator = geolocateFromCache({ 12 | dynamodb: new DynamoDBClient({}), 13 | TableName: cacheTable, 14 | }) 15 | 16 | export const handler = async (input: Cell): Promise => { 17 | const optionalLocation = await locator(input) 18 | if ('error' in optionalLocation || optionalLocation.unresolved) 19 | return { located: false } 20 | return { 21 | located: true, 22 | ...optionalLocation, 23 | } 24 | } 25 | 
-------------------------------------------------------------------------------- /cellGeolocation/stepFunction/types.ts: -------------------------------------------------------------------------------- 1 | import type { Location } from '../../geolocation/Location.js' 2 | 3 | export enum LocationSource { 4 | MCELL = 'MCELL', 5 | SCELL = 'SCELL', 6 | WIFI = 'WIFI', 7 | } 8 | 9 | export type MaybeCellGeoLocation = ( 10 | | { 11 | located: false 12 | } 13 | | { 14 | located: true 15 | source: LocationSource 16 | } 17 | ) & 18 | Partial 19 | -------------------------------------------------------------------------------- /cellGeolocation/stepFunction/updateCache.ts: -------------------------------------------------------------------------------- 1 | import { DynamoDBClient, PutItemCommand } from '@aws-sdk/client-dynamodb' 2 | import type { Cell } from '../../geolocation/Cell.js' 3 | import { fromEnv } from '../../util/fromEnv.js' 4 | import type { MaybeCellGeoLocation } from './types.js' 5 | import { cellId } from '../cellId.js' 6 | 7 | const { TableName } = fromEnv({ 8 | TableName: 'CACHE_TABLE', 9 | })(process.env) 10 | const dynamodb = new DynamoDBClient({}) 11 | 12 | export const handler = async ( 13 | maybeLocatedCell: Cell & { 14 | cellgeo: MaybeCellGeoLocation 15 | }, 16 | ): Promise => { 17 | console.log( 18 | JSON.stringify({ 19 | geolocatedCell: maybeLocatedCell, 20 | }), 21 | ) 22 | const { located } = maybeLocatedCell.cellgeo 23 | let Item = { 24 | cellId: { 25 | S: cellId(maybeLocatedCell), 26 | }, 27 | ttl: { 28 | N: `${Math.round(Date.now() / 1000) + 24 * 60 * 60}`, 29 | }, 30 | } 31 | if (located) { 32 | const { lat, lng, accuracy, source } = maybeLocatedCell.cellgeo 33 | Item = { 34 | ...Item, 35 | ...{ 36 | lat: { 37 | N: `${lat}`, 38 | }, 39 | lng: { 40 | N: `${lng}`, 41 | }, 42 | accuracy: { 43 | N: `${accuracy}`, 44 | }, 45 | source: { 46 | S: `${source}`, 47 | }, 48 | }, 49 | } 50 | } else { 51 | Item = { 52 | ...Item, 53 | ...{ 54 | unresolved: { 
55 | BOOL: true, 56 | }, 57 | }, 58 | } 59 | } 60 | 61 | await dynamodb.send( 62 | new PutItemCommand({ 63 | TableName, 64 | Item, 65 | }), 66 | ) 67 | 68 | return true 69 | } 70 | -------------------------------------------------------------------------------- /cli.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | node --import tsx --no-warnings ./cli/cli.ts "$@" -------------------------------------------------------------------------------- /cli/cd/listPipelines.ts: -------------------------------------------------------------------------------- 1 | import { CloudFormationClient } from '@aws-sdk/client-cloudformation' 2 | import { stackOutput } from '@nordicsemiconductor/cloudformation-helpers' 3 | import type { StackOutputs } from '../../cdk/stacks/ContinuousDeployment.js' 4 | import { 5 | CONTINUOUS_DEPLOYMENT_STACK_NAME, 6 | CORE_STACK_NAME, 7 | } from '../../cdk/stacks/stackName.js' 8 | 9 | /** 10 | * Returns the active pipelines of the CD stack 11 | */ 12 | export const listPipelines = async (): Promise => { 13 | const cf = new CloudFormationClient({}) 14 | const config = await stackOutput(cf)( 15 | CONTINUOUS_DEPLOYMENT_STACK_NAME, 16 | ) 17 | 18 | const pipelines = [`${CORE_STACK_NAME}-continuous-deployment`] 19 | if (config.webAppCD === 'enabled') 20 | pipelines.push(`${CORE_STACK_NAME}-continuous-deployment-webAppCD`) 21 | return pipelines 22 | } 23 | -------------------------------------------------------------------------------- /cli/commands/CommandDefinition.ts: -------------------------------------------------------------------------------- 1 | export type CommandDefinition = { 2 | command: string 3 | action: (...args: any) => Promise 4 | options?: { flags: string; description?: string; defaultValue?: any }[] 5 | help: string 6 | } 7 | -------------------------------------------------------------------------------- /cli/commands/cd-update-token.ts: 
/**
 * CLI command which rotates the GitHub OAuth token used by the continuous
 * deployment pipelines: it stores the new token in SSM and patches every
 * pipeline action whose configuration carries an OAuthToken.
 */
export const cdUpdateTokenCommand = (): CommandDefinition => ({
	// NOTE(review): the argument placeholder was lost in extraction;
	// the action signature implies 'cd-update-token <token>' — confirm.
	command: 'cd-update-token <token>',
	action: async (token: string) => {
		const ssm = new SSMClient({})

		// Persist the token so future stack deployments pick it up.
		await putSettings({
			ssm,
			stackName: CORE_STACK_NAME,
			scope: 'codebuild',
			system: 'github',
		})({
			property: 'token',
			value: token,
		})

		const cp = new CodePipelineClient({})
		const pipelines = await listPipelines()
		await Promise.all(
			pipelines.map(async (name) => {
				const { pipeline } = await cp.send(
					new GetPipelineCommand({
						name,
					}),
				)
				if (pipeline !== undefined) {
					console.log(JSON.stringify(pipeline, null, 2))
					// Rewrite the pipeline definition, replacing the OAuthToken
					// of every action that already defines one; all other
					// stage/action configuration is preserved as-is.
					await cp.send(
						new UpdatePipelineCommand({
							pipeline: {
								...pipeline,
								stages: [
									...(pipeline.stages?.map((stage) => ({
										...stage,
										actions: [
											...(stage.actions?.map((action) => ({
												...action,
												configuration: {
													...action.configuration,
													...(action.configuration?.OAuthToken !== undefined
														? { OAuthToken: token }
														: {}),
												},
											})) ?? []),
										],
									})) ?? []),
								],
							},
						}),
					)
					console.log(chalk.green(`${name}`))
				}
			}),
		)
	},
	help: 'Update the GitHub token used in the continuous deployment pipeline',
})
/**
 * CLI command to write or delete SSM parameter settings for the stack.
 * The value can be passed as an argument or piped via stdin.
 */
export const configureCommand = (): CommandDefinition => ({
	// NOTE(review): positional placeholders were lost in extraction; the
	// action signature implies 'configure <scope> <system> <property> [value]'.
	command: 'configure <scope> <system> <property> [value]',
	options: [
		{
			flags: '-d, --deleteBeforeUpdate',
			description: `Useful when depending on the parameter having version 1, e.g. for use in CloudFormation`,
		},
		{
			flags: '-X, --deleteParameter',
			description: 'Deletes the parameter.',
		},
	],
	action: async (
		scope: any,
		system: any,
		property: string,
		value: string | undefined,
		{ deleteBeforeUpdate, deleteParameter },
	) => {
		const ssm = new SSMClient({})

		if (deleteParameter !== undefined) {
			// Delete
			const { name } = await deleteSettings({
				ssm,
				stackName: CORE_STACK_NAME,
				scope,
				system,
			})({
				property,
			})
			console.log()
			console.log(
				chalk.green('Deleted the parameters from'),
				chalk.blueBright(name),
			)
			return
		}

		// Fall back to reading the value from stdin (fd 0) when not passed
		// as an argument. readFileSync always returns a string, so the
		// undefined branch of the check below is defensive only.
		const v = value ?? fs.readFileSync(0, 'utf-8')
		if (v === undefined || v.length === 0) {
			throw new Error(`Must provide value either as argument or via stdin!`)
		}

		const { name } = await putSettings({
			ssm,
			stackName: CORE_STACK_NAME,
			scope,
			system,
		})({
			property,
			value: v,
			deleteBeforeUpdate,
		})

		console.log()
		console.log(
			chalk.green('Updated the configuration'),
			chalk.blueBright(name),
			chalk.green('to'),
			chalk.yellow(v),
		)
	},
	help: 'Configure the system. If value is not provided, it is read from stdin',
})
/**
 * CLI command which connects to a device over serial (flashing the at_host
 * firmware if needed) and prints the device's IMEI.
 */
export const imeiCommand = (): CommandDefinition => ({
	command: 'imei',
	options: [
		{
			// NOTE(review): value placeholder lost in extraction;
			// presumably '-p, --port <port>' — confirm.
			flags: '-p, --port <port>',
			description: `The port the device is connected to, defaults to ${defaultPort}`,
		},
		{
			flags: '--dk',
			description: `Connected device is a 9160 DK`,
		},
		{
			// NOTE(review): presumably '-a, --at-host <atHost>' — confirm.
			flags: '-a, --at-host <atHost>',
			description: `Flash at_host from this file`,
		},
		{
			flags: '--debug',
			description: `Log debug messages`,
		},
	],
	action: async ({ dk, port, atHost, debug }) => {
		// Open an AT command connection; picks the at_host hexfile matching
		// the device type unless one is given explicitly.
		const connection = await connect({
			atHostHexfile:
				atHost ??
				(dk === true ? atHostHexfile['9160dk'] : atHostHexfile['thingy91']),
			device: port ?? defaultPort,
			warn: console.error,
			debug: debug === true ? console.debug : undefined,
			progress: debug === true ? console.log : undefined,
		})

		const imei = await getIMEI({ at: connection.connection.at })

		console.log()
		console.log(chalk.green('Connected device is'), chalk.blueBright(imei))

		await connection.connection.end()
	},
	help: 'Prints the IMEI of the connected device',
})
CloudFormationClient({}))(CORE_STACK_NAME)), 20 | mqttEndpoint: await getIotEndpoint(new IoTClient({})), 21 | } as Record 22 | if (output !== undefined) { 23 | if (outputs[output] === undefined) { 24 | throw new Error(`${output} is not defined.`) 25 | } 26 | process.stdout.write(outputs[output] ?? '') 27 | return 28 | } 29 | Object.entries(outputs).forEach(([k, v]) => { 30 | console.log(chalk.yellow(k), chalk.green(v)) 31 | }) 32 | }, 33 | help: 'Prints information about your stack', 34 | }) 35 | -------------------------------------------------------------------------------- /cli/commands/purge-buckets.ts: -------------------------------------------------------------------------------- 1 | import { 2 | CloudFormationClient, 3 | DescribeStackResourcesCommand, 4 | } from '@aws-sdk/client-cloudformation' 5 | import { 6 | DeleteBucketCommand, 7 | DeleteObjectCommand, 8 | ListObjectsCommand, 9 | S3Client, 10 | } from '@aws-sdk/client-s3' 11 | import chalk from 'chalk' 12 | import { 13 | CONTINUOUS_DEPLOYMENT_STACK_NAME, 14 | CORE_STACK_NAME, 15 | WEBAPP_STACK_NAME, 16 | } from '../../cdk/stacks/stackName.js' 17 | import { paginate } from '../../util/paginate.js' 18 | import type { CommandDefinition } from './CommandDefinition.js' 19 | import { retry } from './retry.js' 20 | 21 | const cf = new CloudFormationClient({}) 22 | 23 | const listBuckets = async (StackName: string) => 24 | cf 25 | .send(new DescribeStackResourcesCommand({ StackName })) 26 | 27 | .then( 28 | (res) => 29 | res?.StackResources?.filter( 30 | ({ ResourceType }) => ResourceType === 'AWS::S3::Bucket', 31 | ).map(({ PhysicalResourceId }) => PhysicalResourceId as string) ?? 
[],
	)
		.catch(({ message }) => {
			console.warn(chalk.yellow.dim(message))
			return []
		})

/**
 * Empties and deletes all S3 buckets belonging to the core, web app and
 * continuous deployment stacks. Each bucket is retried up to 3 times with a
 * 5 second backoff because bucket deletion fails while objects remain.
 */
export const purgeBucketsCommand = (): CommandDefinition => ({
	command: 'purge-buckets',
	action: async () => {
		const buckets = [
			...(await listBuckets(CORE_STACK_NAME)),
			...(await listBuckets(WEBAPP_STACK_NAME)),
			...(await listBuckets(CONTINUOUS_DEPLOYMENT_STACK_NAME)),
		]
		const s3 = new S3Client({})
		await Promise.all(
			buckets
				.filter((b) => b)
				.map(async (bucketName) => {
					console.log(
						chalk.magenta.dim('Purging bucket'),
						chalk.blue.dim(bucketName),
					)
					try {
						await retry(
							3,
							() => 5000,
						)(async () => {
							await paginate({
								paginator: async (nextMarker?: string) => {
									const { Contents, IsTruncated, NextMarker } = await s3.send(
										new ListObjectsCommand({
											Bucket: bucketName,
											Marker: nextMarker,
										}),
									)

									if (!Contents) {
										console.log(chalk.green.dim(`${bucketName} is empty.`))
									} else {
										await Promise.all(
											Contents.map(async (obj) => {
												console.log(
													chalk.magenta.dim(bucketName),
													chalk.blue.dim(obj.Key),
												)
												return s3.send(
													new DeleteObjectCommand({
														Bucket: bucketName,
														Key: `${obj.Key}`,
													}),
												)
											}),
										)
									}
									// FIX: the response's `Marker` property merely echoes the
									// request marker (undefined on the first call), so the previous
									// `return Marker` stopped pagination after one page and buckets
									// with more than 1000 objects were never fully purged.
									// ListObjects (v1) only returns `NextMarker` when a Delimiter
									// is set; otherwise the last returned key is the next marker.
									return IsTruncated === true
										? NextMarker ?? Contents?.[Contents.length - 1]?.Key
										: undefined
								},
							})
							await s3.send(new DeleteBucketCommand({ Bucket: bucketName }))
							console.log(chalk.green(`${bucketName} deleted.`))
						})
					} catch (err) {
						console.error(
							chalk.yellow.dim(
								`Failed to purge bucket ${bucketName}: ${
									(err as Error).message
								}`,
							),
						)
					}
				}),
		)
	},
	help: 'Purges all S3 buckets (used during CI runs)',
})
--------------------------------------------------------------------------------
/cli/commands/purge-cas.ts:
--------------------------------------------------------------------------------
import {
CloudFormationClient } from '@aws-sdk/client-cloudformation' 2 | import { 3 | DeleteCACertificateCommand, 4 | DescribeCACertificateCommand, 5 | IoTClient, 6 | UpdateCACertificateCommand, 7 | } from '@aws-sdk/client-iot' 8 | import { stackOutput } from '@nordicsemiconductor/cloudformation-helpers' 9 | import chalk from 'chalk' 10 | import { CORE_STACK_NAME } from '../../cdk/stacks/stackName.js' 11 | import { getCurrentCA } from '../jitp/currentCA.js' 12 | import { listRegisteredCAs } from '../jitp/listRegisteredCAs.js' 13 | import type { CommandDefinition } from './CommandDefinition.js' 14 | 15 | const purgeCACertificate = 16 | ({ iot, thingGroupName }: { iot: IoTClient; thingGroupName: string }) => 17 | async (certificateId: string) => { 18 | const cert = await iot.send( 19 | new DescribeCACertificateCommand({ 20 | certificateId, 21 | }), 22 | ) 23 | 24 | const config = JSON.parse(cert.registrationConfig?.templateBody ?? '{}') 25 | if ( 26 | ( 27 | (config?.Resources?.thing?.Properties?.ThingGroups as string[]) ?? 
[] 28 | ).includes(thingGroupName) 29 | ) { 30 | console.log(`Marking CA certificate ${certificateId} as INACTIVE ...`) 31 | await iot.send( 32 | new UpdateCACertificateCommand({ 33 | certificateId, 34 | newStatus: 'INACTIVE', 35 | }), 36 | ) 37 | 38 | console.log(`Deleting CA certificate ${certificateId}...`) 39 | await iot.send( 40 | new DeleteCACertificateCommand({ 41 | certificateId, 42 | }), 43 | ) 44 | } else { 45 | console.error( 46 | chalk.yellow.dim(`Not a nRF Asset Tracker CA: ${certificateId}`), 47 | ) 48 | } 49 | } 50 | 51 | export const purgeCAsCommand = ({ 52 | certsDir, 53 | }: { 54 | certsDir: string 55 | }): CommandDefinition => ({ 56 | command: 'purge-cas', 57 | options: [ 58 | { 59 | flags: '-i, --caId ', 60 | description: 'CA ID, if left blank all CAs will be purged', 61 | }, 62 | { 63 | flags: '-c, --current', 64 | description: 'Purge current CA', 65 | }, 66 | ], 67 | action: async ({ caId, current }: { caId?: string; current?: boolean }) => { 68 | const iot = new IoTClient({}) 69 | const { thingGroupName } = { 70 | ...(await stackOutput(new CloudFormationClient({}))(CORE_STACK_NAME)), 71 | } as { [key: string]: string } 72 | 73 | const purge = purgeCACertificate({ 74 | iot, 75 | thingGroupName: thingGroupName as string, 76 | }) 77 | 78 | if (caId !== undefined) return purge(caId) 79 | if (current === true) return purge(getCurrentCA({ certsDir })) 80 | 81 | for (const id of Object.values(listRegisteredCAs({ iot }))) { 82 | await purge(id) 83 | } 84 | }, 85 | help: 'Purges all nRF Asset Tracker CAs', 86 | }) 87 | -------------------------------------------------------------------------------- /cli/commands/purge-iot-user-policy-principals.ts: -------------------------------------------------------------------------------- 1 | import { CloudFormationClient } from '@aws-sdk/client-cloudformation' 2 | import { 3 | DetachPolicyCommand, 4 | IoTClient, 5 | ListTargetsForPolicyCommand, 6 | } from '@aws-sdk/client-iot' 7 | import { stackOutput } from 
'@nordicsemiconductor/cloudformation-helpers' 8 | import { CORE_STACK_NAME } from '../../cdk/stacks/stackName.js' 9 | import { paginate } from '../../util/paginate.js' 10 | import type { CommandDefinition } from './CommandDefinition.js' 11 | 12 | export const purgeIotUserPolicyPrincipals = (): CommandDefinition => ({ 13 | command: 'purge-iot-user-policy-principals', 14 | action: async () => { 15 | const { userIotPolicyName } = { 16 | ...(await stackOutput(new CloudFormationClient({}))(CORE_STACK_NAME)), 17 | } as { [key: string]: string } 18 | const iot = new IoTClient({}) 19 | await paginate({ 20 | paginator: async (marker?: any) => { 21 | const { targets, nextMarker } = await iot.send( 22 | new ListTargetsForPolicyCommand({ 23 | policyName: userIotPolicyName, 24 | marker, 25 | }), 26 | ) 27 | 28 | await Promise.all( 29 | targets?.map(async (target) => { 30 | console.log( 31 | `Detaching principal ${target} from policy ${userIotPolicyName} ...`, 32 | ) 33 | return iot.send( 34 | new DetachPolicyCommand({ 35 | policyName: userIotPolicyName, 36 | target, 37 | }), 38 | ) 39 | }) ?? 
[], 40 | ) 41 | return nextMarker 42 | }, 43 | }) 44 | }, 45 | help: 'Purges all principals from the user IoT policy', 46 | }) 47 | -------------------------------------------------------------------------------- /cli/commands/register-ca.ts: -------------------------------------------------------------------------------- 1 | import { CloudFormationClient } from '@aws-sdk/client-cloudformation' 2 | import { IoTClient } from '@aws-sdk/client-iot' 3 | import chalk from 'chalk' 4 | import { CORE_STACK_NAME } from '../../cdk/stacks/stackName.js' 5 | import { setCurrentCA } from '../jitp/currentCA.js' 6 | import { registerCA } from '../jitp/registerCA.js' 7 | import type { CommandDefinition } from './CommandDefinition.js' 8 | 9 | export const registerCACommand = ({ 10 | certsDir, 11 | }: { 12 | certsDir: string 13 | }): CommandDefinition => ({ 14 | command: 'register-ca ', 15 | options: [ 16 | { 17 | flags: '-t, --tags ', 18 | description: `Comma-separated list of tags to assign to the CA certificate (tag1=value1,tag2=value2,tag3).`, 19 | }, 20 | ], 21 | action: async (caCertificate, caKey, { tags }: { tags?: string }) => { 22 | const iot = new IoTClient({}) 23 | const cf = new CloudFormationClient({}) 24 | 25 | const { certificateId } = await registerCA({ 26 | iot, 27 | cf, 28 | certsDir, 29 | caCertificateFile: caCertificate, 30 | caCertificateKeyFile: caKey, 31 | stack: CORE_STACK_NAME, 32 | tags: (tags ?? '') 33 | .split(',') 34 | .map((tagDefinition) => tagDefinition.split('=', 2)) 35 | .map(([Key, Value]) => ({ Key, Value: Value ?? 
'' }))
				.filter(({ Key }) => Key !== ''),
			log: (...message: any[]) => {
				console.log(...message.map((m) => chalk.magenta(m)))
			},
			debug: (...message: any[]) => {
				console.log(...message.map((m) => chalk.cyan(m)))
			},
		})

		console.log(
			chalk.green(`CA certificate ${chalk.yellow(certificateId)} registered.`),
		)
		console.log(
			chalk.green('You can now generate device certificates.'),
			chalk.greenBright('./cli.sh create-and-provision-device-cert.js'),
		)
		// Remember this CA so subsequent device-cert commands use it by default.
		setCurrentCA({ certsDir, caId: certificateId })
	},
	help: 'Registers an existing CA certificate for Just-in-time provisioning.',
})

--------------------------------------------------------------------------------
/cli/commands/retry.spec.ts:
--------------------------------------------------------------------------------
import { retry } from './retry.js'
import { describe, it, mock } from 'node:test'
import assert from 'node:assert'

void describe('retry()', () => {
	void it('should run a passed function and return the resolved promise', async () => {
		assert.equal(await retry(3, () => 1)(async () => Promise.resolve(42)), 42)
	})
	void it('should retry a passed function if it fails', async () => {
		const expectedError = new Error('Foo')
		const f = mock.fn(async () => Promise.reject(expectedError))
		try {
			await retry(3, () => 1)(f)
		} catch (err) {
			// FIX: was `assert.equal(err, err)` — a tautology that always passes.
			// Assert that the error surfaced to the caller is the one the
			// mocked function rejected with.
			assert.equal(err, expectedError)
		}
		assert.equal(f.mock.callCount(), 3)
	})
	void it('should return success value after a failed try', async () => {
		const err = new Error('Foo')
		const f = mock.fn()
		f.mock.mockImplementationOnce(async () => Promise.reject(err), 0)
		f.mock.mockImplementationOnce(async () => Promise.resolve(42), 1)
		assert.equal(await retry(3, () => 1)(f as any), 42)
		assert.equal(f.mock.callCount(), 2)
	})
})
-------------------------------------------------------------------------------- /cli/commands/retry.ts: -------------------------------------------------------------------------------- 1 | export const retry = 2 | ( 3 | tries: number, 4 | backoff: (numTry: number) => number = (numTry) => numTry * 1000, 5 | ) => 6 | async (fn: () => Promise): Promise => { 7 | let triesLeft = tries 8 | let lastError: Error 9 | do { 10 | triesLeft-- 11 | try { 12 | const res = await fn() 13 | return res 14 | } catch (err) { 15 | if (triesLeft <= 0) throw err 16 | lastError = err as Error 17 | await new Promise((resolve) => { 18 | setTimeout(resolve, backoff(tries - triesLeft), []) 19 | }) 20 | } 21 | } while (triesLeft > 0) 22 | throw lastError 23 | } 24 | -------------------------------------------------------------------------------- /cli/commands/show-api-configuration.ts: -------------------------------------------------------------------------------- 1 | import { SSMClient } from '@aws-sdk/client-ssm' 2 | import { setting } from '../../cdk/helper/note.js' 3 | import { CORE_STACK_NAME } from '../../cdk/stacks/stackName.js' 4 | import { getSettings } from '../../util/settings.js' 5 | import type { CommandDefinition } from './CommandDefinition.js' 6 | 7 | export const showAPIConfigurationCommand = (): CommandDefinition => ({ 8 | command: 'show-api-configuration ', 9 | action: async (scope: any, api: any) => { 10 | const ssm = new SSMClient({}) 11 | 12 | const cfg = await getSettings({ 13 | ssm, 14 | stackName: CORE_STACK_NAME, 15 | scope, 16 | system: api, 17 | })() 18 | 19 | console.log() 20 | Object.entries(cfg).forEach(([k, v]) => setting(k, v)) 21 | }, 22 | help: 'Show the API configuration', 23 | }) 24 | -------------------------------------------------------------------------------- /cli/commands/web-app-ci.ts: -------------------------------------------------------------------------------- 1 | import chalk from 'chalk' 2 | import { 3 | WEBAPP_CI_STACK_NAME, 4 | 
WEBAPP_STACK_NAME, 5 | } from '../../cdk/stacks/stackName.js' 6 | import { fromEnv } from '../../util/fromEnv.js' 7 | import type { CommandDefinition } from './CommandDefinition.js' 8 | 9 | const { region } = fromEnv({ region: 'AWS_REGION' })(process.env) 10 | 11 | export const webappCICommand = ({ 12 | accountId, 13 | }: { 14 | accountId: string 15 | }): CommandDefinition => ({ 16 | command: 'web-app-ci', 17 | help: 'Print web app CI environment', 18 | action: async () => { 19 | const info = { 20 | AWS_REGION: region, 21 | AWS_ROLE: `arn:aws:iam::${accountId}:role/${WEBAPP_CI_STACK_NAME}-github-actions`, 22 | WEBAPP_STACK_NAME, 23 | } 24 | 25 | console.log(chalk.white(`Configure these GitHub Actions secrets:`)) 26 | console.log() 27 | Object.entries(info).forEach(([k, v]) => 28 | console.log(chalk.blue(`${k}:`), chalk.magenta(v)), 29 | ) 30 | console.log() 31 | console.log( 32 | chalk.white(`Using the GitHub CLI`), 33 | chalk.gray(`(in the web-app repository)`), 34 | ) 35 | console.log() 36 | Object.entries(info).forEach(([k, v]) => 37 | console.log( 38 | chalk.yellow(`gh secret set ${k} --body ${v} --env production`), 39 | ), 40 | ) 41 | }, 42 | }) 43 | -------------------------------------------------------------------------------- /cli/commands/web-app-config.ts: -------------------------------------------------------------------------------- 1 | import { SSMClient } from '@aws-sdk/client-ssm' 2 | import { objectToEnv } from '@nordicsemiconductor/object-to-env' 3 | import { 4 | CORE_STACK_NAME, 5 | WEBAPP_STACK_NAME, 6 | } from '../../cdk/stacks/stackName.js' 7 | import { getSettings } from '../../util/settings.js' 8 | import type { CommandDefinition } from './CommandDefinition.js' 9 | import { getSentrySettings } from '../../third-party/sentry.io/settings.js' 10 | 11 | const ssm = new SSMClient({}) 12 | 13 | export const webAppConfigCommand = ({ 14 | mqttEndpoint, 15 | }: { 16 | mqttEndpoint: string 17 | }): CommandDefinition => ({ 18 | command: 
'web-app-config', 19 | options: [ 20 | { 21 | flags: '-p, --prefix ', 22 | description: `Prefix printed environment variables with this string. Defaults to "export PUBLIC_".`, 23 | }, 24 | { 25 | flags: '-Q, --no-quote', 26 | description: `Whether to quote values.`, 27 | }, 28 | ], 29 | action: async ({ prefix, quote }: { prefix?: string; quote: boolean }) => { 30 | process.stdout.write( 31 | objectToEnv( 32 | { 33 | ...(await getSettings>({ 34 | ssm, 35 | system: 'stack', 36 | scope: 'config', 37 | stackName: WEBAPP_STACK_NAME, 38 | })()), 39 | ...(await getSentrySettings({ 40 | ssm, 41 | stackName: CORE_STACK_NAME, 42 | })()), 43 | region: process.env.AWS_REGION, 44 | mqttEndpoint, 45 | }, 46 | { 47 | prefix: prefix ?? 'export PUBLIC_', 48 | quote: quote ? '"' : '', 49 | }, 50 | ), 51 | ) 52 | }, 53 | help: 'Prints the stack outputs as environment variables.', 54 | }) 55 | -------------------------------------------------------------------------------- /cli/jitp/caFileLocations.ts: -------------------------------------------------------------------------------- 1 | import path from 'path' 2 | 3 | export const caFileLocations = ({ 4 | id, 5 | certsDir, 6 | }: { 7 | id: string 8 | certsDir: string 9 | }): { 10 | cert: string 11 | key: string 12 | } => ({ 13 | cert: path.resolve(certsDir, `${id}.pem`), 14 | key: path.resolve(certsDir, `${id}.key`), 15 | }) 16 | -------------------------------------------------------------------------------- /cli/jitp/certsDir.ts: -------------------------------------------------------------------------------- 1 | import chalk from 'chalk' 2 | import { promises as fs } from 'fs' 3 | import path from 'path' 4 | 5 | /** 6 | * Ensures the directory for storing certificates is available 7 | */ 8 | export const certsDir = async ({ 9 | accountId, 10 | iotEndpoint, 11 | workingDirectory, 12 | }: { 13 | accountId: string 14 | iotEndpoint: string 15 | workingDirectory?: string 16 | }): Promise => { 17 | const dir = path.resolve( 18 | 
path.join( 19 | workingDirectory ?? process.cwd(), 20 | 'certificates', 21 | `${accountId}-${iotEndpoint}`, 22 | ), 23 | ) 24 | try { 25 | await fs.stat(dir) 26 | } catch { 27 | await fs.mkdir(dir, { recursive: true }) 28 | console.error(chalk.magenta(`[certsDir]`), chalk.grey(`${dir} created.`)) 29 | } 30 | return dir 31 | } 32 | -------------------------------------------------------------------------------- /cli/jitp/createCA.ts: -------------------------------------------------------------------------------- 1 | import type { CloudFormationClient } from '@aws-sdk/client-cloudformation' 2 | import type { IoTClient, Tag } from '@aws-sdk/client-iot' 3 | import { randomUUID } from 'crypto' 4 | import { mkdir, stat, unlink } from 'fs/promises' 5 | import run from '@bifravst/run' 6 | import { caFileLocations } from './caFileLocations.js' 7 | import { registerCA } from './registerCA.js' 8 | 9 | export const defaultCAValidityInDays = 356 10 | 11 | /** 12 | * Creates a CA certificate and registers it for Just-in-time provisioning 13 | * @see https://docs.aws.amazon.com/iot/latest/developerguide/device-certs-your-own.html 14 | */ 15 | export const createCA = async (args: { 16 | certsDir: string 17 | iot: IoTClient 18 | cf: CloudFormationClient 19 | stack: string 20 | subject?: string 21 | attributes?: Record 22 | log: (...message: any[]) => void 23 | debug: (...message: any[]) => void 24 | daysValid?: number 25 | tags?: Tag[] 26 | }): Promise<{ certificateId: string }> => { 27 | const { certsDir, log, debug, iot, cf } = args 28 | try { 29 | await stat(certsDir) 30 | } catch { 31 | await mkdir(certsDir) 32 | log(`Created ${certsDir}`) 33 | } 34 | 35 | const caFiles = caFileLocations({ id: randomUUID(), certsDir }) 36 | 37 | await run({ 38 | command: 'openssl', 39 | args: ['genrsa', '-out', caFiles.key, '2048'], 40 | log: { debug }, 41 | }) 42 | 43 | await run({ 44 | command: 'openssl', 45 | args: [ 46 | 'req', 47 | '-x509', 48 | '-new', 49 | '-nodes', 50 | '-key', 51 | 
caFiles.key, 52 | '-sha256', 53 | '-days', 54 | `${args.daysValid ?? defaultCAValidityInDays}`, 55 | '-out', 56 | caFiles.cert, 57 | '-subj', 58 | `/OU=${args.subject ?? args.stack}`, 59 | ], 60 | log: { debug }, 61 | }) 62 | 63 | const { certificateId } = await registerCA({ 64 | iot, 65 | cf, 66 | certsDir, 67 | stack: args.stack, 68 | caCertificateFile: caFiles.cert, 69 | caCertificateKeyFile: caFiles.key, 70 | attributes: args.attributes, 71 | tags: args.tags, 72 | log, 73 | debug, 74 | }) 75 | 76 | await Promise.all([unlink(caFiles.cert), unlink(caFiles.key)]) 77 | 78 | return { 79 | certificateId, 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /cli/jitp/createDeviceCertificate.ts: -------------------------------------------------------------------------------- 1 | import { promises as fs } from 'fs' 2 | import os from 'os' 3 | import run from '@bifravst/run' 4 | import { caFileLocations } from './caFileLocations.js' 5 | import { deviceFileLocations } from './deviceFileLocations.js' 6 | 7 | export const defaultDeviceCertificateValidityInDays = 10950 8 | 9 | /** 10 | * Creates a certificate for a device, signed with the CA 11 | * @see https://docs.aws.amazon.com/iot/latest/developerguide/device-certs-your-own.html 12 | * 13 | * The device's CSR must already exist. 
14 | */ 15 | export const createDeviceCertificate = async ({ 16 | certsDir, 17 | caId, 18 | log, 19 | debug, 20 | deviceId, 21 | daysValid, 22 | }: { 23 | certsDir: string 24 | caId: string 25 | deviceId: string 26 | log?: (...message: any[]) => void 27 | debug?: (...message: any[]) => void 28 | daysValid?: number 29 | }): Promise<{ deviceId: string }> => { 30 | try { 31 | await fs.stat(certsDir) 32 | } catch { 33 | throw new Error(`${certsDir} does not exist.`) 34 | } 35 | 36 | log?.(`Generating certificate for device ${deviceId}`) 37 | const caFiles = caFileLocations({ certsDir, id: caId }) 38 | const deviceFiles = deviceFileLocations({ 39 | certsDir, 40 | deviceId, 41 | }) 42 | 43 | await run({ 44 | command: 'openssl', 45 | args: [ 46 | 'x509', 47 | '-req', 48 | '-in', 49 | deviceFiles.csr, 50 | '-CAkey', 51 | caFiles.key, 52 | '-CA', 53 | caFiles.cert, 54 | '-CAcreateserial', 55 | '-out', 56 | deviceFiles.cert, 57 | '-days', 58 | `${daysValid ?? defaultDeviceCertificateValidityInDays}`, 59 | '-sha256', 60 | ], 61 | log: { debug }, 62 | }) 63 | 64 | const certWithCa = ( 65 | await Promise.all([ 66 | fs.readFile(deviceFiles.cert, 'utf-8'), 67 | fs.readFile(caFiles.cert, 'utf-8'), 68 | ]) 69 | ).join(os.EOL) 70 | 71 | await fs.writeFile(deviceFiles.certWithCA, certWithCa, 'utf-8') 72 | 73 | return { deviceId } 74 | } 75 | -------------------------------------------------------------------------------- /cli/jitp/createSimulatorKeyAndCSR.ts: -------------------------------------------------------------------------------- 1 | import { promises as fs } from 'fs' 2 | import run from '@bifravst/run' 3 | import { deviceFileLocations } from './deviceFileLocations.js' 4 | 5 | /** 6 | * Creates a private key and a CSR for a simulated device 7 | */ 8 | export const createSimulatorKeyAndCSR = async ({ 9 | certsDir, 10 | log, 11 | debug, 12 | deviceId, 13 | }: { 14 | certsDir: string 15 | deviceId: string 16 | log?: (...message: any[]) => void 17 | debug?: (...message: any[]) 
=> void 18 | }): Promise<{ deviceId: string }> => { 19 | try { 20 | await fs.stat(certsDir) 21 | } catch { 22 | throw new Error(`${certsDir} does not exist.`) 23 | } 24 | 25 | log?.(`Generating key for device ${deviceId}`) 26 | 27 | const deviceFiles = deviceFileLocations({ 28 | certsDir, 29 | deviceId, 30 | }) 31 | 32 | await run({ 33 | command: 'openssl', 34 | args: [ 35 | 'ecparam', 36 | '-out', 37 | deviceFiles.key, 38 | '-name', 39 | 'prime256v1', 40 | '-genkey', 41 | ], 42 | log: { debug }, 43 | }) 44 | 45 | log?.(`Generating CSR for device ${deviceId}`) 46 | 47 | await run({ 48 | command: 'openssl', 49 | args: [ 50 | 'req', 51 | '-new', 52 | '-key', 53 | deviceFiles.key, 54 | '-out', 55 | deviceFiles.csr, 56 | '-subj', 57 | `/CN=${deviceId}`, 58 | ], 59 | log: { debug }, 60 | }) 61 | 62 | return { deviceId } 63 | } 64 | -------------------------------------------------------------------------------- /cli/jitp/currentCA.ts: -------------------------------------------------------------------------------- 1 | import { readFileSync, writeFileSync } from 'fs' 2 | import path from 'path' 3 | 4 | export const getCurrentCA = ({ certsDir }: { certsDir: string }): string => { 5 | try { 6 | return readFileSync(path.join(certsDir, 'currentCA'), 'utf-8') 7 | } catch (error) { 8 | throw new Error( 9 | `Could not determine current CA: ${(error as Error).message}`, 10 | ) 11 | } 12 | } 13 | 14 | export const setCurrentCA = ({ 15 | certsDir, 16 | caId, 17 | }: { 18 | certsDir: string 19 | caId: string 20 | }): void => { 21 | writeFileSync(path.join(certsDir, 'currentCA'), caId, 'utf-8') 22 | } 23 | -------------------------------------------------------------------------------- /cli/jitp/deviceFileLocations.ts: -------------------------------------------------------------------------------- 1 | import path from 'path' 2 | 3 | export const deviceFileLocations = ({ 4 | certsDir, 5 | deviceId, 6 | }: { 7 | certsDir: string 8 | deviceId: string 9 | }): { 10 | key: string 11 | 
csr: string 12 | cert: string 13 | certWithCA: string 14 | simulatorJSON: string 15 | } => ({ 16 | key: path.resolve(certsDir, `device-${deviceId}.key`), 17 | csr: path.resolve(certsDir, `device-${deviceId}.csr`), 18 | cert: path.resolve(certsDir, `device-${deviceId}.pem`), 19 | certWithCA: path.resolve(certsDir, `device-${deviceId}.bundle.pem`), 20 | simulatorJSON: path.resolve(certsDir, `device-${deviceId}.json`), 21 | }) 22 | -------------------------------------------------------------------------------- /cli/jitp/fingerprint.ts: -------------------------------------------------------------------------------- 1 | import { execSync } from 'child_process' 2 | 3 | export const fingerprint = (pem: string): string => 4 | execSync('openssl x509 -noout -fingerprint -sha1 -inform pem', { 5 | input: Buffer.from(pem), 6 | }) 7 | .toString() 8 | .replace(/^sha1 Fingerprint=/, '') 9 | .replace(/:/g, '') 10 | .trim() 11 | -------------------------------------------------------------------------------- /cli/jitp/listLocalCAs.ts: -------------------------------------------------------------------------------- 1 | import { readdir, readFile } from 'fs/promises' 2 | import path from 'path' 3 | import { fingerprint } from './fingerprint.js' 4 | 5 | export const listLocalCAs = async ({ 6 | certsDir, 7 | }: { 8 | certsDir: string 9 | }): Promise> => { 10 | const certs = (await readdir(certsDir)).filter((s) => s.endsWith('CA.pem')) 11 | const localCAs: Record = {} 12 | 13 | for (const filename of certs) { 14 | localCAs[ 15 | fingerprint(await readFile(path.join(certsDir, filename), 'utf-8')) 16 | ] = filename 17 | } 18 | 19 | return localCAs 20 | } 21 | -------------------------------------------------------------------------------- /cli/jitp/listRegisteredCAs.ts: -------------------------------------------------------------------------------- 1 | import type { IoTClient } from '@aws-sdk/client-iot' 2 | import { 3 | DescribeCACertificateCommand, 4 | ListCACertificatesCommand, 5 | } 
from '@aws-sdk/client-iot' 6 | import { paginate } from '../../util/paginate.js' 7 | import { fingerprint } from './fingerprint.js' 8 | 9 | /** 10 | * Returns a map of the registered CA IDs and their fingerprints 11 | */ 12 | export const listRegisteredCAs = async ({ 13 | iot, 14 | }: { 15 | iot: IoTClient 16 | }): Promise> => { 17 | const certs: Record = {} 18 | 19 | await paginate({ 20 | paginator: async (marker) => { 21 | const res = await iot.send(new ListCACertificatesCommand({ marker })) 22 | for (const { certificateId } of res.certificates ?? []) { 23 | const description = await iot.send( 24 | new DescribeCACertificateCommand({ 25 | certificateId: certificateId as string, 26 | }), 27 | ) 28 | certs[ 29 | fingerprint(description.certificateDescription?.certificatePem ?? '') 30 | ] = certificateId as string 31 | } 32 | return res.nextMarker 33 | }, 34 | }) 35 | 36 | return certs 37 | } 38 | -------------------------------------------------------------------------------- /cli/jitp/readlineDevice.ts: -------------------------------------------------------------------------------- 1 | import type { Connection } from '@nordicsemiconductor/device-helpers' 2 | import chalk from 'chalk' 3 | import readline from 'readline' 4 | import { defaultFirmwareRepository } from '../commands/flash-firmware.js' 5 | 6 | /** 7 | * Provides a device that uses readline as the UART interface and requires a human to provide the input. 8 | * Useful if you do not have physical access to the device. 
9 | */ 10 | export const readlineDevice = async (): Promise => { 11 | const rl = readline.createInterface({ 12 | input: process.stdin, 13 | output: process.stdout, 14 | }) 15 | console.log('') 16 | console.log(chalk.white('Please program the device with the AT host.')) 17 | console.log('') 18 | console.log(chalk.gray('You can find a pre-compiled HEX file on')) 19 | console.log( 20 | chalk.blue.underline('https://github.com/NordicSemiconductor/at_host-hex'), 21 | ) 22 | console.log('') 23 | await new Promise((resolve) => 24 | rl.question('Press Enter to continue ...', resolve), 25 | ) 26 | console.log('') 27 | console.log(chalk.white('Connect to the device using UART and')) 28 | console.log( 29 | chalk.white('execute the AT commands printed in'), 30 | chalk.blueBright('blue'), 31 | chalk.white('.'), 32 | ) 33 | console.log('') 34 | console.log( 35 | chalk.white('Provide the response without the'), 36 | chalk.whiteBright.bold('OK'), 37 | ) 38 | console.log(chalk.white('if the response contains data,')) 39 | console.log(chalk.white('otherwise press Enter.')) 40 | console.log('') 41 | 42 | return { 43 | at: async (cmd) => { 44 | console.log(chalk.blueBright('>'), chalk.blueBright(cmd)) 45 | const response = await new Promise((resolve) => 46 | rl.question( 47 | `${chalk.white( 48 | `Please provide the device's response:`, 49 | )}\n${chalk.green('<')} `, 50 | resolve, 51 | ), 52 | ) 53 | return [response as string] 54 | }, 55 | end: async () => { 56 | console.log('') 57 | console.log( 58 | chalk.white( 59 | 'Now program the device with the asset_tracker_v2 firmware.', 60 | ), 61 | ) 62 | console.log('') 63 | console.log(chalk.gray('You can find a pre-compiled HEX file on')) 64 | console.log(chalk.blue.underline(defaultFirmwareRepository)) 65 | console.log('') 66 | }, 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /commitlint.config.cjs: -------------------------------------------------------------------------------- 1 
| module.exports = { extends: ["@commitlint/config-conventional"] }; 2 | -------------------------------------------------------------------------------- /context.cfg.dist: -------------------------------------------------------------------------------- 1 | webapp=1 2 | device=1 3 | cd=0 4 | -------------------------------------------------------------------------------- /continuous-deployment-web-app.yml: -------------------------------------------------------------------------------- 1 | version: 0.2 2 | 3 | env: 4 | shell: bash 5 | 6 | phases: 7 | install: 8 | runtime-versions: 9 | nodejs: 18 10 | commands: 11 | # Next will install Node.js 20 12 | - n 20 13 | - npm install --global npm 14 | build: 15 | commands: 16 | # Install dependencies of nRF Asset Tracker for AWS 17 | - npm ci --no-audit 18 | # Export web app configuration 19 | - $(./cli.sh web-app-config -Q) 20 | - export INITIATOR_PIPELINE_NAME=`echo $CODEBUILD_INITIATOR | cut -d'/' 21 | -f2` 22 | - export SOURCE_VERSION=`aws codepipeline get-pipeline-state --name 23 | $INITIATOR_PIPELINE_NAME | jq -r '.stageStates[0].actionStates[] | 24 | select(.actionName == "WebAppSourceCode").currentRevision.revisionId'` 25 | - export PUBLIC_VERSION=`cd $CODEBUILD_SRC_DIR_WebApp/; git ls-remote 26 | --tags $SOURCE_REPO_URL | grep $SOURCE_VERSION | cut -d'/' -f3 | cut 27 | -d'^' -f1` 28 | - export PUBLIC_VERSION=${PUBLIC_VERSION:-$SOURCE_VERSION} 29 | - export PUBLIC_URL="https://$PUBLIC_WEB_APP_DOMAIN_NAME" 30 | # Build web app 31 | - cd $CODEBUILD_SRC_DIR_WebApp/; npm ci --no-audit; npm run build 32 | # Upload to S3 33 | - aws s3 cp $CODEBUILD_SRC_DIR_WebApp/build 34 | s3://$PUBLIC_WEB_APP_BUCKET_NAME --recursive --metadata-directive 35 | REPLACE --cache-control 'public,max-age=600' --expires '' 36 | # Trigger CloudFront cache invalidation for index.html (all other files have unique names) 37 | - aws cloudfront create-invalidation --distribution-id 38 | $PUBLIC_CLOUDFRONT_DISTRIBUTION_ID --paths /,/index.html 39 | 
-------------------------------------------------------------------------------- /continuous-deployment.yml: -------------------------------------------------------------------------------- 1 | version: 0.2 2 | phases: 3 | install: 4 | runtime-versions: 5 | nodejs: 18 6 | commands: 7 | # Next will install Node.js 20 8 | - n 20 9 | - npm install --global npm 10 | - npm ci --no-audit 11 | build: 12 | commands: 13 | - npx cdk deploy --trace -v --require-approval never '*' 14 | -------------------------------------------------------------------------------- /data/AmazonRootCA1.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF 3 | ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 4 | b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL 5 | MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv 6 | b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 7 | ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM 8 | 9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw 9 | IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 10 | VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L 11 | 93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm 12 | jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC 13 | AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA 14 | A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI 15 | U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs 16 | N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv 17 | o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU 18 | 5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy 19 | rqXRfboQnoZsG4q5WTP468SQvvG5 20 | -----END CERTIFICATE----- 21 | 
-------------------------------------------------------------------------------- /eslint.config.js: -------------------------------------------------------------------------------- 1 | import config from '@bifravst/eslint-config-typescript' 2 | export default [...config, { ignores: ['dist/**', 'cdk.out/**'] }] 3 | -------------------------------------------------------------------------------- /export.d.ts: -------------------------------------------------------------------------------- 1 | export * from './cli/jitp/caFileLocations.js' 2 | export * from './cli/jitp/certsDir.js' 3 | export * from './cli/jitp/createCA.js' 4 | export * from './cli/jitp/createDeviceCertificate.js' 5 | export * from './cli/jitp/deviceFileLocations.js' 6 | export * from './feature-runner/steps.js' 7 | -------------------------------------------------------------------------------- /export.js: -------------------------------------------------------------------------------- 1 | export * from './dist/cli/jitp/caFileLocations.js' 2 | export * from './dist/cli/jitp/certsDir.js' 3 | export * from './dist/cli/jitp/createCA.js' 4 | export * from './dist/cli/jitp/createDeviceCertificate.js' 5 | export * from './dist/cli/jitp/deviceFileLocations.js' 6 | export * from './dist/feature-runner/steps.js' 7 | -------------------------------------------------------------------------------- /feature-runner/console-reporter.ts: -------------------------------------------------------------------------------- 1 | import { 2 | consoleReporter, 3 | type SuiteResult, 4 | } from '@nordicsemiconductor/bdd-markdown' 5 | 6 | const onlyFailed = process.argv.includes('--only-failed') 7 | const withTimestamps = process.argv.includes('--with-timestamps') 8 | 9 | const chunks: string[] = [] 10 | 11 | process.stdin.on('data', (chunk) => chunks.push(chunk.toString())) 12 | 13 | await new Promise((resolve) => process.stdin.on('end', resolve)) 14 | 15 | let res: SuiteResult 16 | try { 17 | res = JSON.parse(chunks.join('')) 
18 | } catch (error) { 19 | throw new Error(`Failed to parse result JSON: ${(error as Error).message}`) 20 | } 21 | 22 | consoleReporter(res, console.log, { 23 | onlyFailed, 24 | withTimestamps, 25 | }) 26 | 27 | if (!res.ok) process.exit(1) 28 | -------------------------------------------------------------------------------- /feature-runner/steps/cognito.ts: -------------------------------------------------------------------------------- 1 | import { 2 | type StepRunner, 3 | regExpMatchedStep, 4 | } from '@nordicsemiconductor/bdd-markdown' 5 | import type { World } from '../run-features.js' 6 | import { Type } from '@sinclair/typebox' 7 | import { matchString } from './util.js' 8 | import { 9 | CognitoIdentityClient, 10 | GetCredentialsForIdentityCommand, 11 | GetOpenIdTokenForDeveloperIdentityCommand, 12 | } from '@aws-sdk/client-cognito-identity' 13 | import { 14 | AdminInitiateAuthCommand, 15 | CognitoIdentityProviderClient, 16 | AdminConfirmSignUpCommand, 17 | } from '@aws-sdk/client-cognito-identity-provider' 18 | 19 | const ci = new CognitoIdentityClient({}) 20 | const cisp = new CognitoIdentityProviderClient({}) 21 | 22 | export type UserCredentials = { 23 | IdToken: string 24 | IdentityId: string 25 | Token: string 26 | AccessKeyId: string 27 | SecretKey: string 28 | SessionToken: string 29 | AccessToken: string 30 | } 31 | const userCredentials: Record = {} 32 | 33 | const steps: StepRunner[] = [ 34 | regExpMatchedStep( 35 | { 36 | regExp: new RegExp( 37 | `^I am authenticated with Cognito as ${matchString( 38 | 'email', 39 | )} with password ${matchString('password')}$`, 40 | ), 41 | schema: Type.Object({ 42 | email: Type.String(), 43 | password: Type.String(), 44 | }), 45 | }, 46 | async ({ match: { email, password }, context, log: { progress } }) => { 47 | if (userCredentials[email] === undefined) { 48 | await cisp.send( 49 | new AdminConfirmSignUpCommand({ 50 | Username: email, 51 | UserPoolId: context.userPoolId, 52 | }), 53 | ) 54 | 55 | const { 
AuthenticationResult } = await cisp.send( 56 | new AdminInitiateAuthCommand({ 57 | AuthFlow: 'ADMIN_NO_SRP_AUTH', 58 | UserPoolId: context.userPoolId, 59 | ClientId: context.userPoolClientId, 60 | AuthParameters: { 61 | USERNAME: email, 62 | PASSWORD: password, 63 | }, 64 | }), 65 | ) 66 | 67 | const { IdentityId, Token } = await ci.send( 68 | new GetOpenIdTokenForDeveloperIdentityCommand({ 69 | IdentityPoolId: context.identityPoolId, 70 | Logins: { 71 | [context.developerProviderName]: email, 72 | }, 73 | TokenDuration: 3600, 74 | }), 75 | ) 76 | 77 | const { Credentials } = await ci.send( 78 | new GetCredentialsForIdentityCommand({ 79 | IdentityId: IdentityId!, 80 | Logins: { 81 | ['cognito-identity.amazonaws.com']: Token!, 82 | }, 83 | }), 84 | ) 85 | 86 | userCredentials[email] = { 87 | IdToken: AuthenticationResult!.IdToken!, 88 | IdentityId: IdentityId!, 89 | Token: Token!, 90 | AccessKeyId: Credentials!.AccessKeyId!, 91 | SecretKey: Credentials!.SecretKey!, 92 | SessionToken: Credentials!.SessionToken!, 93 | AccessToken: AuthenticationResult!.AccessToken!, 94 | } 95 | 96 | progress(`IdentityId: ${userCredentials[email]?.IdentityId}`) 97 | progress(`Token: ${userCredentials[email]?.Token}`) 98 | progress(`AccessKeyId: ${userCredentials[email]?.AccessKeyId}`) 99 | progress(`SecretKey: ${userCredentials[email]?.SecretKey}`) 100 | progress(`SessionToken: ${userCredentials[email]?.SessionToken}`) 101 | progress(`AccessToken: ${userCredentials[email]?.AccessToken}`) 102 | } 103 | context.cognito = userCredentials[email] 104 | }, 105 | ), 106 | ] 107 | export default steps 108 | -------------------------------------------------------------------------------- /feature-runner/steps/random.ts: -------------------------------------------------------------------------------- 1 | import { 2 | type StepRunner, 3 | regExpMatchedStep, 4 | } from '@nordicsemiconductor/bdd-markdown' 5 | import type { World } from '../run-features.js' 6 | import { Type } from 
'@sinclair/typebox' 7 | import { randomUUID } from 'node:crypto' 8 | import { matchChoice, matchString } from './util.js' 9 | 10 | const steps: StepRunner>[] = [ 11 | regExpMatchedStep( 12 | { 13 | regExp: new RegExp( 14 | `^I have a random email in ${matchString('storageName')}$`, 15 | ), 16 | schema: Type.Object({ 17 | storageName: Type.String(), 18 | }), 19 | }, 20 | async ({ match: { storageName }, context }) => { 21 | context[storageName] = `${randomUUID()}@example.com` 22 | }, 23 | ), 24 | regExpMatchedStep( 25 | { 26 | regExp: new RegExp( 27 | `^I have a random password in ${matchString('storageName')}$`, 28 | ), 29 | schema: Type.Object({ 30 | storageName: Type.String(), 31 | }), 32 | }, 33 | async ({ context, match: { storageName } }) => { 34 | context[storageName] = ((pw: string) => 35 | `${pw[0]?.toUpperCase()}${pw.slice(1)}${Math.round( 36 | Math.random() * 1000, 37 | )}`)( 38 | `${Math.random() 39 | .toString(36) 40 | .replace(/[^a-z]+/g, '')}${Math.random() 41 | .toString(36) 42 | .replace(/[^a-z]+/g, '')}`, 43 | ) 44 | }, 45 | ), 46 | regExpMatchedStep( 47 | { 48 | regExp: new RegExp( 49 | `^I have a random UUID in ${matchString('storageName')}$`, 50 | ), 51 | schema: Type.Object({ 52 | storageName: Type.String(), 53 | }), 54 | }, 55 | async ({ match: { storageName }, context }) => { 56 | context[storageName] = randomUUID() 57 | }, 58 | ), 59 | regExpMatchedStep( 60 | { 61 | regExp: new RegExp( 62 | `^I have a random ${matchChoice('type', [ 63 | 'number', 64 | 'float', 65 | ])} between ${matchString('min')} and ${matchString( 66 | 'max', 67 | )} in ${matchString('storageName')}$`, 68 | ), 69 | schema: Type.Object({ 70 | min: Type.String(), 71 | max: Type.String(), 72 | type: Type.Union([Type.Literal('number'), Type.Literal('float')]), 73 | storageName: Type.String(), 74 | }), 75 | }, 76 | async ({ 77 | match: { storageName, min: minString, max: maxString, type }, 78 | context, 79 | }) => { 80 | const max = parseInt(maxString, 10) 81 | const min = 
parseInt(minString, 10) 82 | context[storageName] = min + Math.random() * (max - min) 83 | if (type === 'number') 84 | context[storageName] = Math.round(context[storageName]) 85 | }, 86 | ), 87 | ] 88 | export default steps 89 | -------------------------------------------------------------------------------- /feature-runner/steps/retryCheck.spec.ts: -------------------------------------------------------------------------------- 1 | import { retryCheck } from './retryCheck.js' 2 | import { describe, it, mock } from 'node:test' 3 | import assert from 'node:assert' 4 | 5 | void describe('retryCheck()', () => { 6 | void it('should execute the check function once if it succeeds', async () => { 7 | const checkFn = mock.fn(() => true) 8 | const retryFn = mock.fn(async () => Promise.resolve()) 9 | await retryCheck(checkFn, retryFn) 10 | assert.equal(checkFn.mock.callCount(), 1) 11 | assert.equal(retryFn.mock.callCount(), 0) 12 | }) 13 | void it('should run the retry function if the check function fails', async () => { 14 | const checkFn = mock.fn() 15 | checkFn.mock.mockImplementationOnce(() => { 16 | throw new Error() 17 | }, 0) 18 | checkFn.mock.mockImplementationOnce(() => true, 1) 19 | const retryFn = mock.fn(async () => Promise.resolve()) 20 | await retryCheck(checkFn, retryFn) 21 | assert.equal(checkFn.mock.callCount(), 2) 22 | assert.equal(retryFn.mock.callCount(), 1) 23 | }) 24 | void it('should reject with error from check function if number of retries is exceeded', async () => { 25 | const err = new Error() 26 | const checkFn = mock.fn(() => { 27 | throw err 28 | }) 29 | 30 | const retryFn = mock.fn(async () => Promise.resolve()) 31 | try { 32 | await retryCheck(checkFn, retryFn, { 33 | tries: 3, 34 | minDelay: 0, 35 | maxDelay: 0, 36 | }) 37 | } catch (error) { 38 | assert.equal(error, err) 39 | } 40 | assert.equal(checkFn.mock.callCount(), 3) 41 | assert.equal(retryFn.mock.callCount(), 2) 42 | }) 43 | }) 44 | 
-------------------------------------------------------------------------------- /feature-runner/steps/retryCheck.ts: -------------------------------------------------------------------------------- 1 | export type Options = { 2 | /** 3 | * Number of tries (including the initial try) 4 | * 5 | * @default 3 6 | */ 7 | tries?: number 8 | 9 | /** 10 | * The exponential factor to use. 11 | * 12 | * @default 1.5 13 | */ 14 | factor?: number 15 | 16 | /** 17 | * The number of milliseconds before starting the second retry. 18 | * 19 | * @default 2500 20 | */ 21 | minDelay?: number 22 | 23 | /** 24 | * The maximum number of milliseconds between two retries. 25 | * 26 | * @default Infinity 27 | */ 28 | maxDelay?: number 29 | } 30 | 31 | /** 32 | * Immediately executes the check function and in case it fails, retries the check after executing the retry function. 33 | * 34 | * This is useful for retrying promises that already have been called. 35 | */ 36 | export const retryCheck = async ( 37 | checkFn: () => unknown, 38 | retryFn: () => Promise, 39 | options?: Options, 40 | ): Promise => { 41 | try { 42 | checkFn() 43 | } catch { 44 | const maxTries = (options?.tries ?? 3) - 1 45 | let wait = options?.minDelay ?? 2500 46 | 47 | for (let i = 0; i < maxTries; i++) { 48 | try { 49 | await retryFn() 50 | checkFn() 51 | return 52 | } catch (err) { 53 | if (i === maxTries - 1) throw err 54 | await new Promise((resolve) => setTimeout(resolve, wait)) 55 | // Grow the backoff exponentially, but CAP it at maxDelay: Math.min
// enforces the documented "maximum number of milliseconds between two
// retries". (Math.max here would be a bug: with the default Infinity it
// makes every subsequent wait Infinity, and an explicit maxDelay would
// act as a floor instead of a ceiling.)
wait = Math.min( 56 | options?.maxDelay ?? Number.POSITIVE_INFINITY, 57 | wait * (options?.factor ??
1.5), 58 | ) 59 | } 60 | } 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /feature-runner/steps/timestream.ts: -------------------------------------------------------------------------------- 1 | import { 2 | QueryCommand, 3 | TimestreamQueryClient, 4 | } from '@aws-sdk/client-timestream-query' 5 | import { 6 | codeBlockOrThrow, 7 | type StepRunner, 8 | } from '@nordicsemiconductor/bdd-markdown' 9 | import { parseResult } from '@nordicsemiconductor/timestream-helpers' 10 | import type { World } from '../run-features.js' 11 | import type { UserCredentials } from './cognito.js' 12 | import { retryCheck } from './retryCheck.js' 13 | import { arrayMatching, check, objectMatching } from 'tsmatchers' 14 | 15 | type TimestreamWorld = World & { 16 | cognito?: UserCredentials 17 | } 18 | 19 | const retryableQuery = (client: TimestreamQueryClient, QueryString: string) => { 20 | let result: Record[] = [] 21 | return { 22 | send: async () => { 23 | const res = await client.send( 24 | new QueryCommand({ 25 | QueryString, 26 | }), 27 | ) 28 | result = parseResult(res) 29 | }, 30 | result: () => result, 31 | } 32 | } 33 | 34 | const steps = (): StepRunner[] => { 35 | let currentQuery: ReturnType | undefined = undefined 36 | 37 | return [ 38 | { 39 | match: (title) => /^I run this Timestream query$/.test(title), 40 | run: async ({ context, step, log: { progress } }): Promise => { 41 | const { code: query } = codeBlockOrThrow(step) 42 | 43 | if (context.cognito === undefined) 44 | throw new Error(`Cognito authentication not available.`) 45 | const timestream = new TimestreamQueryClient({ 46 | credentials: { 47 | secretAccessKey: context.cognito.SecretKey, 48 | accessKeyId: context.cognito.AccessKeyId, 49 | sessionToken: context.cognito.SessionToken, 50 | }, 51 | }) 52 | 53 | progress('timestream', query) 54 | currentQuery = retryableQuery(timestream, query) 55 | await currentQuery.send() 56 | }, 57 | }, 58 | { 59 | match: (title) 
=> /^the Timestream result should match$/.test(title), 60 | run: async ({ step }) => 61 | retryCheck( 62 | () => { 63 | const code = codeBlockOrThrow(step) 64 | const expected: Record[] = JSON.parse(code.code) 65 | 66 | check(currentQuery?.result() ?? []).is( 67 | arrayMatching(expected.map((o) => objectMatching(o))), 68 | ) 69 | }, 70 | async () => {}, 71 | ), 72 | }, 73 | ] 74 | } 75 | 76 | export default steps 77 | -------------------------------------------------------------------------------- /feature-runner/steps/util.ts: -------------------------------------------------------------------------------- 1 | export const matchString = (name: string): string => '`(?<' + name + '>[^`]+)`' 2 | export const matchInteger = (name: string): string => 3 | '(?<' + name + '>-?[1-9][0-9]*)' 4 | 5 | export const matchChoice = (name: string, options: string[]): string => 6 | `(?<${name}>${options.join('|')})` 7 | -------------------------------------------------------------------------------- /features/A-GNSS-fan-out.feature.md: -------------------------------------------------------------------------------- 1 | --- 2 | variants: 3 | - device: agnssContainerDevice1 4 | - device: agnssContainerDevice2 5 | needs: 6 | - A-GNSS 7 | - Connect a tracker 8 | - Register a new account 9 | exampleContext: 10 | userPassword: secret 11 | userEmail: user@example.com 12 | tracker: 13 | agnssContainerDevice1: 14 | id: device-a 15 | agnssContainerDevice2: 16 | id: device-b 17 | --- 18 | 19 | # A-GNSS Data Fan Out (The cargo container scenario) 20 | 21 | > In this scenario hundreds, or thousands of devices are unloaded from a steel 22 | > walled cargo container (intermodal container). All of them connect to the 23 | > cellular network, and the same cell tower, and request A-GNSS data, because 24 | > they have been offline for weeks while being shipped over the ocean. 
25 | > 26 | > While all devices should receive A-GNSS data as per their request, we do not 27 | > want to hammer to third-party API with thousands of requests for the same 28 | > A-GNSS data. 29 | 30 | ## Register and connect device 31 | 32 | Given I generate a certificate for the `` tracker 33 | 34 | And I connect the `` tracker 35 | 36 | ## Request A-GNSS data 37 | 38 | Given the `` tracker is subscribed to the topic 39 | `${tracker..id}/agnss` 40 | 41 | When the `` tracker publishes this message to the topic 42 | `${tracker..id}/agnss/get` 43 | 44 | ```json 45 | { 46 | "mcc": "$number{agnssMcc}", 47 | "mnc": "$number{agnssMnc}", 48 | "cell": "$number{agnssCellId}", 49 | "area": "$number{agnssArea}", 50 | "types": [1, 2, 3, 4, 6, 7, 8, 9] 51 | } 52 | ``` 53 | 54 | Soon the `` tracker receives `2` raw messages on the topic 55 | `${tracker..id}/agnss` into `agnssData` 56 | 57 | Then 58 | `$length($filter(agnssData, function($v) { $contains($v, '01010100f9fffffffeffffff0f7b12890612031f00017') })) > 0` 59 | should equal true 60 | 61 | And 62 | `$length($filter(agnssData, function($v) { $contains($v, '01021e0001006400c675009cff859f13000b0000c6753') })) > 0` 63 | should equal true 64 | -------------------------------------------------------------------------------- /features/CellGeolocationnRFCloud.feature.md: -------------------------------------------------------------------------------- 1 | --- 2 | variants: 3 | - nw: ltem 4 | - nw: nbiot 5 | exampleContext: 6 | geolocationApiUrl: https://daaxyz.lambda-url.eu-west-1.on.aws 7 | cellId: 1 8 | ts: 1694598183204 9 | --- 10 | 11 | # nRF Cloud Cell Geolocation 12 | 13 | > Resolve device geo location through the cell it is connected to using the nRF 14 | > Cloud API. 15 | 16 | > Note: nRF Cloud's geolocation API does not distinguish between different 17 | > network modes. 
18 | 19 | ## Background 20 | 21 | > This enqueues a mock response on the mock HTTP API the stack is configure to 22 | > use for the nRF Cloud integration 23 | 24 | Given I have a random number between `1` and `100000000` in `cellId` 25 | 26 | And I have a random number between `0` and `20000` in `accuracy` 27 | 28 | And I have a random float between `-90` and `90` in `lat` 29 | 30 | And I have a random float between `-180` and `180` in `lng` 31 | 32 | And I enqueue this mock HTTP API response for a POST request to 33 | `api.nrfcloud.com/v1/location/ground-fix` 34 | 35 | ```json 36 | { 37 | "uncertainty": "$number{accuracy}", 38 | "lat": "$number{lat}", 39 | "lon": "$number{lng}", 40 | "fulfilledWith": "SCELL" 41 | } 42 | ``` 43 | 44 | ## Query the cell 45 | 46 | Given I store `$millis()` into `ts` 47 | 48 | When I GET 49 | `${geolocationApiUrl}/cell?area=30401&cell=${cellId}&mccmnc=24201&nw=${variant.nw}&ts=${ts}` 50 | 51 | 52 | 53 | Soon the response status code should equal 200 54 | 55 | Then the `Access-Control-Allow-Origin` response header should equal `*` 56 | 57 | And the `Content-Type` response header should equal `application/json` 58 | 59 | And the response body should equal 60 | 61 | ```json 62 | { 63 | "accuracy": "$number{accuracy}", 64 | "lat": "$number{lat}", 65 | "lng": "$number{lng}", 66 | "source": "SCELL" 67 | } 68 | ``` 69 | 70 | ## The nRF Cloud API should have been called 71 | 72 | Then the mock HTTP API should have been called with a POST request to 73 | `api.nrfcloud.com/v1/location/ground-fix` 74 | 75 | ```json 76 | { 77 | "lte": [ 78 | { 79 | "eci": "$number{cellId}", 80 | "mcc": 242, 81 | "mnc": 1, 82 | "tac": 30401 83 | } 84 | ] 85 | } 86 | ``` 87 | -------------------------------------------------------------------------------- /features/ConnectTracker.feature.md: -------------------------------------------------------------------------------- 1 | # Connect a tracker 2 | 3 | > As a user I can Connect a tracker 4 | 5 | ## Generate a 
certificate and connect 6 | 7 | Given I generate a certificate for the tracker 8 | 9 | ## Connect the tracker 10 | 11 | > We use just-in-time-provisioning so this scenario is expected to be retried, 12 | > because the @aws-sdk/client-iot endpoint will disconnect a new device when it 13 | > first connects. 14 | 15 | > See 16 | > https://docs.aws.amazon.com/iot/latest/developerguide/jit-provisioning.html 17 | 18 | Given I connect the tracker 19 | -------------------------------------------------------------------------------- /features/DeleteTrackers.feature.md: -------------------------------------------------------------------------------- 1 | --- 2 | exampleContext: 3 | userPassword: secret 4 | userEmail: user@example.com 5 | tracker: 6 | : 7 | id: device-a 8 | needs: 9 | - Connect a tracker 10 | order: last 11 | variants: 12 | - device: default 13 | - device: agnssContainerDevice1 14 | - device: agnssContainerDevice2 15 | - device: pgpsContainerDevice1 16 | - device: pgpsContainerDevice2 17 | - device: fota 18 | --- 19 | 20 | # Delete trackers 21 | 22 | > As a user I can delete trackers 23 | 24 | ## Background 25 | 26 | Given I am authenticated with Cognito as `${userEmail}` with password 27 | `${userPassword}` 28 | 29 | And I disconnect the tracker 30 | 31 | ## Delete the tracker 32 | 33 | When I execute `listThingPrincipals` of `@aws-sdk/client-iot` with 34 | 35 | ```json 36 | { 37 | "thingName": "${tracker..id}" 38 | } 39 | ``` 40 | 41 | Then `$count(awsSDK.res.principals)` should equal 1 42 | 43 | Given I store `awsSDK.res.principals[0]` into `certificateArn` 44 | 45 | Given I store `$split(awsSDK.res.principals[0], '/')[1]` into `certificateId` 46 | 47 | Given I execute `detachThingPrincipal` of `@aws-sdk/client-iot` with 48 | 49 | ```json 50 | { 51 | "thingName": "${tracker..id}", 52 | "principal": "${certificateArn}" 53 | } 54 | ``` 55 | 56 | And I execute `updateCertificate` of `@aws-sdk/client-iot` with 57 | 58 | ```json 59 | { 60 | "certificateId": 
"${certificateId}", 61 | "newStatus": "INACTIVE" 62 | } 63 | ``` 64 | 65 | And I execute `deleteCertificate` of `@aws-sdk/client-iot` with 66 | 67 | ```json 68 | { 69 | "certificateId": "${certificateId}" 70 | } 71 | ``` 72 | 73 | And I execute `deleteThing` of `@aws-sdk/client-iot` with 74 | 75 | ```json 76 | { 77 | "thingName": "${tracker..id}" 78 | } 79 | ``` 80 | -------------------------------------------------------------------------------- /features/DeleteUser.feature.md: -------------------------------------------------------------------------------- 1 | --- 2 | needs: 3 | - Attach Iot Policy to user 4 | - Delete trackers 5 | order: last 6 | --- 7 | 8 | # Delete a user 9 | 10 | > As a user I can delete my account again 11 | 12 | ## un-assign the IoT policy 13 | 14 | When I execute `detachPolicy` of `@aws-sdk/client-iot` with 15 | 16 | ```json 17 | { 18 | "target": "${cognito.IdentityId}", 19 | "policyName": "${userIotPolicyName}" 20 | } 21 | ``` 22 | 23 | And I execute `listAttachedPolicies` of `@aws-sdk/client-iot` with 24 | 25 | ```json 26 | { 27 | "target": "${cognito.IdentityId}" 28 | } 29 | ``` 30 | 31 | Then `awsSDK.res.policies` should equal 32 | 33 | ```json 34 | [] 35 | ``` 36 | 37 | ## Delete the Cognito User 38 | 39 | When I execute `deleteUser` of `@aws-sdk/client-cognito-identity-provider` with 40 | 41 | ```json 42 | { 43 | "AccessToken": "${cognito.AccessToken}" 44 | } 45 | ``` 46 | -------------------------------------------------------------------------------- /features/DeviceBatchData.feature.md: -------------------------------------------------------------------------------- 1 | --- 2 | needs: 3 | - Device Update Shadow 4 | exampleContext: 5 | userPassword: secret 6 | userEmail: user@example.com 7 | tracker: 8 | default: 9 | id: device-a 10 | --- 11 | 12 | # Device Batch Data 13 | 14 | > Devices can publish batch data 15 | 16 | ## Background 17 | 18 | Given I store `$millis()` into `ts1` 19 | 20 | And I store `$millis()+(120*1000)` into 
`ts2` 21 | 22 | And I have a random float between `-180` and `180` in `lng1` 23 | 24 | And I have a random float between `-180` and `180` in `lng2` 25 | 26 | ## Devices can publish batch data 27 | 28 | Given the tracker publishes this message to the topic 29 | `${tracker.default.id}/batch` 30 | 31 | ```json 32 | { 33 | "gnss": [ 34 | { 35 | "v": { 36 | "lng": "$number{lng1}", 37 | "lat": 50.109177, 38 | "acc": 28.032738, 39 | "alt": 204.623276, 40 | "spd": 0.698944, 41 | "hdg": 0 42 | }, 43 | "ts": "$number{ts1}" 44 | }, 45 | { 46 | "v": { 47 | "lng": "$number{lng2}", 48 | "lat": 63.422975, 49 | "acc": 12.276645, 50 | "alt": 137.319351, 51 | "spd": 6.308265, 52 | "hdg": 77.472923 53 | }, 54 | "ts": "$number{ts2}" 55 | } 56 | ] 57 | } 58 | ``` 59 | 60 | ## Fetch the batch data 61 | 62 | Given I am authenticated with Cognito as `${userEmail}` with password 63 | `${userPassword}` 64 | 65 | When I run this Timestream query 66 | 67 | ``` 68 | SELECT measure_value::double AS value 69 | FROM "${historicaldataDatabaseName}"."${historicaldataTableName}" 70 | WHERE deviceId='${tracker.default.id}' 71 | AND measure_name='gnss.lng' 72 | AND measure_value::double IS NOT NULL 73 | ORDER BY time DESC 74 | ``` 75 | 76 | Soon the Timestream result should match 77 | 78 | ```json 79 | [ 80 | { 81 | "value": "$number{lng1}" 82 | } 83 | ] 84 | ``` 85 | 86 | Soon the Timestream result should match 87 | 88 | ```json 89 | [ 90 | { 91 | "value": "$number{lng2}" 92 | } 93 | ] 94 | ``` 95 | -------------------------------------------------------------------------------- /features/DeviceMessages.feature.md: -------------------------------------------------------------------------------- 1 | --- 2 | needs: 3 | - Device Update Shadow 4 | exampleContext: 5 | userPassword: secret 6 | userEmail: user@example.com 7 | tracker: 8 | default: 9 | id: device-a 10 | --- 11 | 12 | # Device Messages 13 | 14 | > Devices can publish arbitrary messages on the /messages topic and that the 15 | > messages can 
then be queried in Timestream. 16 | 17 | ## Background 18 | 19 | Given I have a random number between `0` and `1024` in `button1` 20 | 21 | And I have a random number between `0` and `1024` in `button2` 22 | 23 | And I have a random number between `1` and `300` in `magnitude` 24 | 25 | ## Devices publishes that a button was pressed 26 | 27 | Given I store `$millis()` into `ts` 28 | 29 | Then the tracker publishes this message to the topic 30 | `${tracker.default.id}/messages` 31 | 32 | ```json 33 | { 34 | "btn": { 35 | "v": "$number{button1}", 36 | "ts": "$number{ts}" 37 | } 38 | } 39 | ``` 40 | 41 | Given I store `$millis()` into `ts` 42 | 43 | Then the tracker publishes this message to the topic 44 | `${tracker.default.id}/messages` 45 | 46 | ```json 47 | { 48 | "btn": { 49 | "v": "$number{button2}", 50 | "ts": "$number{ts}" 51 | } 52 | } 53 | ``` 54 | 55 | ## User retrieves the button presses 56 | 57 | Given I am authenticated with Cognito as `${userEmail}` with password 58 | `${userPassword}` 59 | 60 | When I run this Timestream query 61 | 62 | ``` 63 | SELECT measure_value::double AS value 64 | FROM "${historicaldataDatabaseName}"."${historicaldataTableName}" 65 | WHERE deviceId='${tracker.default.id}' AND measure_name='btn' AND measure_value::double IS NOT NULL 66 | ORDER BY time DESC 67 | ``` 68 | 69 | Soon the Timestream result should match 70 | 71 | ```json 72 | [ 73 | { 74 | "value": "$number{button1}" 75 | } 76 | ] 77 | ``` 78 | 79 | Soon the Timestream result should match 80 | 81 | ```json 82 | [ 83 | { 84 | "value": "$number{button2}" 85 | } 86 | ] 87 | ``` 88 | 89 | ## Devices publishes that an impact was detected 90 | 91 | Given I store `$millis()` into `ts` 92 | 93 | Then the tracker publishes this message to the topic 94 | `${tracker.default.id}/messages` 95 | 96 | ```json 97 | { 98 | "impact": { 99 | "v": "$number{magnitude}", 100 | "ts": "$number{ts}" 101 | } 102 | } 103 | ``` 104 | 105 | ## User retrieves the impact messages 106 | 107 | Given I 
am authenticated with Cognito as `${userEmail}` with password 108 | `${userPassword}` 109 | 110 | When I run this Timestream query 111 | 112 | ``` 113 | SELECT measure_value::double AS value 114 | FROM "${historicaldataDatabaseName}"."${historicaldataTableName}" 115 | WHERE deviceId='${tracker.default.id}' AND measure_name='impact' AND measure_value::double IS NOT NULL 116 | ORDER BY time DESC 117 | ``` 118 | 119 | Soon the Timestream result should match 120 | 121 | ```json 122 | [ 123 | { 124 | "value": "$number{magnitude}" 125 | } 126 | ] 127 | ``` 128 | -------------------------------------------------------------------------------- /features/DeviceUpdateShadow.feature.md: -------------------------------------------------------------------------------- 1 | --- 2 | needs: 3 | - Connect a tracker 4 | --- 5 | 6 | # Device Update Shadow 7 | 8 | > Devices can update their shadow 9 | 10 | ## Publish device information to reported state 11 | 12 | Given I store `$millis()` into `updateShadowTs` 13 | 14 | Soon the tracker updates its reported state with 15 | 16 | ```json 17 | { 18 | "dev": { 19 | "v": { 20 | "imei": "352656106111232", 21 | "iccid": "89882806660004909182", 22 | "modV": "mfw_nrf9160_1.0.0", 23 | "brdV": "thingy91_nrf9160", 24 | "appV": "0.14.6" 25 | }, 26 | "ts": "$number{updateShadowTs}" 27 | }, 28 | "roam": { 29 | "v": { 30 | "nw": "LTE-M", 31 | "band": 3 32 | }, 33 | "ts": "$number{updateShadowTs}" 34 | }, 35 | "bat": { 36 | "v": 3781, 37 | "ts": "$number{updateShadowTs}" 38 | }, 39 | "cfg": { 40 | "act": false, 41 | "actwt": 60, 42 | "mvres": 60, 43 | "mvt": 3600, 44 | "loct": 1000, 45 | "accath": 10.5, 46 | "accith": 5.2, 47 | "accito": 1.7 48 | } 49 | } 50 | ``` 51 | -------------------------------------------------------------------------------- /features/FOTA.feature.md: -------------------------------------------------------------------------------- 1 | --- 2 | needs: 3 | - Connect a tracker 4 | - Register a new account 5 | exampleContext: 6 | 
userPassword: secret 7 | userEmail: user@example.com 8 | jobId: bdfe16e9-2aec-48e3-8b1f-addd9560d3b7 9 | --- 10 | 11 | # Device Firmware Upgrade over the air 12 | 13 | > As a user I can upgrade the firmware of my devices over the air 14 | 15 | ## Register and connect device 16 | 17 | Given I generate a certificate for the `fota` tracker 18 | 19 | And I connect the `fota` tracker 20 | 21 | ## Create a new firmware upgrade as a user 22 | 23 | Given I am authenticated with Cognito as `${userEmail}` with password 24 | `${userPassword}` 25 | 26 | Given I have a random UUID in `jobId` 27 | 28 | When I execute `putObject` of `@aws-sdk/client-s3` with 29 | 30 | ```json 31 | { 32 | "Bucket": "${fotaBucketName}", 33 | "Key": "${jobId}", 34 | "Body": "SOME HEX DATA", 35 | "ContentLength": 13, 36 | "ContentType": "text/x-hex" 37 | } 38 | ``` 39 | 40 | When I have this JSON-encoded in `jobDocument` 41 | 42 | ```json 43 | { 44 | "operation": "app_fw_update", 45 | "size": 13, 46 | "filename": "1.0.1.hex", 47 | "location": { 48 | "protocol": "https", 49 | "host": "${fotaBucketName}.s3.amazonaws.com", 50 | "path": "${jobId}" 51 | }, 52 | "fwversion": "1.0.1" 53 | } 54 | ``` 55 | 56 | And I execute `createJob` of `@aws-sdk/client-iot` with 57 | 58 | ```json 59 | { 60 | "jobId": "${jobId}", 61 | "targets": ["${tracker.fota.arn}"], 62 | "document": "${jobDocument}", 63 | "description": "Upgrade ${tracker.fota.id} to version 1.0.1.", 64 | "targetSelection": "SNAPSHOT" 65 | } 66 | ``` 67 | 68 | Then `awsSDK.res.jobId` should equal `${jobId}` 69 | 70 | ## Fetch the job as a device 71 | 72 | Soon the `fota` tracker stores the next started job into `job` 73 | 74 | Then `job` should match 75 | 76 | ```json 77 | { 78 | "jobId": "${jobId}", 79 | "status": "IN_PROGRESS" 80 | } 81 | ``` 82 | 83 | ## describe the job 84 | 85 | When I execute `describeJobExecution` of `@aws-sdk/client-iot` with 86 | 87 | ```json 88 | { 89 | "jobId": "${jobId}", 90 | "thingName": "${tracker.fota.id}" 91 | } 92 | 
``` 93 | 94 | Then `awsSDK.res.execution` should match 95 | 96 | ```json 97 | { 98 | "jobId": "${jobId}", 99 | "status": "IN_PROGRESS", 100 | "versionNumber": 2 101 | } 102 | ``` 103 | 104 | ## cancel the job 105 | 106 | When I execute `cancelJobExecution` of `@aws-sdk/client-iot` with 107 | 108 | ```json 109 | { 110 | "jobId": "${jobId}", 111 | "force": true, 112 | "thingName": "${tracker.fota.id}" 113 | } 114 | ``` 115 | 116 | When I execute `describeJobExecution` of `@aws-sdk/client-iot` with 117 | 118 | ```json 119 | { 120 | "jobId": "${jobId}", 121 | "thingName": "${tracker.fota.id}" 122 | } 123 | ``` 124 | 125 | Then `awsSDK.res.execution` should match 126 | 127 | ```json 128 | { 129 | "jobId": "${jobId}", 130 | "status": "CANCELED" 131 | } 132 | ``` 133 | 134 | ## delete the job 135 | 136 | Given I execute `deleteObject` of `@aws-sdk/client-s3` with 137 | 138 | ```json 139 | { 140 | "Bucket": "${fotaBucketName}", 141 | "Key": "${jobId}" 142 | } 143 | ``` 144 | 145 | And I execute `deleteJobExecution` of `@aws-sdk/client-iot` with 146 | 147 | ```json 148 | { 149 | "jobId": "${jobId}", 150 | "thingName": "${tracker.fota.id}", 151 | "executionNumber": 1 152 | } 153 | ``` 154 | -------------------------------------------------------------------------------- /features/IoTUserPolicy.feature.md: -------------------------------------------------------------------------------- 1 | --- 2 | needs: 3 | - Register a new account 4 | exampleContext: 5 | userPassword: secret 6 | userEmail: user@example.com 7 | cognito: 8 | IdentityId: ea2fec87-3d53-41f0-ac45-965fbbc3d755 9 | userIotPolicyName: asset-tracker-userIotPolicy-1R9BPIB0QVIOJ 10 | --- 11 | 12 | # Attach Iot Policy to user 13 | 14 | > As a user I need to attach an IoT policy to my account so it can send and 15 | > receive IoT messages via Websockets 16 | 17 | ## Background 18 | 19 | Given I am authenticated with Cognito as `${userEmail}` with password 20 | `${userPassword}` 21 | 22 | ## Initially the user should not 
have policies 23 | 24 | When I execute `listAttachedPolicies` of `@aws-sdk/client-iot` with 25 | 26 | ```json 27 | { 28 | "target": "${cognito.IdentityId}" 29 | } 30 | ``` 31 | 32 | Then `awsSDK.res.policies` should match 33 | 34 | ```json 35 | [] 36 | ``` 37 | 38 | ## Self-assign the policy 39 | 40 | When I execute `attachPolicy` of `@aws-sdk/client-iot` with 41 | 42 | ```json 43 | { 44 | "target": "${cognito.IdentityId}", 45 | "policyName": "${userIotPolicyName}" 46 | } 47 | ``` 48 | 49 | And I execute `listAttachedPolicies` of `@aws-sdk/client-iot` with 50 | 51 | ```json 52 | { 53 | "target": "${cognito.IdentityId}" 54 | } 55 | ``` 56 | 57 | Then `awsSDK.res.policies` should match 58 | 59 | ```json 60 | [{ "policyName": "${userIotPolicyName}" }] 61 | ``` 62 | -------------------------------------------------------------------------------- /features/ListDevices.md: -------------------------------------------------------------------------------- 1 | --- 2 | needs: 3 | - Connect a tracker 4 | --- 5 | 6 | # List devices 7 | 8 | As a user I can list the devices 9 | 10 | ## Background 11 | 12 | Given I am authenticated with Cognito as `${userEmail}` with password 13 | `${userPassword}` 14 | 15 | ## The user should be able to list devices 16 | 17 | When I execute `listThings` of `@aws-sdk/client-iot` 18 | 19 | Then `awsSDK.res.things[thingName='${tracker:id}'].thingName` should equal 20 | `${tracker:id}` 21 | -------------------------------------------------------------------------------- /features/NeighborCellGeolocationnRFCloud.feature.md: -------------------------------------------------------------------------------- 1 | --- 2 | variants: 3 | - nw: ltem 4 | - nw: nbiot 5 | needs: 6 | - Store neighboring cell measurement reports 7 | exampleContext: 8 | userPassword: secret 9 | userEmail: user@example.com 10 | networkSurveyGeolocationApiUrl: https://daaxyz.lambda-url.eu-west-1.on.aws 11 | surveyId: bdfe16e9-2aec-48e3-8b1f-addd9560d3b7 12 | ts: 1694598183204 13 | ---
14 | 15 | # nRF Cloud Neighbor Cell Geolocation 16 | 17 | > Resolve the device geolocation through the neighboring cell measurement 18 | > reports using the nRF Cloud API. 19 | 20 | > Note: nRF Cloud's geolocation API does not distinguish between different 21 | > network modes. 22 | 23 | ## Background 24 | 25 | > This enqueues a mock response on the mock HTTP API the stack is configured to 26 | > use for the nRF Cloud integration 27 | 28 | Given I am authenticated with Cognito as `${userEmail}` with password 29 | `${userPassword}` 30 | 31 | And I have a random number between `0` and `2000` in `accuracy` 32 | 33 | And I have a random float between `-90` and `90` in `lat` 34 | 35 | And I have a random float between `-180` and `180` in `lng` 36 | 37 | And I store `_ncellmeasCellId` into `cellId` 38 | 39 | And I store `_ncellmeasAreaId` into `areaId` 40 | 41 | And I store `_ncellmeasSurveyId` into `surveyId` 42 | 43 | And I enqueue this mock HTTP API response for a POST request to 44 | `api.nrfcloud.com/v1/location/ground-fix` 45 | 46 | ```json 47 | { 48 | "uncertainty": "$number{accuracy}", 49 | "lat": "$number{lat}", 50 | "lon": "$number{lng}", 51 | "fulfilledWith": "MCELL" 52 | } 53 | ``` 54 | 55 | ## Retrieve the location for the report 56 | 57 | Given I store `$millis()` into `ts` 58 | 59 | When I GET `${networkSurveyGeolocationApiUrl}/${surveyId}?ts=${ts}` 60 | 61 | Soon the response status code should equal 200 62 | 63 | Then the `Access-Control-Allow-Origin` response header should equal `*` 64 | 65 | And the `Content-Type` response header should equal `application/json` 66 | 67 | And the response body should equal 68 | 69 | ```json 70 | { 71 | "accuracy": "$number{accuracy}", 72 | "lat": "$number{lat}", 73 | "lng": "$number{lng}", 74 | "source": "MCELL" 75 | } 76 | ``` 77 | 78 | ## The nRF Cloud API should have been called 79 | 80 | Then the mock HTTP API should have been called with a POST request to 81 | `api.nrfcloud.com/v1/location/ground-fix` 82 | 
```json 84 | { 85 | "lte": [ 86 | { 87 | "mcc": 242, 88 | "mnc": 1, 89 | "eci": "$number{cellId}", 90 | "tac": "$number{areaId}", 91 | "earfcn": 6446, 92 | "adv": 80, 93 | "rsrp": -97, 94 | "rsrq": -9, 95 | "nmr": [ 96 | { 97 | "earfcn": 262143, 98 | "pci": 501, 99 | "rsrp": -104, 100 | "rsrq": -18 101 | }, 102 | { 103 | "earfcn": 262142, 104 | "pci": 503, 105 | "rsrp": -116, 106 | "rsrq": -11 107 | } 108 | ] 109 | } 110 | ] 111 | } 112 | ``` 113 | -------------------------------------------------------------------------------- /features/P-GPS-fan-out.feature.md: -------------------------------------------------------------------------------- 1 | --- 2 | variants: 3 | - device: pgpsContainerDevice1 4 | - device: pgpsContainerDevice2 5 | needs: 6 | - P-GPS 7 | exampleContext: 8 | tracker: 9 | pgpsContainerDevice1: 10 | id: device-a 11 | pgpsContainerDevice2: 12 | id: device-b 13 | userPassword: secret 14 | userEmail: user@example.com 15 | --- 16 | 17 | # P-GPS Data Fan Out (The cargo container scenario) 18 | 19 | > In this scenario hundreds, or thousands of devices are unloaded from a steel 20 | > walled cargo container (intermodal container). All of them connect to the 21 | > cellular network, and the same cell tower, and request P-GPS data, because 22 | > they have been offline for weeks while being shipped over the ocean. 23 | 24 | > While all devices should receive P-GPS data as per their request, we do not 25 | > want to hammer to third-party API with thousands of requests for the same 26 | > P-GPS data. 
27 | 28 | ## Register and connect device 29 | 30 | Given I generate a certificate for the `` tracker 31 | 32 | And I connect the `` tracker 33 | 34 | ## Request P-GPS data 35 | 36 | Given the `` tracker is subscribed to the topic 37 | `${tracker..id}/pgps` 38 | 39 | When the `` tracker publishes this message to the topic 40 | `${tracker..id}/pgps/get` 41 | 42 | ```json 43 | { 44 | "n": "$number{predictionCount}", 45 | "time": "$number{startGpsTimeOfDaySeconds}" 46 | } 47 | ``` 48 | 49 | Soon the `` tracker receives a message on the topic 50 | `${tracker..id}/pgps` into `pgpsData` 51 | 52 | And `pgpsData` should match 53 | 54 | ```json 55 | { 56 | "path": "public/15131-0_15135-72000.bin", 57 | "host": "pgps.nrfcloud.com" 58 | } 59 | ``` 60 | -------------------------------------------------------------------------------- /features/P-GPS.feature.md: -------------------------------------------------------------------------------- 1 | --- 2 | needs: 3 | - Connect a tracker 4 | exampleContext: 5 | currentGpsDay: 15956 6 | startGpsTimeOfDaySeconds: 0 7 | predictionCount: 1 8 | tracker: 9 | default: 10 | id: cf3fbe5d-a8fe-4d70-8d4a-8e46e01d85c2 11 | --- 12 | 13 | # P-GPS 14 | 15 | > Devices can request P-GPS data to decrease their time-to-fix when using GPS 16 | 17 | ## Background 18 | 19 | > Prepare the mock API responses.
20 | 21 | Given I have a random number between `1` and `168` in `predictionCount` 22 | 23 | And I have a random number between `0` and `86399` in `startGpsTimeOfDaySeconds` 24 | 25 | And I enqueue this mock HTTP API response for a GET request to 26 | `api.nrfcloud.com/v1/location/pgps?predictionCount=${predictionCount}&predictionIntervalMinutes=240&startGpsDay=${currentGpsDay}&startGpsTimeOfDaySeconds=${startGpsTimeOfDaySeconds}` 27 | 28 | ```json 29 | { 30 | "path": "public/15131-0_15135-72000.bin", 31 | "host": "pgps.nrfcloud.com" 32 | } 33 | ``` 34 | 35 | ## Request P-GPS data 36 | 37 | Given the tracker is subscribed to the topic `${tracker.default.id}/pgps` 38 | 39 | When the tracker publishes this message to the topic 40 | `${tracker.default.id}/pgps/get` 41 | 42 | ```json 43 | { 44 | "n": "$number{predictionCount}", 45 | "time": "$number{startGpsTimeOfDaySeconds}" 46 | } 47 | ``` 48 | 49 | Soon the tracker receives a message on the topic `${tracker.default.id}/pgps` 50 | into `pgpsData` 51 | 52 | Then `pgpsData` should match 53 | 54 | ```json 55 | { 56 | "path": "public/15131-0_15135-72000.bin", 57 | "host": "pgps.nrfcloud.com" 58 | } 59 | ``` 60 | -------------------------------------------------------------------------------- /features/QueryHistoricalData.feature.md: -------------------------------------------------------------------------------- 1 | --- 2 | needs: 3 | - Device Update Shadow 4 | exampleContext: 5 | userPassword: secret 6 | userEmail: user@example.com 7 | --- 8 | 9 | # Query Data 10 | 11 | > As a user I can query the historical data of a device 12 | 13 | ## Query historical data 14 | 15 | Given I am authenticated with Cognito as `${userEmail}` with password 16 | `${userPassword}` 17 | 18 | When I run this Timestream query 19 | 20 | ``` 21 | SELECT measure_value::double AS value 22 | FROM "${historicaldataDatabaseName}"."${historicaldataTableName}" 23 | WHERE deviceId='${tracker.default.id}' AND measure_name='bat' AND measure_value::double 
IS NOT NULL LIMIT 1 24 | ``` 25 | 26 | Soon the Timestream result should match 27 | 28 | ```json 29 | [{ "value": 3781 }] 30 | ``` 31 | -------------------------------------------------------------------------------- /features/README.md: -------------------------------------------------------------------------------- 1 | This folder contains descriptions of the implemented features, which are 2 | automatically tested using 3 | [our End-to-end Behaviour Driven Design Test Runner](https://github.com/NordicSemiconductor/cloud-e2e-bdd-test-runner-js#end-to-end-behaviour-driven-design-test-runner-). 4 | -------------------------------------------------------------------------------- /features/ReadDeviceShadow.feature.md: -------------------------------------------------------------------------------- 1 | --- 2 | needs: 3 | - Update Device Configuration 4 | exampleContext: 5 | userPassword: secret 6 | userEmail: user@example.com 7 | --- 8 | 9 | # Read Device Shadow 10 | 11 | > As a user I can read the device shadow 12 | 13 | ## Read reported and desired state as user 14 | 15 | Given I am authenticated with Cognito as `${userEmail}` with password 16 | `${userPassword}` 17 | 18 | When I execute `getThingShadow` of `@aws-sdk/client-iot-data-plane` with 19 | 20 | ```json 21 | { "thingName": "${tracker.default.id}" } 22 | ``` 23 | 24 | And I parse JSON-encoded `awsSDK.res.payload` into `shadow` 25 | 26 | Then `shadow.state.reported` should match 27 | 28 | ```json 29 | { 30 | "dev": { 31 | "v": { 32 | "imei": "352656106111232", 33 | "iccid": "89882806660004909182", 34 | "modV": "mfw_nrf9160_1.0.0", 35 | "brdV": "thingy91_nrf9160", 36 | "appV": "0.14.6" 37 | }, 38 | "ts": "$number{updateShadowTs}" 39 | }, 40 | "roam": { 41 | "v": { 42 | "nw": "LTE-M", 43 | "band": 3 44 | }, 45 | "ts": "$number{updateShadowTs}" 46 | } 47 | } 48 | ``` 49 | 50 | And `shadow.state.desired` should match 51 | 52 | ```json 53 | { 54 | "cfg": { 55 | "act": false, 56 | "actwt": 60, 57 | "mvres": 60, 58 
| "mvt": 3600, 59 | "loct": 1000, 60 | "accath": 10.5, 61 | "accith": 5.2, 62 | "accito": 1.7 63 | } 64 | } 65 | ``` 66 | -------------------------------------------------------------------------------- /features/UpdateDeviceConfiguration.feature.md: -------------------------------------------------------------------------------- 1 | --- 2 | needs: 3 | - Device Update Shadow 4 | - Attach Iot Policy to user 5 | exampleContext: 6 | userPassword: secret 7 | userEmail: user@example.com 8 | --- 9 | 10 | # Update Device Configuration 11 | 12 | > As a user I can update the device configuration 13 | 14 | ## Update the device configuration as a user 15 | 16 | Given I am authenticated with Cognito as `${userEmail}` with password 17 | `${userPassword}` 18 | 19 | And I have this JSON-encoded in `payload` 20 | 21 | ```json 22 | { 23 | "state": { 24 | "desired": { 25 | "cfg": { 26 | "act": false, 27 | "actwt": 60, 28 | "mvres": 60, 29 | "mvt": 3600, 30 | "loct": 1000, 31 | "accath": 10.5, 32 | "accith": 5.2, 33 | "accito": 1.7 34 | } 35 | } 36 | } 37 | } 38 | ``` 39 | 40 | When I execute `updateThingShadow` of `@aws-sdk/client-iot-data-plane` with 41 | 42 | ```json 43 | { 44 | "thingName": "${tracker.default.id}", 45 | "payload": "${payload}" 46 | } 47 | ``` 48 | -------------------------------------------------------------------------------- /features/UserRegistration.feature.md: -------------------------------------------------------------------------------- 1 | --- 2 | exampleContext: 3 | userPoolClientId: 3s8c3hhgtc6ut8gapd45ljnvgk 4 | userPassword: secret 5 | userEmail: user@example.com 6 | --- 7 | 8 | # Register a new account 9 | 10 | > As a user I can register a new account 11 | 12 | > Note: this tests only that sign up is possible (which can be disabled), once 13 | > this works, password reset etc. can be assumed to be working because this is 14 | > handled by AWS Cognito. 
15 | 16 | ## Sign up 17 | 18 | Given I have a random email in `userEmail` 19 | 20 | And I have a random password in `userPassword` 21 | 22 | When I execute `signUp` of `@aws-sdk/client-cognito-identity-provider` with 23 | 24 | ```json 25 | { 26 | "ClientId": "${userPoolClientId}", 27 | "Password": "${userPassword}", 28 | "Username": "${userEmail}" 29 | } 30 | ``` 31 | 32 | Then `awsSDK.res` should match 33 | 34 | ```json 35 | { 36 | "UserConfirmed": false 37 | } 38 | ``` 39 | -------------------------------------------------------------------------------- /features/WiFiSiteSurveyStorage.feature.md: -------------------------------------------------------------------------------- 1 | --- 2 | needs: 3 | - Device Update Shadow 4 | exampleContext: 5 | userPassword: secret 6 | userEmail: user@example.com 7 | tracker: 8 | default: 9 | id: device-a 10 | --- 11 | 12 | # Store WiFi site surveys 13 | 14 | > WiFi site surveys are too big to be stored in the AWS shadow, so they are 15 | > stored in a DynamoDB. 
16 | 17 | ## Background 18 | 19 | Given I am authenticated with Cognito as `${userEmail}` with password 20 | `${userPassword}` 21 | 22 | ## Device publishes WiFi site survey 23 | 24 | Given I store `$millis()` into `ts` 25 | 26 | Then the tracker publishes this message to the topic 27 | `${tracker.default.id}/ground-fix` 28 | 29 | ```json 30 | { 31 | "wifi": { 32 | "ts": "$number{ts}", 33 | "aps": [ 34 | "4ce175805e6f", 35 | "4ce175805e6e", 36 | "743aef44b743", 37 | "743aef44b742", 38 | "4ce17501156e", 39 | "4ce17501156f", 40 | "4ce175bf092e", 41 | "4ce175bf092f", 42 | "743aef44b74a", 43 | "4ce175bf0921", 44 | "4ce175bf0920", 45 | "80e01d098f67", 46 | "80e01d098f65", 47 | "80e01d098f61", 48 | "80e01d098f68", 49 | "80e01d098f62", 50 | "80e01d098f69", 51 | "80e01d098f6d", 52 | "4ce175011560", 53 | "aa1544ac6c3a", 54 | "80e01d098f6a", 55 | "80e01d098f6e", 56 | "9a1544ac6c3a", 57 | "9e1544ac6c3a" 58 | ] 59 | } 60 | } 61 | ``` 62 | 63 | ## Find the latest survey 64 | 65 | When I execute `query` of `@aws-sdk/client-dynamodb` with 66 | 67 | ```json 68 | { 69 | "TableName": "${networkSurveyStorageTableName}", 70 | "IndexName": "surveyByDevice", 71 | "ScanIndexForward": false, 72 | "KeyConditionExpression": "#deviceId = :deviceId", 73 | "ExpressionAttributeNames": { 74 | "#deviceId": "deviceId" 75 | }, 76 | "ExpressionAttributeValues": { 77 | ":deviceId": { 78 | "S": "${tracker.default.id}" 79 | } 80 | }, 81 | "Limit": 1 82 | } 83 | ``` 84 | 85 | Soon I store `awsSDK.res.Items[0].surveyId.S` into `networkSurveyId` 86 | 87 | When I execute `getItem` of `@aws-sdk/client-dynamodb` with 88 | 89 | ```json 90 | { 91 | "TableName": "${networkSurveyStorageTableName}", 92 | "Key": { 93 | "surveyId": { 94 | "S": "${networkSurveyId}" 95 | } 96 | } 97 | } 98 | ``` 99 | 100 | Soon `awsSDK.res.Item` should match 101 | 102 | ```json 103 | { 104 | "wifi": { 105 | "M": { 106 | "ts": { "N": "${ts}" }, 107 | "aps": { 108 | "L": [ 109 | { "S": "4ce175805e6f" }, 110 | { "S": "4ce175805e6e" }, 
111 | { "S": "743aef44b743" }, 112 | { "S": "743aef44b742" }, 113 | { "S": "4ce17501156e" }, 114 | { "S": "4ce17501156f" }, 115 | { "S": "4ce175bf092e" }, 116 | { "S": "4ce175bf092f" }, 117 | { "S": "743aef44b74a" }, 118 | { "S": "4ce175bf0921" }, 119 | { "S": "4ce175bf0920" }, 120 | { "S": "80e01d098f67" }, 121 | { "S": "80e01d098f65" }, 122 | { "S": "80e01d098f61" }, 123 | { "S": "80e01d098f68" }, 124 | { "S": "80e01d098f62" }, 125 | { "S": "80e01d098f69" }, 126 | { "S": "80e01d098f6d" }, 127 | { "S": "4ce175011560" }, 128 | { "S": "aa1544ac6c3a" }, 129 | { "S": "80e01d098f6a" }, 130 | { "S": "80e01d098f6e" }, 131 | { "S": "9a1544ac6c3a" }, 132 | { "S": "9e1544ac6c3a" } 133 | ] 134 | } 135 | } 136 | }, 137 | "deviceId": { "S": "${tracker.default.id}" } 138 | } 139 | ``` 140 | -------------------------------------------------------------------------------- /geolocation/Cell.ts: -------------------------------------------------------------------------------- 1 | import type { cellId } from '../cellGeolocation/cellId.js' 2 | 3 | export type Cell = Parameters[0] 4 | -------------------------------------------------------------------------------- /geolocation/Location.ts: -------------------------------------------------------------------------------- 1 | export type Location = { 2 | lat: number 3 | lng: number 4 | accuracy: number 5 | } 6 | -------------------------------------------------------------------------------- /geolocation/parseMCCMNC.spec.ts: -------------------------------------------------------------------------------- 1 | import { parseMCCMNC } from './parseMCCMNC.js' 2 | import { describe, it } from 'node:test' 3 | import assert from 'node:assert' 4 | 5 | void describe('parseMCCMNC()', () => { 6 | for (const [mccmnc, mcc, mnc] of [ 7 | [310410, 310, 410], 8 | [24201, 242, 1], 9 | ] as [number, number, number][]) { 10 | void it(`should parse the MCCMNC ${mccmnc} into MNC ${mnc} and MCC ${mcc}`, () => 11 | assert.deepEqual(parseMCCMNC(mccmnc), [mcc, 
mnc])) 12 | } 13 | }) 14 | -------------------------------------------------------------------------------- /geolocation/parseMCCMNC.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Parses the MCCMNC tuple into MCC and MNC 3 | */ 4 | export const parseMCCMNC = (mccmnc: number): [mcc: number, mnc: number] => { 5 | const s = mccmnc.toFixed(0) 6 | return [parseInt(s.slice(0, 3), 10), parseInt(s.slice(3), 10)] 7 | } 8 | -------------------------------------------------------------------------------- /geolocation/queueJob.ts: -------------------------------------------------------------------------------- 1 | import type { SQSClient } from '@aws-sdk/client-sqs' 2 | import { SendMessageCommand } from '@aws-sdk/client-sqs' 3 | import { ErrorType, type ErrorInfo } from '../api/ErrorInfo.js' 4 | 5 | export const queueJob = 6 | ({ sqs, QueueUrl }: { sqs: SQSClient; QueueUrl: string }) => 7 | async ({ 8 | payload, 9 | deduplicationId, 10 | delay, 11 | }: { 12 | payload: unknown 13 | deduplicationId?: string 14 | delay?: number 15 | }): Promise<{ error: ErrorInfo } | void> => { 16 | try { 17 | console.debug( 18 | JSON.stringify({ 19 | queueJob: { 20 | payload, 21 | }, 22 | }), 23 | ) 24 | const { MessageId, SequenceNumber } = await sqs.send( 25 | new SendMessageCommand({ 26 | QueueUrl, 27 | MessageBody: JSON.stringify(payload), 28 | MessageGroupId: deduplicationId, 29 | MessageDeduplicationId: deduplicationId, 30 | DelaySeconds: delay, 31 | }), 32 | ) 33 | console.debug( 34 | JSON.stringify({ 35 | queueJob: { 36 | QueueUrl, 37 | MessageId, 38 | SequenceNumber, 39 | }, 40 | }), 41 | ) 42 | } catch (err) { 43 | console.error( 44 | JSON.stringify({ 45 | queueJob: { 46 | error: (err as Error).message, 47 | cell: payload, 48 | QueueUrl, 49 | }, 50 | }), 51 | ) 52 | return { 53 | error: { 54 | type: ErrorType.InternalError, 55 | message: (err as Error).message, 56 | }, 57 | } 58 | } 59 | } 60 |
-------------------------------------------------------------------------------- /geolocation/types.ts: -------------------------------------------------------------------------------- 1 | import type { LocationSource } from '../cellGeolocation/stepFunction/types.js' 2 | import type { Location } from './Location.js' 3 | 4 | export type MaybeLocation = ( 5 | | { 6 | located: false 7 | } 8 | | { 9 | located: true 10 | source: LocationSource 11 | } 12 | ) & 13 | Partial 14 | -------------------------------------------------------------------------------- /historicalData/batchToTimestreamRecords.ts: -------------------------------------------------------------------------------- 1 | import type { _Record } from '@aws-sdk/client-timestream-write' 2 | import { toRecord } from '@nordicsemiconductor/timestream-helpers' 3 | import { randomUUID } from 'node:crypto' 4 | import { isNotNullOrUndefined } from '../util/isNullOrUndefined.js' 5 | 6 | export const batchToTimestreamRecords = (event: BatchMessage): _Record[] => { 7 | const Records: (_Record | undefined)[] = Object.entries(event.batch) 8 | .map(([name, messages]) => 9 | ( 10 | messages as ( 11 | | NumberValueSensor 12 | | NumbersValueSensor 13 | | NumbersAndStringsValueSensor 14 | )[] 15 | ) 16 | ?.map((m) => { 17 | const ts = m.ts 18 | const measureGroup = randomUUID() 19 | if (typeof m.v === 'number') { 20 | return toRecord({ 21 | name, 22 | v: m.v, 23 | ts, 24 | dimensions: { measureGroup }, 25 | }) 26 | } 27 | return Object.entries(m.v) 28 | .map(([k, v]) => 29 | toRecord({ 30 | name: `${name}.${k}`, 31 | v, 32 | ts, 33 | dimensions: { measureGroup }, 34 | }), 35 | ) 36 | .filter(isNotNullOrUndefined) 37 | .flat() 38 | }) 39 | .flat(), 40 | ) 41 | .flat() 42 | 43 | return Records.filter(isNotNullOrUndefined) as _Record[] 44 | } 45 | -------------------------------------------------------------------------------- /historicalData/messageToTimestreamRecords.spec.ts: 
-------------------------------------------------------------------------------- 1 | import { messageToTimestreamRecords } from './messageToTimestreamRecords.js' 2 | import { describe, it } from 'node:test' 3 | import { arrayContaining, check, stringMatching, withLength } from 'tsmatchers' 4 | 5 | const Dimensions = [ 6 | { 7 | Name: 'measureGroup', 8 | Value: stringMatching( 9 | /^[0-9A-F]{8}-[0-9A-F]{4}-4[0-9A-F]{3}-[89AB][0-9A-F]{3}-[0-9A-F]{12}$/i, 10 | ), 11 | }, 12 | ] 13 | 14 | void describe('messageToTimestreamRecords', () => { 15 | void it('should convert a message to Timestream records', () => { 16 | check( 17 | messageToTimestreamRecords({ 18 | message: { 19 | btn: { 20 | v: 0, 21 | ts: 1606474470069, 22 | }, 23 | }, 24 | deviceId: 'slipslop-particle-santalum', 25 | }), 26 | ).is( 27 | withLength(1).and( 28 | arrayContaining({ 29 | Dimensions, 30 | MeasureName: 'btn', 31 | MeasureValue: '0', 32 | MeasureValueType: 'DOUBLE', 33 | Time: '1606474470069', 34 | TimeUnit: 'MILLISECONDS', 35 | }), 36 | ), 37 | ) 38 | }) 39 | void it('should convert a impact message to Timestream records', () => { 40 | check( 41 | messageToTimestreamRecords({ 42 | message: { 43 | impact: { 44 | v: 200, 45 | ts: 1606474470069, 46 | }, 47 | }, 48 | deviceId: 'slipslop-particle-santalum', 49 | }), 50 | ).is( 51 | withLength(1).and( 52 | arrayContaining({ 53 | Dimensions, 54 | MeasureName: 'impact', 55 | MeasureValue: '200', 56 | MeasureValueType: 'DOUBLE', 57 | Time: '1606474470069', 58 | TimeUnit: 'MILLISECONDS', 59 | }), 60 | ), 61 | ) 62 | }) 63 | }) 64 | -------------------------------------------------------------------------------- /historicalData/messageToTimestreamRecords.ts: -------------------------------------------------------------------------------- 1 | import type { _Record } from '@aws-sdk/client-timestream-write' 2 | import { toRecord } from '@nordicsemiconductor/timestream-helpers' 3 | import { randomUUID } from 'node:crypto' 4 | import { isNotNullOrUndefined } 
from '../util/isNullOrUndefined.js' 5 | 6 | export const messageToTimestreamRecords = (event: DeviceMessage): _Record[] => { 7 | const Records: (_Record | undefined)[] = [] 8 | if (event.message.btn !== undefined) { 9 | Records.push( 10 | toRecord({ 11 | name: 'btn', 12 | ts: event.message.btn.ts, 13 | v: event.message.btn.v, 14 | dimensions: { measureGroup: randomUUID() }, 15 | }), 16 | ) 17 | } 18 | 19 | if (event.message.impact !== undefined) { 20 | Records.push( 21 | toRecord({ 22 | name: 'impact', 23 | ts: event.message.impact.ts, 24 | v: event.message.impact.v, 25 | dimensions: { measureGroup: randomUUID() }, 26 | }), 27 | ) 28 | } 29 | 30 | return Records.filter(isNotNullOrUndefined) as _Record[] 31 | } 32 | -------------------------------------------------------------------------------- /historicalData/shadowUpdateToTimestreamRecords.ts: -------------------------------------------------------------------------------- 1 | import type { _Record } from '@aws-sdk/client-timestream-write' 2 | import { toRecord } from '@nordicsemiconductor/timestream-helpers' 3 | import { randomUUID } from 'node:crypto' 4 | import { isNotNullOrUndefined } from '../util/isNullOrUndefined.js' 5 | 6 | export const shadowUpdateToTimestreamRecords = (event: { 7 | reported: Record< 8 | string, 9 | { 10 | v: Record< 11 | string, 12 | { 13 | toString: () => string 14 | } | null 15 | > 16 | ts: number 17 | } 18 | > 19 | }): _Record[] => { 20 | const measureGroup = randomUUID() 21 | 22 | const Records: (_Record | undefined)[] = [] 23 | const props = Object.keys(event.reported).filter( 24 | (s) => !/^(cfg|bat)$/.test(s), 25 | ) as (keyof Omit)[] 26 | 27 | for (const s of props) { 28 | const v = event.reported[s]?.v 29 | if (v === undefined) continue 30 | const ts = event.reported[s]?.ts as number 31 | for (const [name, value] of Object.entries(v)) { 32 | if (value === null) continue 33 | Records.push( 34 | toRecord({ 35 | name: `${s}.${name}`, 36 | v: value, 37 | ts, 38 | dimensions: { 
measureGroup }, 39 | }), 40 | ) 41 | } 42 | } 43 | return Records.filter(isNotNullOrUndefined) as _Record[] 44 | } 45 | -------------------------------------------------------------------------------- /historicalData/storeMessagesInTimestream.ts: -------------------------------------------------------------------------------- 1 | import type { Dimension, _Record } from '@aws-sdk/client-timestream-write' 2 | import { toRecord, writeClient } from '@nordicsemiconductor/timestream-helpers' 3 | import { fromEnv } from '../util/fromEnv.js' 4 | import { batchToTimestreamRecords } from './batchToTimestreamRecords.js' 5 | import { messageToTimestreamRecords } from './messageToTimestreamRecords.js' 6 | import { shadowUpdateToTimestreamRecords } from './shadowUpdateToTimestreamRecords.js' 7 | import { storeRecordsInTimeseries } from './storeRecordsInTimeseries.js' 8 | import { randomUUID } from 'node:crypto' 9 | 10 | const { tableInfo } = fromEnv({ 11 | tableInfo: 'TABLE_INFO', 12 | })(process.env) 13 | 14 | const [DatabaseName, TableName] = tableInfo.split('|') as [string, string] 15 | const store = (async () => 16 | storeRecordsInTimeseries({ 17 | timestream: await writeClient(), 18 | DatabaseName, 19 | TableName, 20 | }))() 21 | 22 | const storeUpdate = async (Records: _Record[], Dimensions: Dimension[]) => { 23 | console.debug( 24 | JSON.stringify({ DatabaseName, TableName, Records, Dimensions }), 25 | ) 26 | return (await store)(Records, { Dimensions }) 27 | } 28 | 29 | /** 30 | * Processes device messages and updates and stores the in Timestream 31 | */ 32 | export const handler = async ( 33 | event: UpdatedDeviceState | DeviceMessage | BatchMessage, 34 | ): Promise => { 35 | console.debug(JSON.stringify(event)) 36 | 37 | const Dimensions = [ 38 | { 39 | Name: 'deviceId', 40 | Value: event.deviceId, 41 | }, 42 | ] 43 | 44 | try { 45 | if ('reported' in event) { 46 | const { cfg, bat, ...reported } = event.reported 47 | void cfg // remove from reported 48 | const records 
= shadowUpdateToTimestreamRecords({ 49 | reported, 50 | }) 51 | if (bat !== undefined) { 52 | const batRecord = toRecord({ 53 | name: 'bat', 54 | ts: bat.ts, 55 | v: bat.v, 56 | dimensions: { 57 | measureGroup: records[0]?.Dimensions?.[0]?.Value ?? randomUUID(), 58 | }, 59 | }) 60 | if (batRecord !== undefined) { 61 | records.push(batRecord) 62 | } 63 | } 64 | await storeUpdate(records, Dimensions) 65 | return 66 | } 67 | if ('message' in event) { 68 | await storeUpdate(messageToTimestreamRecords(event), Dimensions) 69 | return 70 | } 71 | if ('batch' in event) { 72 | await storeUpdate(batchToTimestreamRecords(event), [ 73 | ...Dimensions, 74 | { 75 | Name: 'source', 76 | Value: 'batch', 77 | }, 78 | ]) 79 | return 80 | } 81 | console.error( 82 | JSON.stringify({ 83 | error: 'Unknown event', 84 | event, 85 | }), 86 | ) 87 | } catch (err) { 88 | console.error(err) 89 | console.error( 90 | JSON.stringify({ 91 | error: (err as Error).message, 92 | }), 93 | ) 94 | return 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /historicalData/storeRecordsInTimeseries.ts: -------------------------------------------------------------------------------- 1 | import type { TimestreamWriteClient } from '@aws-sdk/client-timestream-write' 2 | import { 3 | WriteRecordsCommand, 4 | type _Record, 5 | } from '@aws-sdk/client-timestream-write' 6 | 7 | type AWSError = { message: string; code: number } 8 | 9 | export const storeRecordsInTimeseries = 10 | ({ 11 | timestream, 12 | DatabaseName, 13 | TableName, 14 | }: { 15 | timestream: TimestreamWriteClient 16 | DatabaseName: string 17 | TableName: string 18 | }) => 19 | async (Records: _Record[], CommonAttributes?: _Record): Promise => { 20 | if (Records.length === 0) { 21 | console.warn( 22 | JSON.stringify({ 23 | storeRecordsInTimeseries: 'No records to store.', 24 | }), 25 | ) 26 | return 27 | } 28 | const request = timestream.send( 29 | new WriteRecordsCommand({ 30 | DatabaseName, 31 | 
TableName, 32 | Records, 33 | CommonAttributes, 34 | }), 35 | ) 36 | try { 37 | await request 38 | } catch (err) { 39 | const RejectedRecords = JSON.parse( 40 | (request as any).response.httpResponse.body.toString(), 41 | ).RejectedRecords 42 | if (RejectedRecords !== undefined) { 43 | console.error({ 44 | RejectedRecords, 45 | }) 46 | } 47 | throw new Error(`${(err as AWSError).code}: ${(err as AWSError).message}`) 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /historicalData/types.d.ts: -------------------------------------------------------------------------------- 1 | type SensorWithTimestamp = { 2 | v: unknown 3 | ts: number 4 | } 5 | 6 | type NumberValueSensor = SensorWithTimestamp & { 7 | v: number 8 | } 9 | 10 | type NumbersValueSensor = SensorWithTimestamp & { 11 | v: Record 12 | } 13 | 14 | type NumbersAndStringsValueSensor = SensorWithTimestamp & { 15 | v: Record 16 | } 17 | 18 | type UpdatedDeviceState = { 19 | reported: { 20 | cfg?: Record 21 | bat?: NumberValueSensor 22 | env?: NumbersValueSensor 23 | acc?: NumbersValueSensor 24 | gnss?: NumbersValueSensor 25 | dev?: NumbersAndStringsValueSensor 26 | roam?: NumbersAndStringsValueSensor 27 | } 28 | deviceId: string 29 | } 30 | 31 | type DeviceMessage = { 32 | message: { 33 | impact?: { 34 | ts: number 35 | v: number 36 | } 37 | btn?: { 38 | v: number 39 | ts: number 40 | } 41 | } 42 | deviceId: string 43 | } 44 | 45 | type BatchMessage = { 46 | batch: { 47 | btn?: NumberValueSensor[] 48 | bat?: NumberValueSensor[] 49 | env?: NumbersValueSensor[] 50 | acc?: NumbersValueSensor[] 51 | gnss?: NumbersValueSensor[] 52 | dev?: NumbersAndStringsValueSensor[] 53 | roam?: NumbersAndStringsValueSensor[] 54 | } 55 | deviceId: string 56 | } 57 | -------------------------------------------------------------------------------- /networkSurveyGeolocation/expandMac.spec.ts: -------------------------------------------------------------------------------- 1 | import { 
expandMac } from './expandMac.js' 2 | import { describe, it } from 'node:test' 3 | import assert from 'node:assert' 4 | 5 | void describe('expandMac()', () => { 6 | for (const [original, expanded] of [ 7 | ['80e01d098f6e', '80:e0:1d:09:8f:6e'], 8 | ['80:e0:1d:09:8f:6e', '80:e0:1d:09:8f:6e'], 9 | ] as [string, string][]) { 10 | void it(`should expand the mac ${original} to ${expanded}`, () => 11 | assert.equal(expandMac(original), expanded)) 12 | } 13 | }) 14 | -------------------------------------------------------------------------------- /networkSurveyGeolocation/expandMac.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Expands a MAC address written in a form without separators to a form with separators. 3 | * asset_tracker_v2 sends MACs without separators. 4 | */ 5 | export const expandMac = (mac: string): string => { 6 | if (!/^[a-f0-9]+$/i.test(mac) || mac.length % 2 !== 0) return mac 7 | return mac.split('').reduce((expanded, byte, index) => { 8 | if (index > 0 && index % 2 === 0) expanded += ':' 9 | expanded += byte 10 | return expanded 11 | }, '') 12 | } 13 | -------------------------------------------------------------------------------- /networkSurveyGeolocation/geolocateSurvey.ts: -------------------------------------------------------------------------------- 1 | import type { DynamoDBClient } from '@aws-sdk/client-dynamodb' 2 | import { GetItemCommand } from '@aws-sdk/client-dynamodb' 3 | import { unmarshall } from '@aws-sdk/util-dynamodb' 4 | import { ErrorType, type ErrorInfo } from '../api/ErrorInfo.js' 5 | import type { Location } from '../geolocation/Location.js' 6 | import type { LocationSource } from '../cellGeolocation/stepFunction/types.js' 7 | 8 | export type Survey = { 9 | deviceId: string 10 | timestamp: Date 11 | surveyId: string 12 | unresolved: boolean 13 | lte?: Record 14 | nw?: string 15 | wifi?: Record 16 | } 17 | 18 | export const geolocateSurvey = 19 | ({ dynamodb, TableName }: { 
dynamodb: DynamoDBClient; TableName: string }) => 20 | async ( 21 | id: string, 22 | ): Promise< 23 | | { error: ErrorInfo } 24 | | { survey: Survey & { location?: Location; source?: LocationSource } } 25 | > => { 26 | try { 27 | const { Item } = await dynamodb.send( 28 | new GetItemCommand({ 29 | TableName, 30 | Key: { 31 | surveyId: { 32 | S: id, 33 | }, 34 | }, 35 | }), 36 | ) 37 | 38 | console.debug( 39 | JSON.stringify({ 40 | geolocateSurvey: Item, 41 | }), 42 | ) 43 | 44 | if (Item === undefined) throw new Error('NOT_FOUND') 45 | 46 | const entry = unmarshall(Item) 47 | const survey: Survey & { location?: Location; source?: LocationSource } = 48 | { 49 | surveyId: entry.surveyId, 50 | deviceId: entry.deviceId, 51 | timestamp: new Date(entry.timestamp), 52 | unresolved: entry.unresolved, 53 | source: entry.source ?? undefined, 54 | lte: entry.lte as Record | undefined, 55 | nw: entry.nw as string | undefined, 56 | wifi: entry.wifi as Record | undefined, 57 | } 58 | if ('lat' in entry) { 59 | survey.location = { 60 | lat: entry.lat, 61 | lng: entry.lng, 62 | accuracy: entry.accuracy ?? 
5000, 63 | } 64 | } 65 | return { survey } 66 | } catch (err) { 67 | if ( 68 | (err as Error).message === 'NOT_FOUND' || 69 | (err as Error).name === 'ResourceNotFoundException' 70 | ) 71 | return { 72 | error: { 73 | type: ErrorType.EntityNotFound, 74 | message: `Survey ${id} not found!`, 75 | }, 76 | } 77 | console.error( 78 | JSON.stringify({ 79 | geolocateSurvey: { 80 | err, 81 | errorMessage: (err as Error).message, 82 | id, 83 | TableName, 84 | }, 85 | }), 86 | ) 87 | return { 88 | error: { 89 | type: ErrorType.InternalError, 90 | message: (err as Error).message, 91 | }, 92 | } 93 | } 94 | } 95 | -------------------------------------------------------------------------------- /pgps/cacheKey.spec.ts: -------------------------------------------------------------------------------- 1 | import { cacheKey } from './cacheKey.js' 2 | import { gpsDay } from './gpsTime.js' 3 | import { describe, it } from 'node:test' 4 | import assert from 'node:assert' 5 | 6 | void describe('cacheKey', () => { 7 | void it('should create a cache key', () => 8 | assert.equal( 9 | cacheKey({ 10 | binHours: 1, 11 | request: { 12 | n: 42, 13 | int: 240, 14 | day: 15160, 15 | time: 40655, 16 | }, 17 | }), 18 | `42-240-15160-40655-${new Date() 19 | .toISOString() 20 | .slice(0, 13) 21 | .replace(/[:-]/g, '')}0000`, 22 | )) 23 | void it('should create a cache key with defaults', () => 24 | assert.equal( 25 | cacheKey({ 26 | request: {}, 27 | binHours: 1, 28 | }), 29 | `42-240-${gpsDay()}-0-${new Date() 30 | .toISOString() 31 | .slice(0, 13) 32 | .replace(/[:-]/g, '')}0000`, 33 | )) 34 | }) 35 | -------------------------------------------------------------------------------- /pgps/cacheKey.ts: -------------------------------------------------------------------------------- 1 | import type { Static } from '@sinclair/typebox' 2 | import { gpsDay } from './gpsTime.js' 3 | import type { pgpsRequestSchema } from './types.js' 4 | 5 | // Default values, all properties for requests are optional 6 | 
export const defaultNumberOfPredictions = 42 7 | export const defaultInterval = 240 8 | export const defaultTimeOfDay = 0 9 | 10 | export const cacheKey = ({ 11 | request, 12 | binHours, 13 | }: { 14 | request: Static 15 | binHours: number 16 | }): string => { 17 | const binMs = binHours * 60 * 60 * 1000 18 | const { n, day, int, time } = request 19 | return `${n ?? defaultNumberOfPredictions}-${int ?? defaultInterval}-${ 20 | day ?? gpsDay() 21 | }-${time ?? defaultTimeOfDay}-${new Date( 22 | Math.floor(Date.now() / binMs) * binMs, 23 | ) 24 | .toISOString() 25 | .slice(0, 19) 26 | .replace(/[:-]/g, '')}` 27 | } 28 | -------------------------------------------------------------------------------- /pgps/getCache.ts: -------------------------------------------------------------------------------- 1 | import type { DynamoDBClient } from '@aws-sdk/client-dynamodb' 2 | import { GetItemCommand } from '@aws-sdk/client-dynamodb' 3 | import { unmarshall } from '@aws-sdk/util-dynamodb' 4 | import type { Static } from '@sinclair/typebox' 5 | import { URL } from 'url' 6 | import { ErrorType, type ErrorInfo } from '../api/ErrorInfo.js' 7 | import type { pgpsRequestSchema } from './types.js' 8 | 9 | export type PGPSDataCache = Static & { 10 | source: string 11 | url?: URL 12 | unresolved?: boolean 13 | updatedAt: Date 14 | } 15 | 16 | export const getCache = 17 | ({ dynamodb, TableName }: { dynamodb: DynamoDBClient; TableName: string }) => 18 | async (cacheKey: string): Promise<{ error: ErrorInfo } | PGPSDataCache> => { 19 | try { 20 | const { Item } = await dynamodb.send( 21 | new GetItemCommand({ 22 | TableName, 23 | Key: { 24 | cacheKey: { 25 | S: cacheKey, 26 | }, 27 | }, 28 | }), 29 | ) 30 | 31 | if (Item === undefined) throw new Error('NOT_FOUND') 32 | 33 | const entry = unmarshall(Item) 34 | const i = { 35 | ...entry, 36 | url: entry.url !== undefined ? 
new URL(entry.url) : undefined, 37 | updatedAt: new Date(entry.updatedAt as string), 38 | } as PGPSDataCache 39 | 40 | console.debug( 41 | JSON.stringify({ 42 | getCache: { entry: i }, 43 | }), 44 | ) 45 | return i 46 | } catch (err) { 47 | if ( 48 | (err as Error).message === 'NOT_FOUND' || 49 | (err as Error).name === 'ResourceNotFoundException' 50 | ) 51 | return { 52 | error: { 53 | type: ErrorType.EntityNotFound, 54 | message: `Report ${cacheKey} not found!`, 55 | }, 56 | } 57 | console.error( 58 | JSON.stringify({ 59 | getCache: { 60 | err, 61 | errorMessage: (err as Error).message, 62 | id: cacheKey, 63 | TableName, 64 | }, 65 | }), 66 | ) 67 | return { 68 | error: { 69 | type: ErrorType.InternalError, 70 | message: (err as Error).message, 71 | }, 72 | } 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /pgps/gpsTime.spec.ts: -------------------------------------------------------------------------------- 1 | import { gpsDay } from './gpsTime.js' 2 | import { describe, it } from 'node:test' 3 | import assert from 'node:assert' 4 | 5 | void describe('GPS epoch time functions', () => { 6 | void it('should calculate the GPS epoch day', () => { 7 | assert.equal(gpsDay(new Date('2021-08-05T12:00:00Z')), 15188) 8 | }) 9 | }) 10 | -------------------------------------------------------------------------------- /pgps/gpsTime.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * The Global Positioning System (GPS) uses its own particular time scale GPS 3 | * time. 4 | * It differs from UTC by a nearly integer number of seconds. Both time scales 5 | * had identical epochs on January 5, 1980. Because GPS time is not incremented 6 | * by leap seconds the difference between UTC and GPS time is increasing. 
7 | */ 8 | 9 | const offSetToUnixTime = 315874800000 10 | 11 | /** 12 | * Return the microseconds since the GPS epoch 13 | */ 14 | export const gpsTime = (now?: Date): number => 15 | (now?.getTime() ?? Date.now()) - offSetToUnixTime 16 | 17 | /** 18 | * Return the days since the GPS epoch 19 | */ 20 | export const gpsDay = (now?: Date): number => 21 | Math.floor(gpsTime(now) / 1000 / 60 / 60 / 24) 22 | 23 | /** 24 | * Devices should not request data from the past. 25 | * 26 | * Grants some leeway for different implementations. 27 | */ 28 | export const minimumGpsDay = (): number => gpsDay() - 5 29 | -------------------------------------------------------------------------------- /pgps/types.ts: -------------------------------------------------------------------------------- 1 | import { Type } from '@sinclair/typebox' 2 | import { minimumGpsDay } from './gpsTime.js' 3 | 4 | /** 5 | * @see https://api.nrfcloud.com/v1#tag/Predicted-GPS/operation/GetPredictedAssistanceData 6 | */ 7 | export const pgpsRequestSchema = Type.Object({ 8 | n: Type.Optional( 9 | Type.Integer({ minimum: 1, title: 'number of predictions' }), 10 | ), 11 | int: Type.Optional( 12 | Type.Integer({ minimum: 1, title: 'prediction interval in minutes' }), 13 | ), 14 | day: Type.Optional( 15 | Type.Integer({ 16 | minimum: minimumGpsDay(), 17 | maximum: 99999, // The actual minimum depends on the provider in use, do some sanity clamping here. 
18 | title: 'start day of the prediction set as GPS Day', 19 | }), 20 | ), 21 | time: Type.Optional( 22 | Type.Integer({ 23 | minimum: 0, 24 | maximum: 86399, 25 | title: 'start time of the prediction set as seconds in day', 26 | }), 27 | ), 28 | }) 29 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json", 3 | "extends": ["github>bifravst/renovate-config"], 4 | "packageRules": [ 5 | { 6 | "matchPackagePatterns": [ 7 | "@nordicsemiconductor/asset-tracker-cloud-docs", 8 | "@sinclair/typebox", 9 | "@nordicsemiconductor/bdd-markdown" 10 | ], 11 | "groupName": "typebox" 12 | } 13 | ] 14 | } 15 | -------------------------------------------------------------------------------- /third-party/nrfcloud.com/apiclient.spec.ts: -------------------------------------------------------------------------------- 1 | import { toQueryString } from './apiclient.js' 2 | import { describe, it } from 'node:test' 3 | import assert from 'node:assert' 4 | 5 | void describe('nRF Cloud API client', () => { 6 | void it('should encode query strings', () => 7 | assert.equal( 8 | toQueryString({ 9 | types: [1, 2, 3, 4, 6, 7, 8, 9], 10 | eci: 21626624, 11 | tac: 30401, 12 | mcc: 242, 13 | mnc: 1, 14 | }), 15 | '?eci=21626624&mcc=242&mnc=1&tac=30401&types=1,2,3,4,6,7,8,9', 16 | )) 17 | }) 18 | -------------------------------------------------------------------------------- /third-party/nrfcloud.com/cellgeolocation.ts: -------------------------------------------------------------------------------- 1 | import { SSMClient } from '@aws-sdk/client-ssm' 2 | import type { TObject, TProperties } from '@sinclair/typebox' 3 | import { URL } from 'url' 4 | import type { 5 | LocationSource, 6 | MaybeCellGeoLocation, 7 | } from '../../cellGeolocation/stepFunction/types.js' 8 | import type { Cell } from 
'../../geolocation/Cell.js' 9 | import { parseMCCMNC } from '../../geolocation/parseMCCMNC.js' 10 | import { fromEnv } from '../../util/fromEnv.js' 11 | import { apiClient } from './apiclient.js' 12 | import { groundFixRequestSchema } from './groundFixRequestSchema.js' 13 | import { locateResultSchema } from './locate.js' 14 | import { getLocationServicesApiSettings } from './settings.js' 15 | 16 | const { stackName } = fromEnv({ stackName: 'STACK_NAME' })(process.env) 17 | 18 | const fetchSettings = getLocationServicesApiSettings({ 19 | ssm: new SSMClient({}), 20 | stackName, 21 | }) 22 | 23 | export const handler = async (cell: Cell): Promise => { 24 | console.log(JSON.stringify(cell)) 25 | 26 | const { serviceKey, teamId, endpoint } = await fetchSettings() 27 | const c = apiClient({ endpoint: new URL(endpoint), serviceKey, teamId }) 28 | 29 | const [mcc, mnc] = parseMCCMNC(cell.mccmnc) 30 | const maybeCellGeolocation = await c.post({ 31 | resource: 'location/ground-fix', 32 | payload: { 33 | lte: [ 34 | { 35 | eci: cell.cell, 36 | mcc, 37 | mnc, 38 | tac: cell.area, 39 | }, 40 | ], 41 | }, 42 | requestSchema: groundFixRequestSchema as unknown as TObject, 43 | responseSchema: locateResultSchema, 44 | }) 45 | if ('error' in maybeCellGeolocation) { 46 | console.error(JSON.stringify(maybeCellGeolocation)) 47 | return { 48 | located: false, 49 | } 50 | } 51 | const { lat, lon, uncertainty, fulfilledWith } = maybeCellGeolocation 52 | console.debug( 53 | JSON.stringify({ 54 | lat, 55 | lng: lon, 56 | accuracy: uncertainty, 57 | fulfilledWith, 58 | located: true, 59 | }), 60 | ) 61 | return { 62 | lat, 63 | lng: lon, 64 | accuracy: uncertainty, 65 | source: fulfilledWith as LocationSource, 66 | located: true, 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /third-party/nrfcloud.com/createToken.spec.ts: -------------------------------------------------------------------------------- 1 | import { execSync } from 
'child_process' 2 | import { randomUUID } from 'crypto' 3 | import jwt from 'jsonwebtoken' 4 | import { createToken } from './createToken.js' 5 | import { describe, it } from 'node:test' 6 | import { check, objectMatching } from 'tsmatchers' 7 | 8 | void describe('createToken', () => { 9 | void it('should create a token', () => { 10 | const key = execSync('openssl ecparam -name prime256v1 -genkey', { 11 | encoding: 'utf8', 12 | }) 13 | const teamId = randomUUID() 14 | const token = createToken(teamId, key) 15 | check(jwt.verify(token, key)).is( 16 | objectMatching({ 17 | aud: teamId, 18 | }), 19 | ) 20 | }) 21 | }) 22 | -------------------------------------------------------------------------------- /third-party/nrfcloud.com/createToken.ts: -------------------------------------------------------------------------------- 1 | import jwt from 'jsonwebtoken' 2 | 3 | export const createToken = (teamId: string, serviceKey: string): string => 4 | jwt.sign({ aud: teamId }, serviceKey, { 5 | algorithm: 'ES256', 6 | }) 7 | -------------------------------------------------------------------------------- /third-party/nrfcloud.com/groundFixRequestSchema.ts: -------------------------------------------------------------------------------- 1 | import { Type } from '@sinclair/typebox' 2 | 3 | const RSRP = Type.Integer({ 4 | minimum: -157, 5 | maximum: -44, 6 | title: 7 | 'RSRP: Reference Signal Received Power. Measured in dBm. See this page for more details. Range -157..-44', 8 | }) 9 | const RSRQ = Type.Number({ 10 | minimum: -34.5, 11 | maximum: 3.5, 12 | title: 13 | 'RSRQ: Reference Signal Received Quality. Measured in dB. See this page for more details. Range -34.5..3.5', 14 | }) 15 | const TimingAdvance = Type.Integer({ 16 | minimum: -34.5, 17 | maximum: 65535, 18 | title: 19 | 'TimingAdvance: The length of time a signal takes to reach the base station from a mobile phone (half of rtt=round trip time). The units are symbols (Ts) as specified in 3GPP TS 36.211 (LTE). 
The expected resolution for nRF Cloud API is 1 Ts. Range 0..20512. 65535 is reported if timing advance cannot be determined.', 20 | }) 21 | const EARFCN = Type.Integer({ 22 | description: 23 | 'Evolved Absolute Radio Frequency Channel (E-ARFCN). Range: 0..262143', 24 | minimum: 0, 25 | maximum: 262143, 26 | }) 27 | const PCI = Type.Integer({ 28 | description: 'Physical Cell Identity (PCI). Range: 0..503', 29 | minimum: 0, 30 | maximum: 504, 31 | }) 32 | 33 | const neighboringCellsSurvey = Type.Array( 34 | Type.Object( 35 | { 36 | eci: Type.Integer({ minimum: 1 }), 37 | mcc: Type.Integer({ minimum: 100, maximum: 999 }), 38 | mnc: Type.Integer({ minimum: 0, maximum: 999 }), 39 | tac: Type.Integer({ minimum: 1 }), 40 | earfcn: Type.Optional(EARFCN), 41 | adv: Type.Optional(TimingAdvance), 42 | rsrp: Type.Optional(RSRP), 43 | rsrq: Type.Optional(RSRQ), 44 | nmr: Type.Optional( 45 | Type.Array( 46 | Type.Object( 47 | { 48 | pci: PCI, 49 | earfcn: EARFCN, 50 | rsrp: RSRP, 51 | rsrq: RSRQ, 52 | }, 53 | { additionalProperties: false }, 54 | ), 55 | { minItems: 1 }, 56 | ), 57 | ), 58 | }, 59 | { additionalProperties: false }, 60 | ), 61 | { minItems: 1 }, 62 | ) 63 | 64 | const wifiSiteSurvey = Type.Object({ 65 | accessPoints: Type.Array( 66 | Type.Object( 67 | { 68 | macAddress: Type.String({ 69 | pattern: '^([a-fA-F0-9]{2}:){5}[a-fA-F0-9]{2}$', 70 | }), 71 | age: Type.Optional(Type.Integer()), 72 | frequency: Type.Optional(Type.Number()), 73 | channel: Type.Optional(Type.Integer()), 74 | signalStrength: Type.Optional( 75 | Type.Integer({ minimum: -128, maximum: 0 }), 76 | ), 77 | signalToNoiseRadio: Type.Optional(Type.Integer()), 78 | ssid: Type.Optional(Type.String()), 79 | }, 80 | { additionalProperties: false }, 81 | ), 82 | { 83 | minItems: 2, 84 | }, 85 | ), 86 | }) 87 | 88 | /** 89 | * @see https://api.nrfcloud.com/v1#tag/Ground-Fix/operation/GetLocationFromCellTowersOrWifiNetworks 90 | */ 91 | export const groundFixRequestSchema = Type.Object( 92 | { 93 | lte: 
Type.Optional(neighboringCellsSurvey), 94 | wifi: Type.Optional(wifiSiteSurvey), 95 | }, 96 | { 97 | additionalProperties: false, 98 | }, 99 | ) 100 | -------------------------------------------------------------------------------- /third-party/nrfcloud.com/locate.ts: -------------------------------------------------------------------------------- 1 | import { Type } from '@sinclair/typebox' 2 | import { LocationSource } from '../../cellGeolocation/stepFunction/types.js' 3 | 4 | /** 5 | * @see https://api.nrfcloud.com/v1#tag/Ground-Fix/operation/GetLocationFromCellsOrWifiNetworks 6 | */ 7 | export const locateResultSchema = Type.Object({ 8 | lat: Type.Number({ 9 | minimum: -90, 10 | maximum: 90, 11 | description: 'Global grid line, north to south. Vertical.', 12 | }), 13 | lon: Type.Number({ 14 | minimum: -180, 15 | maximum: 180, 16 | description: 'Global grid line, east to west. Horizontal.', 17 | }), 18 | uncertainty: Type.Number({ 19 | minimum: 0, 20 | description: 21 | 'Radius of the uncertainty circle around the location in meters. 
Also known as Horizontal Positioning Error (HPE).', 22 | }), 23 | fulfilledWith: Type.Enum(LocationSource), 24 | }) 25 | -------------------------------------------------------------------------------- /third-party/nrfcloud.com/networksurveygeolocation.ts: -------------------------------------------------------------------------------- 1 | import { SSMClient } from '@aws-sdk/client-ssm' 2 | import { 3 | NeighboringCellMeasurements, 4 | validateWithType, 5 | WiFiSiteSurvey, 6 | } from '@nordicsemiconductor/asset-tracker-cloud-docs/protocol' 7 | import { 8 | Type, 9 | type Static, 10 | type TObject, 11 | type TProperties, 12 | } from '@sinclair/typebox' 13 | import { URL } from 'url' 14 | import type { MaybeLocation } from '../../geolocation/types.js' 15 | import { expandMac } from '../../networkSurveyGeolocation/expandMac.js' 16 | import { fromEnv } from '../../util/fromEnv.js' 17 | import { apiClient } from './apiclient.js' 18 | import { groundFixRequestSchema } from './groundFixRequestSchema.js' 19 | import { locateResultSchema } from './locate.js' 20 | import { getLocationServicesApiSettings } from './settings.js' 21 | import type { LocationSource } from '../../cellGeolocation/stepFunction/types.js' 22 | 23 | const { stackName } = fromEnv({ 24 | stackName: 'STACK_NAME', 25 | })(process.env) 26 | 27 | const settingsPromise = getLocationServicesApiSettings({ 28 | ssm: new SSMClient({}), 29 | stackName, 30 | })() 31 | 32 | const networkSurveyLocateInputSchema = Type.Object({ 33 | surveyId: Type.String(), 34 | deviceId: Type.String(), 35 | timestamp: Type.String(), 36 | nw: Type.String({ minLength: 1 }), 37 | lte: Type.Optional(NeighboringCellMeasurements), 38 | wifi: Type.Optional(WiFiSiteSurvey), 39 | }) 40 | 41 | const validateInput = validateWithType(networkSurveyLocateInputSchema) 42 | 43 | export const handler = async ( 44 | event: Static, 45 | ): Promise => { 46 | console.log(JSON.stringify(event)) 47 | 48 | const { serviceKey, teamId, endpoint } = await 
settingsPromise 49 | const c = apiClient({ endpoint: new URL(endpoint), serviceKey, teamId }) 50 | 51 | const maybeValidInput = validateInput(event) 52 | if ('errors' in maybeValidInput) { 53 | console.error(JSON.stringify(maybeValidInput)) 54 | return { 55 | located: false, 56 | } 57 | } 58 | 59 | // Request to nRFCloud 60 | const payload: Static = {} 61 | if (maybeValidInput.value.wifi !== undefined) { 62 | payload.wifi = { 63 | accessPoints: maybeValidInput.value.wifi.aps.map((macAddress) => ({ 64 | macAddress: expandMac(macAddress), 65 | })), 66 | } 67 | } 68 | if (maybeValidInput.value.lte !== undefined) { 69 | const report = maybeValidInput.value.lte 70 | payload.lte = [ 71 | { 72 | mcc: report.mcc, 73 | mnc: report.mnc, 74 | eci: report.cell, 75 | tac: report.area, 76 | earfcn: report.earfcn, 77 | adv: report.adv, 78 | rsrp: report.rsrp, 79 | rsrq: report.rsrq, 80 | nmr: report.nmr?.map(({ cell, ...rest }) => ({ 81 | pci: cell, 82 | ...rest, 83 | })), 84 | }, 85 | ] 86 | } 87 | 88 | const maybeWifiGeolocation = await c.post({ 89 | resource: 'location/ground-fix', 90 | payload, 91 | requestSchema: groundFixRequestSchema as unknown as TObject, 92 | responseSchema: locateResultSchema, 93 | }) 94 | 95 | if ('error' in maybeWifiGeolocation) { 96 | console.error(JSON.stringify(maybeWifiGeolocation)) 97 | return { 98 | located: false, 99 | } 100 | } 101 | 102 | const { lat, lon, uncertainty, fulfilledWith } = maybeWifiGeolocation 103 | console.debug( 104 | JSON.stringify({ lat, lng: lon, accuracy: uncertainty, fulfilledWith }), 105 | ) 106 | return { 107 | lat, 108 | lng: lon, 109 | accuracy: uncertainty, 110 | located: true, 111 | source: fulfilledWith as LocationSource, 112 | } 113 | } 114 | -------------------------------------------------------------------------------- /third-party/nrfcloud.com/pgps.ts: -------------------------------------------------------------------------------- 1 | import { SSMClient } from '@aws-sdk/client-ssm' 2 | import { 
validateWithType } from '@nordicsemiconductor/asset-tracker-cloud-docs/protocol' 3 | import { Type, type Static } from '@sinclair/typebox' 4 | import { URL } from 'url' 5 | import { 6 | defaultInterval, 7 | defaultNumberOfPredictions, 8 | defaultTimeOfDay, 9 | } from '../../pgps/cacheKey.js' 10 | import { gpsDay, minimumGpsDay } from '../../pgps/gpsTime.js' 11 | import { pgpsRequestSchema } from '../../pgps/types.js' 12 | import { fromEnv } from '../../util/fromEnv.js' 13 | import { apiClient } from './apiclient.js' 14 | import { getLocationServicesApiSettings } from './settings.js' 15 | 16 | const { stackName } = fromEnv({ stackName: 'STACK_NAME' })(process.env) 17 | 18 | const settingsPromise = getLocationServicesApiSettings({ 19 | ssm: new SSMClient({}), 20 | stackName, 21 | })() 22 | 23 | enum Interval { 24 | twoHours = 120, 25 | fourHours = 240, 26 | sixHours = 360, 27 | eightHours = 480, 28 | } 29 | 30 | /** 31 | * @see https://api.nrfcloud.com/v1#tag/Predicted-GPS/operation/GetPredictedAssistanceData 32 | */ 33 | const apiRequestSchema = Type.Object( 34 | { 35 | predictionCount: Type.Optional( 36 | Type.Integer({ 37 | minimum: 1, 38 | maximum: 168, 39 | title: 'number of predictions', 40 | }), 41 | ), 42 | predictionIntervalMinutes: Type.Optional( 43 | Type.Enum(Interval, { title: 'prediction interval in minutes' }), 44 | ), 45 | startGpsDay: Type.Optional( 46 | Type.Integer({ 47 | minimum: minimumGpsDay(), 48 | maximum: gpsDay() + 14, // Current GPS day + 2 weeks is the upper bound for nRF Cloud 49 | title: 'start day of the prediction set as GPS Day', 50 | }), 51 | ), 52 | startGpsTimeOfDaySeconds: Type.Optional( 53 | Type.Integer({ 54 | minimum: 0, 55 | maximum: 86399, 56 | title: 'start time of the prediction set as seconds in day', 57 | }), 58 | ), 59 | }, 60 | { additionalProperties: false }, 61 | ) 62 | 63 | /** 64 | * @see https://api.nrfcloud.com/v1#tag/Predicted-GPS/operation/GetPredictedAssistanceData 65 | */ 66 | const apiResponseSchema = 
Type.Object( 67 | { 68 | path: Type.String({ minLength: 1 }), 69 | host: Type.String({ minLength: 1 }), 70 | }, 71 | { additionalProperties: false }, 72 | ) 73 | 74 | const validateInput = validateWithType(pgpsRequestSchema) 75 | 76 | export const handler = async ( 77 | pgps: Static, 78 | ): Promise<{ resolved: boolean; url?: URL }> => { 79 | console.log(JSON.stringify(pgps)) 80 | const maybeValidInput = validateInput(pgps) 81 | if ('errors' in maybeValidInput) { 82 | console.error(JSON.stringify(maybeValidInput)) 83 | return { 84 | resolved: false, 85 | } 86 | } 87 | 88 | const { serviceKey, teamId, endpoint } = await settingsPromise 89 | const c = apiClient({ endpoint: new URL(endpoint), serviceKey, teamId }) 90 | 91 | const { n, int, day, time } = maybeValidInput.value 92 | 93 | const result = await c.get({ 94 | resource: 'location/pgps', 95 | payload: { 96 | predictionCount: n ?? defaultNumberOfPredictions, 97 | predictionIntervalMinutes: int ?? defaultInterval, 98 | startGpsDay: day ?? gpsDay(), 99 | startGpsTimeOfDaySeconds: time ?? 
defaultTimeOfDay, 100 | }, 101 | requestSchema: apiRequestSchema, 102 | responseSchema: apiResponseSchema, 103 | }) 104 | 105 | if ('error' in result) { 106 | console.error(JSON.stringify(result)) 107 | return { resolved: false } 108 | } 109 | 110 | return { 111 | resolved: true, 112 | url: new URL(`https://${result.host}/${result.path}`), 113 | } 114 | } 115 | -------------------------------------------------------------------------------- /third-party/nrfcloud.com/settings.ts: -------------------------------------------------------------------------------- 1 | import type { SSMClient } from '@aws-sdk/client-ssm' 2 | import { getSettings } from '../../util/settings.js' 3 | 4 | const getApiSettings = 5 | ({ ssm, stackName }: { ssm: SSMClient; stackName: string }) => 6 | async (): Promise<{ 7 | endpoint: string 8 | serviceKey: string 9 | teamId: string 10 | }> => { 11 | const p = await getSettings({ 12 | ssm, 13 | stackName, 14 | scope: 'thirdParty', 15 | system: 'nrfcloud', 16 | })() 17 | const { endpoint, teamId, serviceKey } = p 18 | if (teamId === undefined) 19 | throw new Error(`No nRF Cloud team ID configured!`) 20 | if (serviceKey === undefined) 21 | throw new Error(`No nRF Cloud service key configured!`) 22 | return { 23 | serviceKey, 24 | endpoint: endpoint ?? 
'https://api.nrfcloud.com/', 25 | teamId, 26 | } 27 | } 28 | 29 | export const getLocationServicesApiSettings = ({ 30 | ssm, 31 | stackName, 32 | }: { 33 | ssm: SSMClient 34 | stackName: string 35 | }): ReturnType => getApiSettings({ ssm, stackName }) 36 | -------------------------------------------------------------------------------- /third-party/sentry.io/settings.ts: -------------------------------------------------------------------------------- 1 | import type { SSMClient } from '@aws-sdk/client-ssm' 2 | import { getSettings } from '../../util/settings.js' 3 | 4 | export const getSentrySettings = 5 | ({ ssm, stackName }: { ssm: SSMClient; stackName: string }) => 6 | async (): Promise<{ 7 | sentryDsn?: string 8 | }> => { 9 | try { 10 | const p = await getSettings({ 11 | ssm, 12 | stackName, 13 | scope: 'thirdParty', 14 | system: 'sentry', 15 | })() 16 | return { 17 | sentryDsn: p.sentryDsn, 18 | } 19 | } catch (err) { 20 | return {} 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "esnext", 4 | "module": "nodenext", 5 | "moduleResolution": "nodenext", 6 | "resolveJsonModule": true, 7 | "esModuleInterop": true, 8 | "strict": true, 9 | "allowUnreachableCode": false, 10 | "allowUnusedLabels": false, 11 | "forceConsistentCasingInFileNames": true, 12 | "noFallthroughCasesInSwitch": true, 13 | "noImplicitOverride": true, 14 | "noImplicitReturns": true, 15 | "noUncheckedIndexedAccess": true, 16 | "noUnusedLocals": true, 17 | "noEmit": true, 18 | "verbatimModuleSyntax": true, 19 | "skipLibCheck": true 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /util/fromEnv.ts: -------------------------------------------------------------------------------- 1 | export const fromEnv = 2 | >(def: T) => 3 | (env: NodeJS.ProcessEnv): Record => 
4 | Object.entries(def).reduce( 5 | (res, [defKey, envKey]) => { 6 | const v = env[envKey] 7 | if (v === undefined || v.length === 0) 8 | throw new Error(`${envKey} is not defined in environment!`) 9 | return { ...res, [defKey]: v } 10 | }, 11 | {} as Record, 12 | ) 13 | -------------------------------------------------------------------------------- /util/isNotEmpty.ts: -------------------------------------------------------------------------------- 1 | export const isEmpty = (s: string): boolean => s.length === 0 2 | export const isNotEmpty = (s: string): boolean => !isEmpty(s) 3 | -------------------------------------------------------------------------------- /util/isNullOrUndefined.ts: -------------------------------------------------------------------------------- 1 | export const isNullOrUndefined = (arg?: unknown): boolean => 2 | arg === undefined || arg === null 3 | 4 | export const isNotNullOrUndefined = (arg?: unknown): boolean => 5 | !isNullOrUndefined(arg) 6 | -------------------------------------------------------------------------------- /util/paginate.ts: -------------------------------------------------------------------------------- 1 | import { isEmpty } from './isNotEmpty.js' 2 | import { isNullOrUndefined } from './isNullOrUndefined.js' 3 | 4 | /** 5 | * Iteratively follows paginated results. 6 | * NOTE: This method has no upper runtime limit and may time out. 
7 | */ 8 | export const paginate = async ({ 9 | paginator, 10 | startKey, 11 | }: { 12 | paginator: (startKey?: any) => Promise 13 | startKey?: any 14 | }): Promise => { 15 | const nextStartKey = await paginator(startKey) 16 | if (isNullOrUndefined(nextStartKey)) return 17 | if (typeof nextStartKey === 'string' && isEmpty(nextStartKey)) return 18 | await paginate({ 19 | paginator, 20 | startKey: nextStartKey, 21 | }) 22 | } 23 | -------------------------------------------------------------------------------- /util/parseJSON.ts: -------------------------------------------------------------------------------- 1 | export const parseJSON = ( 2 | json: string, 3 | ): { error: Error } | { json: Record } => { 4 | try { 5 | return JSON.parse(json) 6 | } catch (error) { 7 | return { error: error as Error } 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /util/settings.ts: -------------------------------------------------------------------------------- 1 | import type { SSMClient } from '@aws-sdk/client-ssm' 2 | import { 3 | DeleteParameterCommand, 4 | GetParametersByPathCommand, 5 | PutParameterCommand, 6 | type Parameter, 7 | } from '@aws-sdk/client-ssm' 8 | import { paginate } from './paginate.js' 9 | 10 | type Scopes = 'context' | 'config' | 'thirdParty' | 'codebuild' 11 | type Systems = 'stack' | 'github' | 'nrfcloud' | 'sentry' 12 | 13 | export const settingsPath = ({ 14 | stackName, 15 | scope, 16 | system, 17 | }: { 18 | stackName: string 19 | scope: Scopes 20 | system: Systems 21 | }): string => `/${stackName}/${scope}/${system}` 22 | 23 | const settingsName = ({ 24 | stackName, 25 | scope, 26 | system, 27 | property, 28 | }: { 29 | stackName: string 30 | scope: Scopes 31 | system: Systems 32 | property: string 33 | }): string => `${settingsPath({ stackName, scope, system })}/${property}` 34 | 35 | export const getSettings = 36 | >({ 37 | ssm, 38 | stackName, 39 | scope, 40 | system, 41 | }: { 42 | ssm: SSMClient 43 
| stackName: string 44 | scope: Scopes 45 | system: Systems 46 | }) => 47 | async (): Promise => { 48 | const Path = settingsPath({ stackName, scope, system }) 49 | const Parameters: Parameter[] = [] 50 | await paginate({ 51 | paginator: async (NextToken?: string) => 52 | ssm 53 | .send( 54 | new GetParametersByPathCommand({ 55 | Path, 56 | Recursive: true, 57 | NextToken, 58 | }), 59 | ) 60 | 61 | .then(async ({ Parameters: p, NextToken }) => { 62 | if (p !== undefined) Parameters.push(...p) 63 | return NextToken 64 | }), 65 | }) 66 | 67 | if (Parameters.length === 0) 68 | throw new Error(`System not configured: ${Path}!`) 69 | 70 | return Parameters.map(({ Name, ...rest }) => ({ 71 | ...rest, 72 | Name: Name?.replace(`${Path}/`, ''), 73 | })).reduce( 74 | (settings, { Name, Value }) => ({ 75 | ...settings, 76 | [Name ?? '']: Value ?? '', 77 | }), 78 | {} as Settings, 79 | ) 80 | } 81 | 82 | export const putSettings = 83 | ({ 84 | ssm, 85 | stackName, 86 | scope, 87 | system, 88 | }: { 89 | ssm: SSMClient 90 | stackName: string 91 | scope: Scopes 92 | system: Systems 93 | }) => 94 | async ({ 95 | property, 96 | value, 97 | deleteBeforeUpdate, 98 | }: { 99 | property: string 100 | value: string 101 | /** 102 | * Useful when depending on the parameter having version 1, e.g. for use in CloudFormation 103 | */ 104 | deleteBeforeUpdate?: boolean 105 | }): Promise<{ name: string }> => { 106 | const Name = settingsName({ stackName, scope, system, property }) 107 | if (deleteBeforeUpdate ?? false) { 108 | try { 109 | await ssm.send( 110 | new DeleteParameterCommand({ 111 | Name, 112 | }), 113 | ) 114 | } catch { 115 | // pass 116 | } 117 | } 118 | await ssm.send( 119 | new PutParameterCommand({ 120 | Name, 121 | Value: value, 122 | Type: 'String', 123 | Overwrite: !(deleteBeforeUpdate ?? 
false), 124 | }), 125 | ) 126 | return { name: Name } 127 | } 128 | 129 | export const deleteSettings = 130 | ({ 131 | ssm, 132 | stackName, 133 | scope, 134 | system, 135 | }: { 136 | ssm: SSMClient 137 | stackName: string 138 | scope: Scopes 139 | system: Systems 140 | }) => 141 | async ({ property }: { property: string }): Promise<{ name: string }> => { 142 | const Name = settingsName({ stackName, scope, system, property }) 143 | await ssm.send( 144 | new DeleteParameterCommand({ 145 | Name, 146 | }), 147 | ) 148 | return { name: Name } 149 | } 150 | --------------------------------------------------------------------------------