├── .github
└── workflows
│ ├── openapi.yml
│ ├── publication.yml
│ └── test-server.yml
├── .gitignore
├── LICENSE
├── README.md
├── dashboard
├── .env.development
├── .env.template
├── .eslintrc.json
├── .gitignore
├── Dockerfile
├── README.md
├── components.json
├── next.config.js
├── package-lock.json
├── package.json
├── postcss.config.js
├── public
│ ├── tenta-artwork-recolored.png
│ ├── tenta-banner-dashboard-og-1024-512.png
│ ├── tenta-banner-dashboard-og-1200-630.png
│ ├── tenta-favicon-1024.png
│ └── tenta-favicon-512.png
├── src
│ ├── .gitignore
│ ├── app
│ │ ├── favicon.ico
│ │ ├── globals.css
│ │ ├── layout.tsx
│ │ ├── login
│ │ │ └── page.tsx
│ │ ├── networks
│ │ │ └── [networkIdentifier]
│ │ │ │ ├── layout.tsx
│ │ │ │ ├── page.tsx
│ │ │ │ └── sensors
│ │ │ │ └── [sensorIdentifier]
│ │ │ │ ├── .gitignore
│ │ │ │ ├── activity
│ │ │ │ └── page.tsx
│ │ │ │ ├── configurations
│ │ │ │ └── page.tsx
│ │ │ │ ├── layout.tsx
│ │ │ │ ├── logs
│ │ │ │ └── page.tsx
│ │ │ │ ├── measurements
│ │ │ │ └── page.tsx
│ │ │ │ └── plots
│ │ │ │ └── page.tsx
│ │ ├── offline
│ │ │ └── page.tsx
│ │ ├── page.tsx
│ │ ├── signup
│ │ │ └── page.tsx
│ │ ├── style
│ │ │ └── page.tsx
│ │ └── swr-provider.tsx
│ ├── components
│ │ ├── custom
│ │ │ ├── auth-loading-screen.tsx
│ │ │ ├── config-revision-tag.tsx
│ │ │ ├── creation-dialog.tsx
│ │ │ ├── navigation-bar.tsx
│ │ │ ├── pagination.tsx
│ │ │ ├── spinner.tsx
│ │ │ ├── the-tenta.tsx
│ │ │ └── timestamp-label.tsx
│ │ └── ui
│ │ │ ├── button.tsx
│ │ │ ├── dialog.tsx
│ │ │ ├── input.tsx
│ │ │ ├── label.tsx
│ │ │ ├── select.tsx
│ │ │ ├── tabs.tsx
│ │ │ ├── textarea.tsx
│ │ │ └── tooltip.tsx
│ ├── lib
│ │ └── utils.ts
│ └── requests
│ │ ├── configurations.ts
│ │ ├── logs.ts
│ │ ├── measurements-aggregation.ts
│ │ ├── measurements.ts
│ │ ├── networks.ts
│ │ ├── sensors.ts
│ │ ├── status.ts
│ │ └── user.ts
├── tailwind.config.js
└── tsconfig.json
├── docker-compose.yml
├── docs
├── README.md
├── netlify.toml
├── next-env.d.ts
├── next.config.js
├── package-lock.json
├── package.json
├── pages
│ ├── _app.mdx
│ ├── _meta.json
│ ├── community.mdx
│ ├── connect.mdx
│ ├── contribute.mdx
│ ├── deployment.mdx
│ ├── design.mdx
│ ├── export.mdx
│ ├── index.mdx
│ ├── introduction.mdx
│ ├── mqtt.mdx
│ ├── next.mdx
│ ├── overview.mdx
│ └── roadmap.mdx
├── postcss.config.js
├── public
│ ├── architecture.png
│ └── banner.png
├── style.css
├── tailwind.config.js
├── theme.config.jsx
└── tsconfig.json
├── publication
├── images
│ ├── architecture.png
│ ├── configurations.png
│ └── screenshot.png
├── paper.bib
└── paper.md
└── server
├── .env.example
├── .gitignore
├── .python-version
├── Dockerfile
├── app
├── __init__.py
├── auth.py
├── database.py
├── errors.py
├── logs.py
├── main.py
├── mqtt.py
├── queries.sql
├── settings.py
├── utils.py
└── validation
│ ├── __init__.py
│ ├── constants.py
│ ├── mqtt.py
│ ├── routes.py
│ └── types.py
├── migrations
└── .gitkeep
├── openapi.yml
├── poetry.lock
├── pyproject.toml
├── schema.sql
├── scripts
├── README.md
├── build
├── check
├── develop
├── initialize
├── initialize.py
├── jupyter
├── setup
└── test
└── tests
├── README.md
├── __init__.py
├── conftest.py
├── data.json
├── mosquitto.conf
├── test_mqtt.py
├── test_routes.py
└── test_validation.py
/.github/workflows/openapi.yml:
--------------------------------------------------------------------------------
1 | name: openapi
2 | on:
3 | push:
4 | branches: [main]
5 | paths:
6 | - server/openapi.yml
7 | - .github/workflows/openapi.yml
8 | jobs:
9 | deploy:
10 | runs-on: ubuntu-latest
11 | steps:
12 | - name: Checkout repository
13 | uses: actions/checkout@v3
14 | - name: Deploy OpenAPI documentation
15 | uses: bump-sh/github-action@v1
16 | with:
17 | file: server/openapi.yml
18 | doc: 24616c25-ad93-410b-8a2f-d3a9b96c04c6
19 | token: ${{ secrets.BUMP_TOKEN }}
20 |
--------------------------------------------------------------------------------
/.github/workflows/publication.yml:
--------------------------------------------------------------------------------
1 | name: Publication
2 | on:
3 | push:
4 | paths:
5 | - publication/**
6 | - .github/workflows/publication.yml
7 |
8 | jobs:
9 | paper:
10 | runs-on: ubuntu-latest
11 | steps:
12 | - name: Checkout repository
13 | uses: actions/checkout@v3
14 | - name: Build PDF
15 | uses: openjournals/openjournals-draft-action@master
16 | with:
17 | journal: joss
18 | paper-path: publication/paper.md
19 | - name: Upload artifact
20 | uses: actions/upload-artifact@v4
21 | with:
22 | name: paper
23 | path: publication/paper.pdf
24 |
--------------------------------------------------------------------------------
/.github/workflows/test-server.yml:
--------------------------------------------------------------------------------
1 | name: test-server
2 | on:
3 | push:
4 | branches: [main]
5 | paths:
6 | - server/**
7 | - .github/workflows/test-server.yml
8 | pull_request:
9 | paths:
10 | - server/**
11 | - .github/workflows/test-server.yml
12 | jobs:
13 | test:
14 | runs-on: ubuntu-latest
15 | defaults:
16 | run:
17 | working-directory: server
18 | shell: bash
19 | steps:
20 | - name: Checkout repository
21 | uses: actions/checkout@v3
22 | - name: Set up Python
23 | uses: actions/setup-python@v4 # Uses the Python version in .python-version
24 | with:
25 | python-version-file: server/.python-version
26 | - name: Install poetry
27 | uses: snok/install-poetry@v1
28 | with:
29 | virtualenvs-create: true
30 | virtualenvs-in-project: true
31 | installer-parallel: true
32 | - name: Load virtual environment cache
33 | id: cache
34 | uses: actions/cache@v2
35 | with:
36 | path: server/.venv
37 | key: ${{ runner.os }}-${{ hashFiles('server/poetry.lock') }}-2 # Increment to invalidate cache
38 | - name: Install dependencies
39 | if: steps.cache.outputs.cache-hit != 'true'
40 | run: scripts/setup
41 | - name: Run tests
42 | run: scripts/test
43 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2023 iterize
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | # Tenta
4 |
5 | 
6 |
7 | Tenta allows you to manage sensors remotely and in real-time:
8 |
9 | - Collect and store measurements and logs from sensors
10 | - Configure sensors remotely
11 | - Monitor sensors in real-time with an intuitive dashboard
12 |
13 | Tenta is lightweight and composable. It is designed to be used as a building block in your IoT stack, together with other awesome tools like [Grafana](https://grafana.com/), [DuckDB](https://duckdb.org/), or [polars](https://www.pola.rs/). Sensors connect to Tenta over a language-independent MQTT interface.
14 |
15 | _Read the documentation at [tenta.onrender.com](https://tenta.onrender.com/)_
16 |
17 | ## Try it out!
18 |
19 | You can try out Tenta in a few minutes with Docker Compose. Clone the repository and run:
20 |
21 | ```sh
22 | NEXT_PUBLIC_BUILD_TIMESTAMP=$(date +%s) COMMIT_SHA=$(git rev-parse --verify HEAD) BRANCH_NAME=$(git branch --show-current) docker compose up --build
23 | ```
24 |
25 | The dashboard will be available at [http://localhost:3000](http://localhost:3000). You can log in with the default username `happy-un1c0rn` and password `12345678`.
26 |
27 | You can exit the application with `Ctrl+C` and remove the containers with:
28 |
29 | ```sh
30 | docker compose down -v
31 | ```
32 |
33 | ## More
34 |
35 | **Publication:** [](https://joss.theoj.org/papers/5daf8d2d13c01da24e949c20a08d29d0)
36 |
37 | **License:** Tenta is licensed under the [MIT License](https://github.com/iterize/tenta/blob/main/LICENSE).
38 |
39 | **Research:** We are open to collaborations! If you want to use Tenta in your research, don't hesitate to reach out to contact@iterize.dev. We are happy to help you get started and provide support.
40 |
41 | **Contributing:** We are happy about contributions to Tenta! You can start by reading [our contribution guide](https://tenta.onrender.com/contribute).
42 |
43 | **Versioning:** Tenta's MQTT, HTTP, and database interfaces adhere to Semantic Versioning. Changes will be tracked in release notes. Please expect breaking changes until we reach version 1.0.0.
44 |
--------------------------------------------------------------------------------
/dashboard/.env.development:
--------------------------------------------------------------------------------
1 | # required | URL of the tenta server (no trailing slash!)
2 | NEXT_PUBLIC_SERVER_URL="http://localhost:8421"
3 |
4 | # optional | rendered on the login page
5 | NEXT_PUBLIC_CONTACT_EMAIL="contact.email@login.page"
6 |
7 | # optional | rendered in the header
8 | NEXT_PUBLIC_INSTANCE_TITLE="Your Department Name"
9 |
10 |
--------------------------------------------------------------------------------
/dashboard/.env.template:
--------------------------------------------------------------------------------
1 | # put this into the ".env.local" file
2 |
3 | # required | URL of the tenta server (no trailing slash!)
4 | NEXT_PUBLIC_SERVER_URL="http://your-server-domain.com"
5 |
6 | # optional | rendered on the login page
7 | NEXT_PUBLIC_CONTACT_EMAIL="contact.email@login.page"
8 |
9 | # optional | rendered in the header
10 | NEXT_PUBLIC_INSTANCE_TITLE="Your Department Name"
11 |
12 |
--------------------------------------------------------------------------------
/dashboard/.eslintrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "next/core-web-vitals"
3 | }
4 |
--------------------------------------------------------------------------------
/dashboard/.gitignore:
--------------------------------------------------------------------------------
1 | # custom
2 | hidden/
3 |
4 | # Created by https://www.toptal.com/developers/gitignore/api/node,nextjs
5 | # Edit at https://www.toptal.com/developers/gitignore?templates=node,nextjs
6 |
7 | ### NextJS ###
8 | # dependencies
9 | /node_modules
10 | /.pnp
11 | .pnp.js
12 |
13 | # testing
14 | /coverage
15 |
16 | # next.js
17 | /.next/
18 | /out/
19 |
20 | # production
21 | /build
22 |
23 | # misc
24 | .DS_Store
25 | *.pem
26 |
27 | # debug
28 | npm-debug.log*
29 | yarn-debug.log*
30 | yarn-error.log*
31 | .pnpm-debug.log*
32 |
33 | # local env files
34 | .env*.local
35 |
36 | # vercel
37 | .vercel
38 |
39 | # typescript
40 | *.tsbuildinfo
41 | next-env.d.ts
42 |
43 | ### Node ###
44 | # Logs
45 | logs
46 | *.log
47 | lerna-debug.log*
48 |
49 | # Diagnostic reports (https://nodejs.org/api/report.html)
50 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
51 |
52 | # Runtime data
53 | pids
54 | *.pid
55 | *.seed
56 | *.pid.lock
57 |
58 | # Directory for instrumented libs generated by jscoverage/JSCover
59 | lib-cov
60 |
61 | # Coverage directory used by tools like istanbul
62 | coverage
63 | *.lcov
64 |
65 | # nyc test coverage
66 | .nyc_output
67 |
68 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
69 | .grunt
70 |
71 | # Bower dependency directory (https://bower.io/)
72 | bower_components
73 |
74 | # node-waf configuration
75 | .lock-wscript
76 |
77 | # Compiled binary addons (https://nodejs.org/api/addons.html)
78 | build/Release
79 |
80 | # Dependency directories
81 | node_modules/
82 | jspm_packages/
83 |
84 | # Snowpack dependency directory (https://snowpack.dev/)
85 | web_modules/
86 |
87 | # TypeScript cache
88 |
89 | # Optional npm cache directory
90 | .npm
91 |
92 | # Optional eslint cache
93 | .eslintcache
94 |
95 | # Optional stylelint cache
96 | .stylelintcache
97 |
98 | # Microbundle cache
99 | .rpt2_cache/
100 | .rts2_cache_cjs/
101 | .rts2_cache_es/
102 | .rts2_cache_umd/
103 |
104 | # Optional REPL history
105 | .node_repl_history
106 |
107 | # Output of 'npm pack'
108 | *.tgz
109 |
110 | # Yarn Integrity file
111 | .yarn-integrity
112 |
113 | # dotenv environment variable files
114 | .env
115 | .env.development.local
116 | .env.test.local
117 | .env.production.local
118 | .env.local
119 |
120 | # parcel-bundler cache (https://parceljs.org/)
121 | .cache
122 | .parcel-cache
123 |
124 | # Next.js build output
125 | .next
126 | out
127 |
128 | # Nuxt.js build / generate output
129 | .nuxt
130 | dist
131 |
132 | # Gatsby files
133 | .cache/
134 | # Comment in the public line in if your project uses Gatsby and not Next.js
135 | # https://nextjs.org/blog/next-9-1#public-directory-support
136 | # public
137 |
138 | # vuepress build output
139 | .vuepress/dist
140 |
141 | # vuepress v2.x temp and cache directory
142 | .temp
143 |
144 | # Docusaurus cache and generated files
145 | .docusaurus
146 |
147 | # Serverless directories
148 | .serverless/
149 |
150 | # FuseBox cache
151 | .fusebox/
152 |
153 | # DynamoDB Local files
154 | .dynamodb/
155 |
156 | # TernJS port file
157 | .tern-port
158 |
159 | # Stores VSCode versions used for testing VSCode extensions
160 | .vscode-test
161 |
162 | # yarn v2
163 | .yarn/cache
164 | .yarn/unplugged
165 | .yarn/build-state.yml
166 | .yarn/install-state.gz
167 | .pnp.*
168 |
169 | ### Node Patch ###
170 | # Serverless Webpack directories
171 | .webpack/
172 |
173 | # Optional stylelint cache
174 |
175 | # SvelteKit build / generate output
176 | .svelte-kit
177 |
178 | # End of https://www.toptal.com/developers/gitignore/api/node,nextjs
--------------------------------------------------------------------------------
/dashboard/Dockerfile:
--------------------------------------------------------------------------------
1 | # syntax = docker/dockerfile:1
2 |
3 | # Adjust NODE_VERSION as desired
4 | ARG NODE_VERSION=20.6.1
5 | FROM node:${NODE_VERSION}-slim AS base
6 |
7 | LABEL fly_launch_runtime="Next.js"
8 |
9 | # Next.js app lives here
10 | WORKDIR /app
11 |
12 | # Throw-away build stage to reduce size of final image
13 | FROM base AS build
14 |
15 | # Install packages needed to build node modules
16 | RUN apt-get update -qq && \
17 | apt-get install -y build-essential pkg-config python-is-python3
18 |
19 | # Install node modules
20 | COPY --link package-lock.json package.json ./
21 | RUN npm ci --include=dev
22 |
23 | # Copy application code
24 | COPY --link . .
25 |
26 | # Set production environment
27 | ENV NODE_ENV="production"
28 | ARG NEXT_PUBLIC_SERVER_URL="https://url-to-your-server.com"
29 | ARG NEXT_PUBLIC_INSTANCE_TITLE="Professorship of Environmental Sensing and Modeling"
30 |
31 | # Build application
32 | RUN npm run build
33 |
34 | # Remove development dependencies
35 | RUN npm prune --omit=dev
36 |
37 | # Final stage for app image
38 | FROM base
39 |
40 | # Copy built application
41 | COPY --from=build /app /app
42 |
43 | # Start the server by default; this can be overridden at runtime
44 | EXPOSE 3000
45 | CMD [ "npm", "run", "start" ]
46 |
--------------------------------------------------------------------------------
/dashboard/README.md:
--------------------------------------------------------------------------------
1 | # Tenta Dashboard
2 |
3 | Built using NextJS 13, TypeScript, TailwindCSS and ShadcnUI.
4 |
5 | Run the development server with:
6 |
7 | ```bash
8 | npm run dev
9 | ```
10 |
--------------------------------------------------------------------------------
/dashboard/components.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://ui.shadcn.com/schema.json",
3 | "style": "new-york",
4 | "rsc": false,
5 | "tsx": true,
6 | "tailwind": {
7 | "config": "tailwind.config.js",
8 | "css": "src/app/globals.css",
9 | "baseColor": "slate",
10 | "cssVariables": true
11 | },
12 | "aliases": {
13 | "components": "@/components",
14 | "utils": "@/lib/utils"
15 | }
16 | }
--------------------------------------------------------------------------------
/dashboard/next.config.js:
--------------------------------------------------------------------------------
1 | /** @type {import('next').NextConfig} */
2 | const nextConfig = {
3 | images: { unoptimized: true }
4 | }
5 |
6 | module.exports = nextConfig
7 |
--------------------------------------------------------------------------------
/dashboard/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "tenta-dashboard",
3 | "version": "0.1.0",
4 | "private": true,
5 | "scripts": {
6 | "dev": "export NEXT_PUBLIC_BUILD_TIMESTAMP=$(date +%s) && export NEXT_PUBLIC_COMMIT_SHA=$(git rev-parse HEAD) && export NEXT_PUBLIC_BRANCH_NAME=$(git branch --show-current) && next dev",
7 | "build": "next build",
8 | "start": "next start",
9 | "lint": "next lint"
10 | },
11 | "dependencies": {
12 | "@radix-ui/react-dialog": "^1.0.4",
13 | "@radix-ui/react-icons": "^1.3.0",
14 | "@radix-ui/react-label": "^2.0.2",
15 | "@radix-ui/react-select": "^1.2.2",
16 | "@radix-ui/react-slot": "^1.0.2",
17 | "@radix-ui/react-tabs": "^1.0.4",
18 | "@radix-ui/react-tooltip": "^1.0.6",
19 | "@tabler/icons-react": "^2.32.0",
20 | "@types/d3": "^7.4.0",
21 | "@types/date-fns": "^2.6.0",
22 | "@types/js-cookie": "^3.0.3",
23 | "@types/lodash": "^4.14.197",
24 | "@types/node": "20.5.7",
25 | "@types/react": "18.2.21",
26 | "@types/react-dom": "18.2.7",
27 | "autoprefixer": "10.4.15",
28 | "axios": "^1.5.0",
29 | "class-variance-authority": "^0.7.0",
30 | "clsx": "^2.0.0",
31 | "d3": "^7.8.5",
32 | "date-fns": "^2.30.0",
33 | "eslint": "8.48.0",
34 | "eslint-config-next": "13.4.19",
35 | "js-cookie": "^3.0.5",
36 | "lodash": "^4.17.21",
37 | "next": "^14.0.2",
38 | "postcss": "8.4.28",
39 | "prettier": "^3.0.3",
40 | "react": "18.2.0",
41 | "react-dom": "18.2.0",
42 | "react-hot-toast": "^2.4.1",
43 | "swr": "^2.2.2",
44 | "tailwind-merge": "^1.14.0",
45 | "tailwindcss": "3.3.3",
46 | "tailwindcss-animate": "^1.0.7",
47 | "typescript": "5.2.2",
48 | "zod": "^3.22.2"
49 | },
50 | "devDependencies": {
51 | "@flydotio/dockerfile": "^0.4.10"
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/dashboard/postcss.config.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | plugins: {
3 | tailwindcss: {},
4 | autoprefixer: {},
5 | },
6 | }
7 |
--------------------------------------------------------------------------------
/dashboard/public/tenta-artwork-recolored.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iterize/tenta/48274dd016049a9cb4202c7cb7aebf861a8d50ce/dashboard/public/tenta-artwork-recolored.png
--------------------------------------------------------------------------------
/dashboard/public/tenta-banner-dashboard-og-1024-512.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iterize/tenta/48274dd016049a9cb4202c7cb7aebf861a8d50ce/dashboard/public/tenta-banner-dashboard-og-1024-512.png
--------------------------------------------------------------------------------
/dashboard/public/tenta-banner-dashboard-og-1200-630.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iterize/tenta/48274dd016049a9cb4202c7cb7aebf861a8d50ce/dashboard/public/tenta-banner-dashboard-og-1200-630.png
--------------------------------------------------------------------------------
/dashboard/public/tenta-favicon-1024.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iterize/tenta/48274dd016049a9cb4202c7cb7aebf861a8d50ce/dashboard/public/tenta-favicon-1024.png
--------------------------------------------------------------------------------
/dashboard/public/tenta-favicon-512.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iterize/tenta/48274dd016049a9cb4202c7cb7aebf861a8d50ce/dashboard/public/tenta-favicon-512.png
--------------------------------------------------------------------------------
/dashboard/src/.gitignore:
--------------------------------------------------------------------------------
1 | !lib
--------------------------------------------------------------------------------
/dashboard/src/app/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iterize/tenta/48274dd016049a9cb4202c7cb7aebf861a8d50ce/dashboard/src/app/favicon.ico
--------------------------------------------------------------------------------
/dashboard/src/app/globals.css:
--------------------------------------------------------------------------------
1 | @tailwind base;
2 | @tailwind components;
3 | @tailwind utilities;
4 |
5 | @layer base {
6 | :root {
7 | --background: 0 0% 100%;
8 | --foreground: 222.2 84% 4.9%;
9 |
10 | --card: 0 0% 100%;
11 | --card-foreground: 222.2 84% 4.9%;
12 |
13 | --popover: 0 0% 100%;
14 | --popover-foreground: 222.2 84% 4.9%;
15 |
16 | --primary: 222.2 47.4% 11.2%;
17 | --primary-foreground: 210 40% 98%;
18 |
19 | --secondary: 210 40% 96.1%;
20 | --secondary-foreground: 222.2 47.4% 11.2%;
21 |
22 | --muted: 210 40% 96.1%;
23 | --muted-foreground: 215.4 16.3% 46.9%;
24 |
25 | --accent: 210 40% 96.1%;
26 | --accent-foreground: 222.2 47.4% 11.2%;
27 |
28 | --destructive: 0 84.2% 60.2%;
29 | --destructive-foreground: 210 40% 98%;
30 |
31 | --border: 214.3 31.8% 91.4%;
32 | --input: 214.3 31.8% 91.4%;
33 | --ring: 222.2 84% 4.9%;
34 |
35 | --radius: 0.5rem;
36 | }
37 |
38 | .dark {
39 | --background: 222.2 84% 4.9%;
40 | --foreground: 210 40% 98%;
41 |
42 | --card: 222.2 84% 4.9%;
43 | --card-foreground: 210 40% 98%;
44 |
45 | --popover: 222.2 84% 4.9%;
46 | --popover-foreground: 210 40% 98%;
47 |
48 | --primary: 210 40% 98%;
49 | --primary-foreground: 222.2 47.4% 11.2%;
50 |
51 | --secondary: 217.2 32.6% 17.5%;
52 | --secondary-foreground: 210 40% 98%;
53 |
54 | --muted: 217.2 32.6% 17.5%;
55 | --muted-foreground: 215 20.2% 65.1%;
56 |
57 | --accent: 217.2 32.6% 17.5%;
58 | --accent-foreground: 210 40% 98%;
59 |
60 | --destructive: 0 62.8% 30.6%;
61 | --destructive-foreground: 210 40% 98%;
62 |
63 | --border: 217.2 32.6% 17.5%;
64 | --input: 217.2 32.6% 17.5%;
65 | --ring: 212.7 26.8% 83.9%;
66 | }
67 | }
68 |
69 | @layer base {
70 | * {
71 | @apply border-border;
72 | }
73 | body {
74 | @apply bg-background text-foreground;
75 | }
76 | }
77 |
78 | .background-paper-pattern {
79 | z-index: -1;
80 | background-size: 8rem 1.6rem;
81 | background-position: -1px -1px;
82 | background-color: #ffffff;
83 |
84 | background-image: url("data:image/svg+xml,%3Csvg width='100' height='20' viewBox='0 0 100 20' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath d='M21.184 20c.357-.13.72-.264 1.088-.402l1.768-.661C33.64 15.347 39.647 14 50 14c10.271 0 15.362 1.222 24.629 4.928.955.383 1.869.74 2.75 1.072h6.225c-2.51-.73-5.139-1.691-8.233-2.928C65.888 13.278 60.562 12 50 12c-10.626 0-16.855 1.397-26.66 5.063l-1.767.662c-2.475.923-4.66 1.674-6.724 2.275h6.335zm0-20C13.258 2.892 8.077 4 0 4V2c5.744 0 9.951-.574 14.85-2h6.334zM77.38 0C85.239 2.966 90.502 4 100 4V2c-6.842 0-11.386-.542-16.396-2h-6.225zM0 14c8.44 0 13.718-1.21 22.272-4.402l1.768-.661C33.64 5.347 39.647 4 50 4c10.271 0 15.362 1.222 24.629 4.928C84.112 12.722 89.438 14 100 14v-2c-10.271 0-15.362-1.222-24.629-4.928C65.888 3.278 60.562 2 50 2 39.374 2 33.145 3.397 23.34 7.063l-1.767.662C13.223 10.84 8.163 12 0 12v2z' fill='%23e2e8f0' fill-opacity='0.4' fill-rule='evenodd'/%3E%3C/svg%3E");
85 | }
86 |
--------------------------------------------------------------------------------
/dashboard/src/app/layout.tsx:
--------------------------------------------------------------------------------
1 | import "./globals.css";
2 | import type { Metadata } from "next";
3 | import { Rubik } from "next/font/google";
4 | import { SWRProvider } from "@/app/swr-provider";
5 | import { NavigationBar } from "@/components/custom/navigation-bar";
6 | import { Toaster } from "react-hot-toast";
7 |
8 | const rubik = Rubik({ subsets: ["latin"], display: "swap" });
9 |
10 | export const metadata: Metadata = {
11 | metadataBase: new URL("https://someridiculousdomaintogetridofthaterror.com"),
12 | title: "Tenta Dashboard",
13 | description: "Remote and real-time management of distributed sensor networks",
14 | openGraph: {
15 | type: "website",
16 | locale: "en_IE",
17 | url: "https://github.com/iterize/tenta",
18 | title: "Tenta Dashboard",
19 | description:
20 | "Remote and real-time management of distributed sensor networks",
21 | },
22 | };
23 |
24 | export default function RootLayout({
25 | children,
26 | }: {
27 | children: React.ReactNode;
28 | }) {
29 | return (
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
43 |
44 |
45 |
46 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 | {children}
59 |
60 |
61 |
62 |
63 |
64 | );
65 | }
66 |
--------------------------------------------------------------------------------
/dashboard/src/app/login/page.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import { use, useState } from "react";
4 | import { Input } from "@/components/ui/input";
5 | import { Button } from "@/components/ui/button";
6 | import { useUser } from "@/requests/user";
7 | import { AuthLoadingScreen } from "@/components/custom/auth-loading-screen";
8 | import { redirect } from "next/navigation";
9 | import Link from "next/link";
10 | import toast from "react-hot-toast";
11 | import { TheTenta } from "@/components/custom/the-tenta";
12 | import { useStatus } from "@/requests/status";
13 |
14 | export default function Page() {
15 | const [username, setUsername] = useState("");
16 | const [password, setPassword] = useState("");
17 | const [isSubmitting, setIsSubmitting] = useState(false);
18 |
19 | const { userData, userDataIsloading, loginUser } = useUser();
20 |
21 | const serverStatus = useStatus();
22 |
23 | async function submit() {
24 | setIsSubmitting(true);
25 | try {
26 | await toast.promise(loginUser(username, password), {
27 | loading: "Authenticating",
28 | success: "Successfully authenticated",
29 | error: "Failed to authenticate",
30 | });
31 | setUsername("");
32 | setPassword("");
33 | } catch (error) {
34 | console.error(error);
35 | } finally {
36 | setIsSubmitting(false);
37 | }
38 | }
39 |
40 | if (userDataIsloading || serverStatus === undefined) {
41 | return ;
42 | } else if (userData !== undefined) {
43 | redirect("/");
44 | }
45 |
46 | const contactEmail = process.env.NEXT_PUBLIC_CONTACT_EMAIL;
47 |
48 | return (
49 | <>
50 |
51 |
52 |
53 |
54 |
55 |
56 |
Login
57 |
setUsername(e.target.value)}
63 | />
64 |
setPassword(e.target.value)}
70 | />
71 |
72 |
76 | Sign up instead
77 |
78 |
79 |
86 |
87 | {contactEmail !== undefined && (
88 |
89 | If you have questions about this Tenta instance, please contact{" "}
90 |
91 | {contactEmail}
92 |
93 |
94 | )}
95 |
96 |
97 | >
98 | );
99 | }
100 |
--------------------------------------------------------------------------------
/dashboard/src/app/networks/[networkIdentifier]/page.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import { TheTenta } from "@/components/custom/the-tenta";
4 |
5 | export default function Page(props: { params: { networkIdentifier: string } }) {
6 | return (
7 |
8 |
9 | please select a sensor in the
10 | list
11 |
12 |
13 |
14 | );
15 | }
16 |
--------------------------------------------------------------------------------
/dashboard/src/app/networks/[networkIdentifier]/sensors/[sensorIdentifier]/.gitignore:
--------------------------------------------------------------------------------
1 | !logs/
--------------------------------------------------------------------------------
/dashboard/src/app/networks/[networkIdentifier]/sensors/[sensorIdentifier]/layout.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | export default function Page(props: {
4 | children: React.ReactNode;
5 | params: { networkIdentifier: string; sensorIdentifier: string };
6 | }) {
7 | return (
8 |
9 | {props.children}
10 |
11 | );
12 | }
13 |
--------------------------------------------------------------------------------
/dashboard/src/app/networks/[networkIdentifier]/sensors/[sensorIdentifier]/logs/page.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import { AuthLoadingScreen } from "@/components/custom/auth-loading-screen";
4 | import { useSensors } from "@/requests/sensors";
5 | import { useUser } from "@/requests/user";
6 | import { redirect } from "next/navigation";
7 | import { useEffect, useState } from "react";
8 | import { Pagination } from "@/components/custom/pagination";
9 | import { Button } from "@/components/ui/button";
10 | import toast from "react-hot-toast";
11 | import { formatDistanceToNow } from "date-fns";
12 | import { useLogs } from "@/requests/logs";
13 | import { IconDatabaseExclamation } from "@tabler/icons-react";
14 | import { ConfigRevisionTag } from "@/components/custom/config-revision-tag";
15 | import { Spinner } from "@/components/custom/spinner";
16 |
17 | export default function Page(props: {
18 | params: { networkIdentifier: string; sensorIdentifier: string };
19 | }) {
20 | const { userData, userDataIsloading, logoutUser } = useUser();
21 |
22 | const [currentPageNumber, setCurrentPageNumber] = useState(1);
23 |
24 | const { sensorsData } = useSensors(
25 | userData?.accessToken,
26 | logoutUser,
27 | props.params.networkIdentifier
28 | );
29 | const {
30 | logsData,
31 | logsDataFetchingState,
32 | numberOfLogsPages,
33 | fetchNewerLogs,
34 | fetchOlderLogs,
35 | } = useLogs(
36 | userData?.accessToken,
37 | logoutUser,
38 | props.params.networkIdentifier,
39 | props.params.sensorIdentifier
40 | );
41 | const [dataLoadingToastId, setDataLoadingToastId] = useState<
42 | string | undefined
43 | >();
44 |
45 | useEffect(() => {
46 | const interval = setInterval(() => {
47 | console.log(
48 | `fetching newer logs for sensor ${props.params.sensorIdentifier}`
49 | );
50 | fetchNewerLogs();
51 | }, 5000);
52 |
53 | return () => clearInterval(interval);
54 | });
55 |
56 | useEffect(() => {
57 | if (
58 | logsDataFetchingState === "user-fetching" &&
59 | dataLoadingToastId === undefined
60 | ) {
61 | setDataLoadingToastId(toast.loading("loading data"));
62 | }
63 | if (
64 | (logsDataFetchingState === "new data" ||
65 | logsDataFetchingState === "no new data") &&
66 | dataLoadingToastId !== undefined
67 | ) {
68 | setDataLoadingToastId(undefined);
69 | toast.success(logsDataFetchingState, {
70 | id: dataLoadingToastId,
71 | duration: 1500,
72 | });
73 | }
74 | }, [logsDataFetchingState, dataLoadingToastId]);
75 |
76 | // when new data is fetched, go to the last page
77 | useEffect(() => {
78 | setCurrentPageNumber(numberOfLogsPages);
79 | }, [numberOfLogsPages]);
80 |
81 | // when page is left, dismiss all toasts
82 | useEffect(() => {
83 | return () => toast.dismiss();
84 | }, []);
85 |
86 | if (userDataIsloading || sensorsData === undefined) {
87 | return ;
88 | } else if (userData === undefined) {
89 | redirect("/login");
90 | }
91 |
92 | const sensor = sensorsData?.find(
93 | (sensor) => sensor.identifier === props.params.sensorIdentifier
94 | );
95 |
96 | if (sensor === undefined) {
97 | return "unknown sensor id";
98 | }
99 |
100 | return (
101 | <>
102 |
103 |
104 |
105 |
106 |
107 |
Raw Logs
108 |
109 |
110 |
111 |
122 |
123 |
124 | {logsDataFetchingState === "background-fetching" && }
125 |
126 |
127 |
128 | {logsDataFetchingState !== "background-fetching" &&
129 | logsData.length === 0 && (
130 |
131 | no logs
132 |
133 | )}
134 | {logsData.map((log) => (
135 |
139 |
143 |
144 |
145 |
146 | {formatDistanceToNow(new Date(log.creationTimestamp * 1000), {
147 | addSuffix: true,
148 | })}
149 |
150 |
151 |
152 | {new Date(log.creationTimestamp * 1000).toISOString()}
153 |
154 |
155 |
156 |
157 |
165 | {log.severity}
166 | {" "}
167 | {log.message}
168 |
169 |
170 | ))}
171 |
172 | >
173 | );
174 | }
175 |
--------------------------------------------------------------------------------
/dashboard/src/app/networks/[networkIdentifier]/sensors/[sensorIdentifier]/measurements/page.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import { AuthLoadingScreen } from "@/components/custom/auth-loading-screen";
4 | import { useSensors } from "@/requests/sensors";
5 | import { useUser } from "@/requests/user";
6 | import { redirect } from "next/navigation";
7 | import { useEffect, useState } from "react";
8 | import { Pagination } from "@/components/custom/pagination";
9 | import { Button } from "@/components/ui/button";
10 | import { useMeasurements } from "@/requests/measurements";
11 | import toast from "react-hot-toast";
12 | import { formatDistanceToNow } from "date-fns";
13 | import { IconDatabaseSearch } from "@tabler/icons-react";
14 | import { ConfigRevisionTag } from "@/components/custom/config-revision-tag";
15 | import { Spinner } from "@/components/custom/spinner";
16 |
17 | export default function Page(props: {
18 | params: { networkIdentifier: string; sensorIdentifier: string };
19 | }) {
20 | const { userData, userDataIsloading, logoutUser } = useUser();
21 |
22 | const [currentPageNumber, setCurrentPageNumber] = useState(1);
23 |
24 | const { sensorsData } = useSensors(
25 | userData?.accessToken,
26 | logoutUser,
27 | props.params.networkIdentifier
28 | );
29 | const {
30 | measurementsData,
31 | measurementsDataFetchingState,
32 | numberOfMeasurementsPages,
33 | fetchNewerMeasurements,
34 | fetchOlderMeasurements,
35 | } = useMeasurements(
36 | userData?.accessToken,
37 | logoutUser,
38 | props.params.networkIdentifier,
39 | props.params.sensorIdentifier
40 | );
41 | const [dataLoadingToastId, setDataLoadingToastId] = useState<
42 | string | undefined
43 | >();
44 |
45 | useEffect(() => {
46 | const interval = setInterval(() => {
47 | console.log(
48 | `fetching newer measurements for sensor ${props.params.sensorIdentifier}`
49 | );
50 | fetchNewerMeasurements();
51 | }, 5000);
52 |
53 | return () => clearInterval(interval);
54 | });
55 |
56 | useEffect(() => {
57 | if (
58 | measurementsDataFetchingState === "user-fetching" &&
59 | dataLoadingToastId === undefined
60 | ) {
61 | setDataLoadingToastId(toast.loading("loading data"));
62 | }
63 | if (
64 | (measurementsDataFetchingState === "new data" ||
65 | measurementsDataFetchingState === "no new data") &&
66 | dataLoadingToastId !== undefined
67 | ) {
68 | setDataLoadingToastId(undefined);
69 | toast.success(measurementsDataFetchingState, {
70 | id: dataLoadingToastId,
71 | duration: 1500,
72 | });
73 | }
74 | }, [measurementsDataFetchingState, dataLoadingToastId, measurementsData]);
75 |
76 | // when new data is fetched, go to the last page
77 | useEffect(() => {
78 | setCurrentPageNumber(numberOfMeasurementsPages);
79 | }, [numberOfMeasurementsPages]);
80 |
81 | // when page is left, dismiss all toasts
82 | useEffect(() => {
83 | return () => toast.dismiss();
84 | }, []);
85 |
86 | if (userDataIsloading || sensorsData === undefined) {
87 | return ;
88 | } else if (userData === undefined) {
89 | redirect("/login");
90 | }
91 |
92 | const sensor = sensorsData?.find(
93 | (sensor) => sensor.identifier === props.params.sensorIdentifier
94 | );
95 |
96 | if (sensor === undefined) {
97 | return "unknown sensor id";
98 | }
99 |
100 | return (
101 | <>
102 |
103 |
104 |
105 |
106 |
107 |
108 | Raw Measurements
109 |
110 |
111 |
112 |
113 |
124 |
125 |
126 | {measurementsDataFetchingState === "background-fetching" && (
127 |
128 | )}
129 |
130 |
131 |
132 | {measurementsDataFetchingState !== "background-fetching" &&
133 | measurementsData.length === 0 && (
134 |
135 | no measurements
136 |
137 | )}
138 | {measurementsData
139 | .slice((currentPageNumber - 1) * 64, currentPageNumber * 64)
140 | .map((measurement) => (
141 |
145 |
149 |
150 |
151 |
152 | {formatDistanceToNow(
153 | new Date(measurement.creationTimestamp * 1000),
154 | {
155 | addSuffix: true,
156 | }
157 | )}
158 |
159 |
160 |
161 | {new Date(
162 | measurement.creationTimestamp * 1000
163 | ).toISOString()}
164 |
165 |
166 |
167 |
168 | {Object.entries(measurement.value).map(([key, value]) => (
169 |
173 |
174 | {key}:
175 |
176 |
{value}
177 |
178 | ))}
179 |
180 |
181 | ))}
182 |
183 | >
184 | );
185 | }
186 |
--------------------------------------------------------------------------------
/dashboard/src/app/networks/[networkIdentifier]/sensors/[sensorIdentifier]/plots/page.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import { AuthLoadingScreen } from "@/components/custom/auth-loading-screen";
4 | import { useMeasurementsAggregation } from "@/requests/measurements-aggregation";
5 | import { useUser } from "@/requests/user";
6 | import { redirect } from "next/navigation";
7 | import { useEffect, useRef } from "react";
8 | import * as d3 from "d3";
9 | import { maxBy, minBy, range } from "lodash";
10 | import { IconChartHistogram } from "@tabler/icons-react";
11 |
12 | export default function Page(props: {
13 | params: { networkIdentifier: string; sensorIdentifier: string };
14 | }) {
15 | const { userData, userDataIsloading, logoutUser } = useUser();
16 | const { measurementsAggregationData } = useMeasurementsAggregation(
17 | userData?.accessToken,
18 | logoutUser,
19 | props.params.networkIdentifier,
20 | props.params.sensorIdentifier
21 | );
22 |
23 | if (userDataIsloading || measurementsAggregationData === undefined) {
24 | return ;
25 | } else if (userData === undefined) {
26 | redirect("/login");
27 | }
28 |
29 | console.log(measurementsAggregationData);
30 |
31 | return (
32 | <>
33 |
34 |
35 |
36 |
37 |
38 |
39 | Plots
40 | last 4 weeks
41 |
42 |
43 |
44 |
Plot times in UTC
45 |
46 | {Object.keys(measurementsAggregationData)
47 | .sort()
48 | .map((key) => (
49 |
54 | ))}
55 | {Object.keys(measurementsAggregationData).length === 0 && (
56 | no measurements
57 | )}
58 | >
59 | );
60 | }
61 |
// Renders one d3 line/scatter plot for a single measurement key, covering the
// last ~29 days of aggregated data. All times are handled as UTC unix seconds.
// NOTE(review): the return-value JSX was stripped by the extraction tool; the
// effect below draws into an <svg> referenced by plotRef — confirm against the
// original file before relying on the markup.
62 | function MeasurementAggregationPlot(props: {
63 | label: string;
64 | data: { average: number; bucketTimestamp: number }[];
65 | }) {
// Ref to the SVG element the d3 effect draws into.
66 | const plotRef = useRef(null);
67 |
// Redraws the whole plot from scratch whenever the data changes.
68 | useEffect(() => {
69 | const svg = d3.select(plotRef.current);
70 |
// Compute the next UTC midnight (in unix seconds) by stripping the
// current time-of-day from "now" and adding one day.
71 | const now = new Date();
72 | const nextUTCMidnightTimestamp =
73 | Math.floor(now.getTime() / 1000) -
74 | now.getUTCSeconds() -
75 | now.getUTCMinutes() * 60 -
76 | now.getUTCHours() * 3600 +
77 | 24 * 3600;
78 |
// X domain: 29 days back from the next UTC midnight, with a 2h margin
// (7200 s) on both ends.
79 | const maxX = nextUTCMidnightTimestamp + 7200;
80 | const minX = nextUTCMidnightTimestamp - 29 * 24 * 3600 - 7200;
81 |
// Y domain from the data extremes (lodash minBy/maxBy return undefined
// on an empty array).
82 | let minY = minBy(props.data, (d) => d.average)?.average;
83 | let maxY = maxBy(props.data, (d) => d.average)?.average;
84 |
// No data -> leave the SVG empty (previous render's content, if any,
// is not cleared in this case).
85 | if (minY === undefined || maxY === undefined) {
86 | return;
87 | }
88 |
// Pad the Y domain by 10% on each side so points don't touch the edges.
89 | const dy = maxY - minY;
90 | minY -= dy * 0.1;
91 | maxY += dy * 0.1;
92 |
// Pixel ranges: x spans 65..1050, y is inverted (130 bottom .. 10 top).
93 | const xScale = d3.scaleLinear([minX, maxX], [65, 1050]);
94 | const yScale = d3.scaleLinear([minY, maxY], [130, 10]);
95 |
// Full redraw: wipe everything drawn by the previous effect run.
96 | svg.selectAll("*").remove();
97 |
// One tick per UTC midnight across the visible range.
98 | const utcMidnightTimestamps = range(minX + 7200, maxX, 24 * 3600);
99 |
// Major vertical grid lines at every UTC midnight.
100 | svg
101 | .append("g")
102 | .attr("class", "major-x-tick-lines text-slate-300 z-0")
103 | .selectAll("line")
104 | .data(utcMidnightTimestamps)
105 | .enter()
106 | .append("line")
107 | .attr("x1", (d) => xScale(d))
108 | .attr("x2", (d) => xScale(d))
109 | .attr("y1", yScale(minY))
110 | .attr("y2", yScale(maxY))
111 | .attr("stroke", "currentColor");
112 |
// Minor vertical grid lines every 6 hours, skipping the midnights that
// already have a major line.
113 | svg
114 | .append("g")
115 | .attr("class", "minor-x-tick-lines text-slate-150 z-0")
116 | .selectAll("line")
117 | .data(
118 | range(minX + 3600 * 2, maxX, 6 * 3600).filter(
119 | (d) => !utcMidnightTimestamps.includes(d)
120 | )
121 | )
122 | .enter()
123 | .append("line")
124 | .attr("x1", (d) => xScale(d))
125 | .attr("x2", (d) => xScale(d))
126 | .attr("y1", yScale(minY))
127 | .attr("y2", yScale(maxY))
128 | .attr("stroke", "currentColor");
129 |
// Date labels ("Oct 3" style) every 3 days, centered at UTC noon.
130 | svg
131 | .append("g")
132 | .attr(
133 | "class",
134 | "major-x-tick-labels text-slate-600 z-10 text-xs font-medium"
135 | )
136 | .selectAll("text")
137 | .data(
138 | range(minX + 3600 + 12 * 3600, maxX - 3599 - 12 * 3600, 3 * 24 * 3600)
139 | )
140 | .enter()
141 | .append("text")
142 | .text((d) =>
143 | new Date(d * 1000).toLocaleDateString("en-US", {
144 | month: "short",
145 | day: "numeric",
146 | })
147 | )
148 | .attr("x", (d) => xScale(d))
149 | .attr("y", 147)
150 | .attr("text-anchor", "middle")
151 | .attr("fill", "currentColor");
152 |
// Roughly 5 horizontal ticks chosen by d3 from the y domain.
153 | const yTicks = yScale.ticks(5);
154 |
// Horizontal grid lines for each y tick.
155 | svg
156 | .append("g")
157 | .attr("class", "y-tick-lines text-slate-300 z-0")
158 | .selectAll("line")
159 | .data(yTicks)
160 | .enter()
161 | .append("line")
162 | .attr("x1", xScale(minX - 1 * 3600))
163 | .attr("x2", xScale(maxX - 2 * 3600))
164 | .attr("y1", (d) => yScale(d))
165 | .attr("y2", (d) => yScale(d))
166 | .attr("stroke", "currentColor");
167 |
// Numeric y-axis labels, right-aligned left of the plot area.
168 | svg
169 | .append("g")
170 | .attr(
171 | "class",
172 | "y-tick-labels text-slate-600 z-10 text-xs font-medium font-mono"
173 | )
174 | .selectAll("text")
175 | .data(yTicks)
176 | .enter()
177 | .append("text")
178 | .text((d) => d.toPrecision(4))
179 | .attr("x", 60)
180 | .attr("y", (d) => yScale(d) + 4)
181 | .attr("text-anchor", "end")
182 | .attr("fill", "currentColor");
183 |
// One small circle per aggregated data point.
184 | svg
185 | .append("g")
186 | .attr("class", "data-point-circles text-slate-900 z-10")
187 | .selectAll("circle")
188 | .data(props.data)
189 | .enter()
190 | .append("circle")
191 | .attr("r", 1.25)
192 | .attr("cx", (d) => xScale(d.bucketTimestamp))
193 | .attr("cy", (d) => yScale(d.average))
194 | .attr("fill", "currentColor");
195 |
// Vertical "now" marker at the current wall-clock time.
196 | svg
197 | .append("line")
198 | .attr("class", "current-time-line z-10 stroke-rose-500")
199 | .attr("x1", xScale(now.getTime() / 1000))
200 | .attr("x2", xScale(now.getTime() / 1000))
201 | .attr("y1", yScale(minY) + 2)
202 | .attr("y2", yScale(maxY) - 2)
203 | .attr("stroke-width", 2.5)
204 | .attr("stroke-linecap", "round");
205 |
// Text label for the "now" marker, anchored just left of the line.
206 | svg
207 | .append("text")
208 | .attr(
209 | "class",
210 | "current-time-label z-10 text-rose-500 text-[0.65rem] font-semibold"
211 | )
212 | .text("now")
213 | .attr("x", xScale(now.getTime() / 1000) - 5)
214 | .attr("y", yScale(maxY) - 1)
215 | .attr("text-anchor", "end")
216 | .attr("fill", "currentColor");
// NOTE(review): plotRef in the dependency array is a stable ref object and
// never triggers a rerun; effectively this effect reruns on props.data only.
217 | }, [props.data, plotRef]);
218 |
// JSX below was stripped by the extraction; it labels the plot with
// props.label and renders the target <svg> — TODO confirm against original.
219 | return (
220 |
221 |
222 | {props.label}
223 |
224 |
225 |
226 |
227 |
228 | );
229 | }
230 |
--------------------------------------------------------------------------------
/dashboard/src/app/offline/page.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import { useStatus } from "@/requests/status";
4 | import Link from "next/link";
5 | import { redirect } from "next/navigation";
6 |
7 | export default function Page() {
8 | const serverStatus = useStatus();
9 |
10 | if (serverStatus !== undefined) {
11 | redirect("/login");
12 | }
13 |
14 | return (
15 |
16 |
17 |
18 | Server is Offline
19 |
20 |
21 | Could not reach Tenta server at{" "}
22 |
23 | {process.env.NEXT_PUBLIC_SERVER_URL}
24 |
25 | . Read the Tenta documentation about deployment at{" "}
26 |
31 | tenta.onrender.com/deployment
32 |
33 |
34 |
35 | );
36 | }
37 |
--------------------------------------------------------------------------------
/dashboard/src/app/signup/page.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import { useState } from "react";
4 | import { Input } from "@/components/ui/input";
5 | import { Button } from "@/components/ui/button";
6 | import { useUser } from "@/requests/user";
7 | import { AuthLoadingScreen } from "@/components/custom/auth-loading-screen";
8 | import { redirect } from "next/navigation";
9 | import Link from "next/link";
10 | import toast from "react-hot-toast";
11 | import { TheTenta } from "@/components/custom/the-tenta";
12 |
13 | export default function Page() {
14 | const [username, setUsername] = useState("");
15 | const [password, setPassword] = useState("");
16 | const [passwordConfirmation, setPasswordConfirmation] = useState("");
17 |
18 | const [isSubmitting, setIsSubmitting] = useState(false);
19 |
20 | const { userData, userDataIsloading, signupUser } = useUser();
21 |
22 | async function submit() {
23 | if (password !== passwordConfirmation) {
24 | toast.error("Passwords do not match");
25 | return;
26 | }
27 |
28 | setIsSubmitting(true);
29 | try {
30 | await toast.promise(signupUser(username, password), {
31 | loading: "Creating new account",
32 | success: "Successfully created new account",
33 | error: "Username already exists",
34 | });
35 | setUsername("");
36 | setPassword("");
37 | } catch (error) {
38 | console.error(error);
39 | } finally {
40 | setIsSubmitting(false);
41 | }
42 | }
43 |
44 | if (userDataIsloading) {
45 | return ;
46 | } else if (userData !== undefined) {
47 | redirect("/");
48 | }
49 |
50 | const contactEmail = process.env.NEXT_PUBLIC_CONTACT_EMAIL;
51 |
52 | return (
53 | <>
54 |
55 |
56 |
57 |
58 |
59 |
60 |
Signup
61 |
setUsername(e.target.value)}
67 | />
68 |
setPassword(e.target.value)}
75 | />
76 |
setPasswordConfirmation(e.target.value)}
83 | />
84 |
85 |
89 | Log in instead
90 |
91 |
92 |
99 |
100 | {contactEmail !== undefined && (
101 |
102 | If you have questions about this Tenta instance, please contact{" "}
103 |
104 | {contactEmail}
105 |
106 |
107 | )}
108 |
109 |
110 | >
111 | );
112 | }
113 |
--------------------------------------------------------------------------------
/dashboard/src/app/style/page.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import { AuthLoadingScreen } from "@/components/custom/auth-loading-screen";
4 | import { NavigationBar } from "@/components/custom/navigation-bar";
5 | import { Button } from "@/components/ui/button";
6 | import { useUser } from "@/requests/user";
7 | import { redirect } from "next/navigation";
8 |
// Internal style-guide page: renders swatch rows for the theme palette
// (visible row headings: blue, red, orange, yellow, eggshell).
// NOTE(review): virtually all JSX markup in this component was stripped by
// the extraction tool — only the row headings survive. The imported
// AuthLoadingScreen/useUser/redirect/Button are presumably used in the
// stripped markup; confirm against the original file.
9 | export default function Page() {
10 | return (
11 |
12 |
13 |
Color `blue`
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
Color `red`
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
Color `orange`
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
Color `yellow`
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
Color `eggshell`
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 | );
74 | }
75 |
--------------------------------------------------------------------------------
/dashboard/src/app/swr-provider.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import { SWRConfig } from "swr";
4 |
// Client-side provider that wraps the app subtree so SWR hooks share one
// configuration/cache context.
// NOTE(review): the <SWRConfig ...> wrapper element was stripped from this
// dump (only `{props.children}` survives) — confirm props against original.
5 | export const SWRProvider = (props: { children: React.ReactNode }) => {
6 | return {props.children};
7 | };
8 |
--------------------------------------------------------------------------------
/dashboard/src/components/custom/auth-loading-screen.tsx:
--------------------------------------------------------------------------------
// Full-screen placeholder shown while user/auth data is still loading;
// used by pages before deciding between content and a /login redirect.
// NOTE(review): the JSX tags were stripped from this dump; only the
// "loading the application" text node survives.
1 | export function AuthLoadingScreen() {
2 | return (
3 |
4 |
5 | loading the application
6 |
7 |
8 | );
9 | }
10 |
--------------------------------------------------------------------------------
/dashboard/src/components/custom/config-revision-tag.tsx:
--------------------------------------------------------------------------------
1 | import {
2 | Tooltip,
3 | TooltipContent,
4 | TooltipProvider,
5 | TooltipTrigger,
6 | } from "@/components/ui/tooltip";
7 | import { IconFileSettings } from "@tabler/icons-react";
8 |
9 | export function ConfigRevisionTag(props: {
10 | revision: number | null;
11 | to_revision?: number | null;
12 | }) {
13 | const noRevision =
14 | props.to_revision === undefined
15 | ? props.revision === null
16 | : props.revision === null && props.to_revision === null;
17 |
18 | return (
19 |
20 |
21 |
22 |
30 | <>
31 | {" "}
39 | {(noRevision || props.to_revision === undefined) &&
40 | (props.revision === null ? "-" : props.revision)}
41 | {!noRevision && props.to_revision !== undefined && (
42 | <>
43 | from {props.to_revision === null ? "-" : props.to_revision} to{" "}
44 | {props.to_revision === null ? "-" : props.to_revision}
45 | >
46 | )}
47 | >
48 |
49 |
50 |
51 |
52 | {noRevision
53 | ? "No Config Revision"
54 | : `Config Revision ${props.revision}` +
55 | (props.to_revision !== undefined
56 | ? ` to ${props.to_revision}`
57 | : "")}
58 |
59 |
60 |
61 |
62 | );
63 | }
64 |
--------------------------------------------------------------------------------
/dashboard/src/components/custom/creation-dialog.tsx:
--------------------------------------------------------------------------------
1 | import {
2 | Dialog,
3 | DialogContent,
4 | DialogFooter,
5 | DialogHeader,
6 | DialogTitle,
7 | DialogTrigger,
8 | } from "@/components/ui/dialog";
9 | import { useEffect, useState } from "react";
10 | import toast from "react-hot-toast";
11 | import { Label } from "@/components/ui/label";
12 | import { Input } from "@/components/ui/input";
13 | import { IconCircleCheckFilled, IconCircleDashed } from "@tabler/icons-react";
14 | import { Button } from "@/components/ui/button";
15 |
16 | export function CreationDialog(props: {
17 | action: "create" | "update";
18 | label: "sensor" | "network";
19 | submit: (name: string) => Promise;
20 | onSuccess?: (newIdentifier: string) => void;
21 | children: React.ReactNode;
22 | previousValue?: string;
23 | }) {
24 | const [name, setName] = useState("");
25 | const [isSubmitting, setIsSubmitting] = useState(false);
26 | const [isOpen, setIsOpen] = useState(false);
27 |
28 | const rules = [
29 | {
30 | label: "at least one character",
31 | valid: name.length > 0,
32 | },
33 | {
34 | label: "max. 64 characters",
35 | valid: name.length <= 64,
36 | },
37 | {
38 | label: "only lowercase letters/numbers/ dashes",
39 | valid: name.match(/^[a-z0-9-]*$/) !== null,
40 | },
41 | {
42 | label: "no leading/trailing/consecutive dashes",
43 | valid:
44 | name.match(/--/) === null &&
45 | name.match(/^-/) === null &&
46 | name.match(/-$/) === null,
47 | },
48 | ];
49 |
50 | const formatIsValid = rules.every((rule) => rule.valid);
51 |
52 | async function submit() {
53 | if (!formatIsValid) {
54 | toast.error(`Invalid ${props.label} name`);
55 | return;
56 | }
57 |
58 | setIsSubmitting(true);
59 | try {
60 | await toast.promise(props.submit(name), {
61 | loading: `${
62 | props.action.slice(0, 1).toUpperCase() +
63 | props.action.slice(1, -1) +
64 | "ing"
65 | } ${props.label}`,
66 | success: (data) => {
67 | if (props.onSuccess && typeof data === "string") {
68 | props.onSuccess(data);
69 | }
70 | setIsOpen(false);
71 | return `Successfully ${props.action + "d"} ${props.label}`;
72 | },
73 | error: `Could not ${props.action} ${props.label}`,
74 | });
75 | } catch (error) {
76 | console.error(error);
77 | } finally {
78 | setIsSubmitting(false);
79 | }
80 | }
81 |
82 | return (
83 |
152 | );
153 | }
154 |
--------------------------------------------------------------------------------
/dashboard/src/components/custom/navigation-bar.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import Link from "next/link";
4 | import { useUser } from "@/requests/user";
5 | import { Button } from "@/components/ui/button";
6 | import { IconRipple } from "@tabler/icons-react";
7 |
8 | export function NavigationBar() {
9 | const { userData, logoutUser } = useUser();
10 |
11 | return (
12 |
46 | );
47 | }
48 |
--------------------------------------------------------------------------------
/dashboard/src/components/custom/pagination.tsx:
--------------------------------------------------------------------------------
1 | import { IconChevronLeft, IconChevronRight } from "@tabler/icons-react";
2 | import { range } from "lodash";
3 | import { clsx } from "clsx";
4 | import { toast } from "react-hot-toast";
5 |
6 | export function Pagination(props: {
7 | currentPageNumber: number;
8 | numberOfPages: number;
9 | setCurrentPageNumber: (page: number) => void;
10 | noDataPlaceholder: string;
11 | }) {
12 | const showLeftDots = props.numberOfPages > 5 && props.currentPageNumber > 3;
13 | const showRightDots =
14 | props.numberOfPages > 5 &&
15 | props.currentPageNumber < props.numberOfPages - 2;
16 |
17 | let visiblePages: number[] = [];
18 | if (showLeftDots && !showRightDots) {
19 | visiblePages = range(props.numberOfPages - 3, props.numberOfPages + 1);
20 | } else if (!showLeftDots && showRightDots) {
21 | visiblePages = [1, 2, 3, 4];
22 | } else if (showLeftDots && showRightDots) {
23 | visiblePages = [
24 | props.currentPageNumber - 1,
25 | props.currentPageNumber,
26 | props.currentPageNumber + 1,
27 | ];
28 | } else {
29 | visiblePages = range(1, props.numberOfPages + 1);
30 | }
31 |
32 | return (
33 |
34 |
47 |
48 | {showLeftDots && (
49 |
50 | ...
51 |
52 | )}
53 |
54 | {visiblePages.length === 0 && (
55 |
56 | {props.noDataPlaceholder}
57 |
58 | )}
59 |
60 | {visiblePages.length > 0 && (
61 | <>
62 |
66 | {props.currentPageNumber}
67 |
68 | >
69 | )}
70 |
71 | {visiblePages.map((pageNumber) => (
72 |
83 | ))}
84 |
85 | {showRightDots && (
86 |
87 | ...
88 |
89 | )}
90 |
91 |
104 |
105 | );
106 | }
107 |
--------------------------------------------------------------------------------
/dashboard/src/components/custom/spinner.tsx:
--------------------------------------------------------------------------------
1 | export function Spinner() {
2 | return (
3 |
25 | );
26 | }
27 |
--------------------------------------------------------------------------------
/dashboard/src/components/custom/the-tenta.tsx:
--------------------------------------------------------------------------------
1 | import Image from "next/image";
2 |
3 | export function TheTenta(props: { className: string }) {
4 | return (
5 |
21 | );
22 | }
23 |
--------------------------------------------------------------------------------
/dashboard/src/components/custom/timestamp-label.tsx:
--------------------------------------------------------------------------------
1 | import {
2 | Tooltip,
3 | TooltipContent,
4 | TooltipProvider,
5 | TooltipTrigger,
6 | } from "@/components/ui/tooltip";
7 | import { formatDistanceToNow } from "date-fns";
8 |
// Shows "<label> <relative time>" (e.g. "updated 3 minutes ago") with the
// exact ISO-8601 UTC timestamp available on hover via a tooltip.
// `timestamp` is unix seconds or null; null renders "not <label> (yet)".
// NOTE(review): JSX tags were stripped from this dump — the tooltip
// structure (Tooltip/TooltipTrigger/TooltipContent) is inferred from the
// imports and surviving expressions; confirm against the original file.
9 | export function TimestampLabel(props: {
10 | label: string;
11 | timestamp: number | null;
12 | labelClassName?: string;
13 | }) {
// No timestamp yet: plain placeholder text, no tooltip.
14 | if (props.timestamp === null) {
15 | return (
16 |
17 | not {props.label} (yet)
18 |
19 | );
20 | } else {
21 | return (
22 |
23 |
24 |
25 |
26 | {props.label}{" "}
27 | {formatDistanceToNow(new Date(props.timestamp * 1000), {
28 | addSuffix: true,
29 | })}
30 |
31 |
32 | {new Date(props.timestamp * 1000).toISOString()}
33 |
34 |
35 |
36 |
37 | );
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/dashboard/src/components/ui/button.tsx:
--------------------------------------------------------------------------------
1 | import * as React from "react";
2 | import { Slot } from "@radix-ui/react-slot";
3 | import { cva, type VariantProps } from "class-variance-authority";
4 |
5 | import { cn } from "@/lib/utils";
6 |
// Tailwind class recipe for the Button component, built with
// class-variance-authority. First argument: classes shared by every
// button; `variants` defines the `variant` and `size` axes; callers pick
// values via <Button variant=... size=...> and both default to "default".
7 | const buttonVariants = cva(
8 | "inline-flex items-center justify-center rounded-md text-sm font-regular transition-colors focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring disabled:pointer-events-none disabled:opacity-50",
9 | {
10 | variants: {
11 | variant: {
12 | default:
13 | "bg-primary text-primary-foreground shadow hover:bg-primary/90",
14 | destructive:
15 | "bg-destructive text-destructive-foreground shadow-sm hover:bg-destructive/90",
16 | outline:
17 | "border border-input bg-transparent shadow-sm hover:bg-accent hover:text-accent-foreground",
18 | secondary:
19 | "bg-secondary text-secondary-foreground shadow-sm hover:bg-secondary/80",
20 | ghost: "hover:bg-accent hover:text-accent-foreground",
21 | link: "text-primary underline-offset-4 hover:underline",
22 | },
23 | size: {
24 | default: "h-8 px-4 py-2",
25 | sm: "h-8 rounded-md px-3 text-xs",
26 | lg: "h-10 rounded-md px-8",
27 | icon: "h-9 w-9",
28 | },
29 | },
30 | defaultVariants: {
31 | variant: "default",
32 | size: "default",
33 | },
34 | }
35 | );
36 |
// Props for Button: native button attributes plus the cva variant/size
// props, plus `asChild` (render via Radix Slot instead of a <button>).
// NOTE(review): the generic type arguments (e.g. <HTMLButtonElement> on
// ButtonHTMLAttributes, <typeof buttonVariants> on VariantProps) were
// stripped from this dump — restore from the original file.
37 | export interface ButtonProps
38 | extends React.ButtonHTMLAttributes,
39 | VariantProps {
40 | asChild?: boolean;
41 | }
42 |
43 | const Button = React.forwardRef(
44 | ({ className, variant, size, asChild = false, ...props }, ref) => {
45 | const Comp = asChild ? Slot : "button";
46 | return (
47 |
52 | );
53 | }
54 | );
55 | Button.displayName = "Button";
56 |
57 | export { Button, buttonVariants };
58 |
--------------------------------------------------------------------------------
/dashboard/src/components/ui/dialog.tsx:
--------------------------------------------------------------------------------
1 | import * as React from "react"
2 | import * as DialogPrimitive from "@radix-ui/react-dialog"
3 | import { Cross2Icon } from "@radix-ui/react-icons"
4 |
5 | import { cn } from "@/lib/utils"
6 |
7 | const Dialog = DialogPrimitive.Root
8 |
9 | const DialogTrigger = DialogPrimitive.Trigger
10 |
// Thin wrapper around Radix's dialog portal, forwarding all portal props.
// NOTE(review): the JSX body (<DialogPrimitive.Portal {...props} />) was
// stripped from this dump — confirm against the original file.
11 | const DialogPortal = (props: DialogPrimitive.DialogPortalProps) => (
12 |
13 | );
14 |
15 | const DialogOverlay = React.forwardRef<
16 | React.ElementRef,
17 | React.ComponentPropsWithoutRef
18 | >(({ className, ...props }, ref) => (
19 |
27 | ))
28 | DialogOverlay.displayName = DialogPrimitive.Overlay.displayName
29 |
30 | const DialogContent = React.forwardRef<
31 | React.ElementRef,
32 | React.ComponentPropsWithoutRef
33 | >(({ className, children, ...props }, ref) => (
34 |
35 |
36 |
44 | {children}
45 |
46 |
47 | Close
48 |
49 |
50 |
51 | ))
52 | DialogContent.displayName = DialogPrimitive.Content.displayName
53 |
54 | const DialogHeader = ({
55 | className,
56 | ...props
57 | }: React.HTMLAttributes) => (
58 |
65 | )
66 | DialogHeader.displayName = "DialogHeader"
67 |
68 | const DialogFooter = ({
69 | className,
70 | ...props
71 | }: React.HTMLAttributes) => (
72 |
79 | )
80 | DialogFooter.displayName = "DialogFooter"
81 |
82 | const DialogTitle = React.forwardRef<
83 | React.ElementRef,
84 | React.ComponentPropsWithoutRef
85 | >(({ className, ...props }, ref) => (
86 |
94 | ))
95 | DialogTitle.displayName = DialogPrimitive.Title.displayName
96 |
97 | const DialogDescription = React.forwardRef<
98 | React.ElementRef,
99 | React.ComponentPropsWithoutRef
100 | >(({ className, ...props }, ref) => (
101 |
106 | ))
107 | DialogDescription.displayName = DialogPrimitive.Description.displayName
108 |
109 | export {
110 | Dialog,
111 | DialogTrigger,
112 | DialogContent,
113 | DialogHeader,
114 | DialogFooter,
115 | DialogTitle,
116 | DialogDescription,
117 | }
118 |
--------------------------------------------------------------------------------
/dashboard/src/components/ui/input.tsx:
--------------------------------------------------------------------------------
1 | import * as React from "react";
2 |
3 | import { cn } from "@/lib/utils";
4 |
// Props for Input: exactly the native <input> attributes (empty extension
// kept as a named public type for consumers).
// NOTE(review): the generic argument (<HTMLInputElement>) was stripped
// from this dump — restore from the original file.
5 | export interface InputProps
6 | extends React.InputHTMLAttributes {}
7 |
8 | const Input = React.forwardRef(
9 | ({ className, type, ...props }, ref) => {
10 | return (
11 |
20 | );
21 | }
22 | );
23 | Input.displayName = "Input";
24 |
25 | export { Input };
26 |
--------------------------------------------------------------------------------
/dashboard/src/components/ui/label.tsx:
--------------------------------------------------------------------------------
1 | import * as React from "react"
2 | import * as LabelPrimitive from "@radix-ui/react-label"
3 | import { cva, type VariantProps } from "class-variance-authority"
4 |
5 | import { cn } from "@/lib/utils"
6 |
// Base Tailwind classes for the Label component (single variant-less cva
// recipe; peer-disabled:* styles react to a disabled sibling form control).
7 | const labelVariants = cva(
8 | "text-sm font-medium leading-none peer-disabled:cursor-not-allowed peer-disabled:opacity-70"
9 | )
10 |
11 | const Label = React.forwardRef<
12 | React.ElementRef,
13 | React.ComponentPropsWithoutRef &
14 | VariantProps
15 | >(({ className, ...props }, ref) => (
16 |
21 | ))
22 | Label.displayName = LabelPrimitive.Root.displayName
23 |
24 | export { Label }
25 |
--------------------------------------------------------------------------------
/dashboard/src/components/ui/select.tsx:
--------------------------------------------------------------------------------
1 | import * as React from "react"
2 | import { CaretSortIcon, CheckIcon } from "@radix-ui/react-icons"
3 | import * as SelectPrimitive from "@radix-ui/react-select"
4 |
5 | import { cn } from "@/lib/utils"
6 |
7 | const Select = SelectPrimitive.Root
8 |
9 | const SelectGroup = SelectPrimitive.Group
10 |
11 | const SelectValue = SelectPrimitive.Value
12 |
13 | const SelectTrigger = React.forwardRef<
14 | React.ElementRef,
15 | React.ComponentPropsWithoutRef
16 | >(({ className, children, ...props }, ref) => (
17 |
25 | {children}
26 |
27 |
28 |
29 |
30 | ))
31 | SelectTrigger.displayName = SelectPrimitive.Trigger.displayName
32 |
33 | const SelectContent = React.forwardRef<
34 | React.ElementRef,
35 | React.ComponentPropsWithoutRef
36 | >(({ className, children, position = "popper", ...props }, ref) => (
37 |
38 |
49 |
56 | {children}
57 |
58 |
59 |
60 | ))
61 | SelectContent.displayName = SelectPrimitive.Content.displayName
62 |
63 | const SelectLabel = React.forwardRef<
64 | React.ElementRef,
65 | React.ComponentPropsWithoutRef
66 | >(({ className, ...props }, ref) => (
67 |
72 | ))
73 | SelectLabel.displayName = SelectPrimitive.Label.displayName
74 |
75 | const SelectItem = React.forwardRef<
76 | React.ElementRef,
77 | React.ComponentPropsWithoutRef
78 | >(({ className, children, ...props }, ref) => (
79 |
87 |
88 |
89 |
90 |
91 |
92 | {children}
93 |
94 | ))
95 | SelectItem.displayName = SelectPrimitive.Item.displayName
96 |
97 | const SelectSeparator = React.forwardRef<
98 | React.ElementRef,
99 | React.ComponentPropsWithoutRef
100 | >(({ className, ...props }, ref) => (
101 |
106 | ))
107 | SelectSeparator.displayName = SelectPrimitive.Separator.displayName
108 |
109 | export {
110 | Select,
111 | SelectGroup,
112 | SelectValue,
113 | SelectTrigger,
114 | SelectContent,
115 | SelectLabel,
116 | SelectItem,
117 | SelectSeparator,
118 | }
119 |
--------------------------------------------------------------------------------
/dashboard/src/components/ui/tabs.tsx:
--------------------------------------------------------------------------------
1 | import * as React from "react";
2 | import * as TabsPrimitive from "@radix-ui/react-tabs";
3 |
4 | import { cn } from "@/lib/utils";
5 |
6 | const Tabs = TabsPrimitive.Root;
7 |
8 | const TabsList = React.forwardRef<
9 | React.ElementRef,
10 | React.ComponentPropsWithoutRef
11 | >(({ className, ...props }, ref) => (
12 |
20 | ));
21 | TabsList.displayName = TabsPrimitive.List.displayName;
22 |
23 | const TabsTrigger = React.forwardRef<
24 | React.ElementRef,
25 | React.ComponentPropsWithoutRef
26 | >(({ className, ...props }, ref) => (
27 |
35 | ));
36 | TabsTrigger.displayName = TabsPrimitive.Trigger.displayName;
37 |
38 | const TabsContent = React.forwardRef<
39 | React.ElementRef,
40 | React.ComponentPropsWithoutRef
41 | >(({ className, ...props }, ref) => (
42 |
50 | ));
51 | TabsContent.displayName = TabsPrimitive.Content.displayName;
52 |
53 | export { Tabs, TabsList, TabsTrigger, TabsContent };
54 |
--------------------------------------------------------------------------------
/dashboard/src/components/ui/textarea.tsx:
--------------------------------------------------------------------------------
1 | import * as React from "react";
2 |
3 | import { cn } from "@/lib/utils";
4 |
5 | export interface TextareaProps
6 | extends React.TextareaHTMLAttributes {}
7 |
8 | const Textarea = React.forwardRef(
9 | ({ className, ...props }, ref) => {
10 | return (
11 |
19 | );
20 | }
21 | );
22 | Textarea.displayName = "Textarea";
23 |
24 | export { Textarea };
25 |
--------------------------------------------------------------------------------
/dashboard/src/components/ui/tooltip.tsx:
--------------------------------------------------------------------------------
1 | import * as React from "react"
2 | import * as TooltipPrimitive from "@radix-ui/react-tooltip"
3 |
4 | import { cn } from "@/lib/utils"
5 |
6 | const TooltipProvider = TooltipPrimitive.Provider
7 |
8 | const Tooltip = TooltipPrimitive.Root
9 |
10 | const TooltipTrigger = TooltipPrimitive.Trigger
11 |
12 | const TooltipContent = React.forwardRef<
13 | React.ElementRef,
14 | React.ComponentPropsWithoutRef
15 | >(({ className, sideOffset = 4, ...props }, ref) => (
16 |
25 | ))
26 | TooltipContent.displayName = TooltipPrimitive.Content.displayName
27 |
28 | export { Tooltip, TooltipTrigger, TooltipContent, TooltipProvider }
29 |
--------------------------------------------------------------------------------
/dashboard/src/lib/utils.ts:
--------------------------------------------------------------------------------
1 | import { type ClassValue, clsx } from "clsx"
2 | import { twMerge } from "tailwind-merge"
3 |
4 | export function cn(...inputs: ClassValue[]) {
5 | return twMerge(clsx(inputs))
6 | }
7 |
--------------------------------------------------------------------------------
/dashboard/src/requests/configurations.ts:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import useSWR from "swr";
4 | import axios, { AxiosError, AxiosResponse } from "axios";
5 | import { z } from "zod";
6 | import { min } from "lodash";
7 | import toast from "react-hot-toast";
8 |
// Server representation of a list of configuration revisions; snake_case
// response fields are transformed into the camelCase shape used by the UI.
const schema = z.array(
  z
    .object({
      value: z.any(),
      revision: z.number().int(),
      creation_timestamp: z.number().nullable(),
      publication_timestamp: z.number().nullable(),
      acknowledgment_timestamp: z.number().nullable(),
      success: z.boolean().nullable(),
    })
    .transform((data) => ({
      value: data.value,
      revision: data.revision,
      creationTimestamp: data.creation_timestamp,
      publicationTimestamp: data.publication_timestamp,
      acknowledgmentTimestamp: data.acknowledgment_timestamp,
      success: data.success,
    }))
);
28 |
29 | export type ConfigurationsType = z.infer;
30 |
/**
 * Fetch a single page of configuration revisions from the server.
 *
 * The request always asks for `direction=previous`; when `maxRevision` is
 * given it is passed as the `revision` query parameter so the server can
 * page backwards from it.
 *
 * On any request/parse error this logs the user out, reloads the page, and
 * resolves to `undefined` instead of throwing.
 */
async function getSinglePage(
  url: string,
  accessToken: string,
  maxRevision: number | undefined,
  logoutUser: () => void
): Promise {
  // Absolute URL: server base + endpoint path + pagination query parameters.
  const fullUrl =
    process.env.NEXT_PUBLIC_SERVER_URL +
    url +
    "?direction=previous" +
    (maxRevision !== undefined ? `&revision=${maxRevision}` : "");

  return await axios
    .get(fullUrl, {
      headers: {
        Authorization: `Bearer ${accessToken}`,
      },
    })
    // Validate and camelCase-transform the payload via the zod schema.
    .then((res: AxiosResponse) => schema.parse(res.data))
    .catch((err: AxiosError) => {
      console.error(`Error while fetching url ${url}: ${err}`);
      // 401 → expired session; 5xx → server-side failure; anything else is
      // treated as a client error. All three paths log out and reload.
      if (err.response?.status === 401) {
        toast("Session expired", { icon: "🔑" });
        logoutUser();
        window.location.reload();
      } else if (err.response?.status.toString().startsWith("5")) {
        toast("Server error", { icon: "🔥" });
        logoutUser();
        window.location.reload();
      } else {
        toast("Client error", { icon: "❓" });
        logoutUser();
        window.location.reload();
      }
      return undefined;
    });
}
68 |
/**
 * Fetch all configuration revisions for a sensor by paging backwards
 * through the endpoint.
 *
 * Throws when no access token is available (SWR surfaces this as an error);
 * returns `undefined` when any page request fails (the page-level handler
 * has already logged the user out). Otherwise returns every revision sorted
 * by descending revision number.
 */
async function fetcher(
  url: string,
  accessToken: string | undefined,
  logoutUser: () => void
): Promise {
  if (!accessToken) {
    throw new Error("Not authorized!");
  }

  let data: ConfigurationsType = [];

  while (1) {
    // The pagination cursor is the smallest revision fetched so far
    // (undefined on the first iteration, i.e. start from the newest).
    let newData = await getSinglePage(
      url,
      accessToken,
      min(data.map((d) => d.revision)),
      logoutUser
    );
    if (newData === undefined) {
      return undefined;
    }
    data = [...data, ...newData];
    // A short page means we reached the oldest revision
    // (assumes a server page size of 64 — TODO confirm against the server).
    if (newData.length < 64) {
      break;
    }
  }

  return data.sort((a, b) => b.revision - a.revision);
}
98 |
/**
 * SWR hook exposing a sensor's configuration revisions plus a
 * `createConfigRevision` mutation.
 *
 * Newly created revisions are optimistically prepended to the cached list
 * with all timestamps and the success flag set to `null` (the revision has
 * not yet been processed downstream).
 */
export function useConfigurations(
  accessToken: string | undefined,
  logoutUser: () => void,
  networkIdentifier: string,
  sensorIdentifier: string
) {
  const { data, mutate } = useSWR(
    [
      `/networks/${networkIdentifier}/sensors/${sensorIdentifier}/configurations`,
      accessToken,
    ],
    ([url, accessToken]) => fetcher(url, accessToken, logoutUser)
  );

  const createConfigRevision = async (
    value: Record
  ): Promise => {
    const { data } = await axios.post(
      `${process.env.NEXT_PUBLIC_SERVER_URL}/networks/${networkIdentifier}/sensors/${sensorIdentifier}/configurations`,
      value,
      {
        headers: {
          Authorization: `Bearer ${accessToken}`,
        },
      }
    );
    // Optimistic cache update: the POST response supplies only the new
    // revision number; everything else is known client-side.
    mutate((prevData: ConfigurationsType | undefined) => [
      {
        revision: data.revision, // @ts-ignore
        value: value,
        creationTimestamp: null,
        publicationTimestamp: null,
        acknowledgmentTimestamp: null,
        success: null,
      },
      ...(prevData || []),
    ]);
  };

  return {
    configurationsData: data,
    createConfigRevision,
  };
}
143 |
--------------------------------------------------------------------------------
/dashboard/src/requests/measurements-aggregation.ts:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import useSWR from "swr";
4 | import axios, { AxiosError, AxiosResponse } from "axios";
5 | import { z } from "zod";
6 | import toast from "react-hot-toast";
7 |
// Aggregated measurements keyed by value name (presumably one entry per
// measured attribute — TODO confirm against the server); each bucket holds
// an average and its bucket timestamp, transformed to camelCase.
const schema = z.record(
  z.string(),
  z.array(
    z
      .object({
        average: z.number(),
        bucket_timestamp: z.number(),
      })
      .transform((obj) => ({
        average: obj.average,
        bucketTimestamp: obj.bucket_timestamp,
      }))
  )
);
22 |
23 | export type MeasurementsAggregationType = z.infer;
24 |
/**
 * Fetch the aggregated measurements of a sensor.
 *
 * Returns `undefined` (without throwing) when no access token is available
 * or when the request fails; failures additionally toast an error, log the
 * user out, and reload the page.
 */
async function fetcher(
  url: string,
  accessToken: string | undefined,
  logoutUser: () => void
): Promise {
  if (!accessToken) {
    return undefined;
  }

  return await axios
    .get(`${process.env.NEXT_PUBLIC_SERVER_URL}${url}`, {
      headers: {
        Authorization: `Bearer ${accessToken}`,
      },
    })
    // Validate and camelCase-transform the payload via the zod schema.
    .then((res: AxiosResponse) => schema.parse(res.data))
    .catch((err: AxiosError) => {
      console.error(`Error while fetching url ${url}: ${err}`);
      // 401 → expired session; 5xx → server-side failure; else client error.
      if (err.response?.status === 401) {
        toast("Session expired", { icon: "🔑" });
        logoutUser();
        window.location.reload();
      } else if (err.response?.status.toString().startsWith("5")) {
        toast("Server error", { icon: "🔥" });
        logoutUser();
        window.location.reload();
      } else {
        toast("Client error", { icon: "❓" });
        logoutUser();
        window.location.reload();
      }
      return undefined;
    });
}
59 |
/**
 * SWR hook exposing the aggregated measurements of a single sensor
 * (read-only; there is no mutation for aggregated data).
 */
export function useMeasurementsAggregation(
  accessToken: string | undefined,
  logoutUser: () => void,
  networkIdentifier: string,
  sensorIdentifier: string
) {
  const { data } = useSWR(
    [
      `/networks/${networkIdentifier}/sensors/${sensorIdentifier}/measurements?aggregate=true`,
      accessToken,
    ],
    ([url, accessToken]) => fetcher(url, accessToken, logoutUser)
  );

  return {
    measurementsAggregationData: data,
  };
}
78 |
--------------------------------------------------------------------------------
/dashboard/src/requests/networks.ts:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import useSWR from "swr";
4 | import axios, { AxiosError, AxiosResponse } from "axios";
5 | import { z } from "zod";
6 | import toast from "react-hot-toast";
7 |
// Server representation of the networks list; the snake_case, prefixed
// fields are flattened to `identifier`/`name` for the UI.
const schema = z.array(
  z
    .object({
      network_identifier: z.string(),
      network_name: z.string(),
    })
    .transform((data) => ({
      identifier: data.network_identifier,
      name: data.network_name,
    }))
);
19 |
20 | export type NetworksType = z.infer;
21 |
/**
 * Fetch the networks visible to the current user.
 *
 * Returns `undefined` (without throwing) when no access token is available
 * or when the request fails; failures additionally toast an error, log the
 * user out, and reload the page.
 */
async function fetcher(
  url: string,
  accessToken: string | undefined,
  logoutUser: () => void
): Promise {
  if (!accessToken) {
    return undefined;
  }

  return await axios
    .get(`${process.env.NEXT_PUBLIC_SERVER_URL}${url}`, {
      headers: {
        Authorization: `Bearer ${accessToken}`,
      },
    })
    // Validate and camelCase-transform the payload via the zod schema.
    .then((res: AxiosResponse) => schema.parse(res.data))
    .catch((err: AxiosError) => {
      console.error(`Error while fetching url ${url}: ${err}`);
      // 401 → expired session; 5xx → server-side failure; else client error.
      if (err.response?.status === 401) {
        toast("Session expired", { icon: "🔑" });
        logoutUser();
        window.location.reload();
      } else if (err.response?.status.toString().startsWith("5")) {
        toast("Server error", { icon: "🔥" });
        logoutUser();
        window.location.reload();
      } else {
        toast("Client error", { icon: "❓" });
        logoutUser();
        window.location.reload();
      }
      return undefined;
    });
}
56 |
/**
 * SWR hook exposing the user's networks plus a `createNetwork` mutation.
 *
 * `createNetwork` POSTs the new network, merges the server response with
 * the chosen name (the POST response does not echo the name back), and
 * optimistically appends the result to the cached list.
 */
export function useNetworks(
  accessToken: string | undefined,
  logoutUser: () => void
) {
  const { data, mutate } = useSWR(
    ["/networks", accessToken],
    ([url, accessToken]) => fetcher(url, accessToken, logoutUser)
  );

  const createNetwork = async (networkName: string) => {
    const { data } = await axios.post(
      `${process.env.NEXT_PUBLIC_SERVER_URL}/networks`,
      {
        network_name: networkName,
      },
      {
        headers: {
          Authorization: `Bearer ${accessToken}`,
        },
      }
    );
    // Reuse the list schema to normalize the single new entry.
    const networkData = schema.parse([{ ...data, network_name: networkName }]);
    mutate((prevData: NetworksType | undefined) => [
      ...(prevData || []),
      ...networkData,
    ]);
  };

  return {
    networksData: data,
    createNetwork,
  };
}
90 |
--------------------------------------------------------------------------------
/dashboard/src/requests/sensors.ts:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import useSWR from "swr";
4 | import axios, { AxiosError, AxiosResponse } from "axios";
5 | import { z } from "zod";
6 | import toast from "react-hot-toast";
7 |
// Server representation of a network's sensors list; the snake_case,
// prefixed fields are flattened to `identifier`/`name` for the UI.
const schema = z.array(
  z
    .object({
      sensor_identifier: z.string(),
      sensor_name: z.string(),
    })
    .transform((data) => ({
      identifier: data.sensor_identifier,
      name: data.sensor_name,
    }))
);
19 |
20 | export type SensorsType = z.infer;
21 |
/**
 * Fetch the sensors of a network.
 *
 * Returns `undefined` (without throwing) when no access token is available
 * or when the request fails; failures additionally toast an error, log the
 * user out, and reload the page.
 */
async function fetcher(
  url: string,
  accessToken: string | undefined,
  logoutUser: () => void
): Promise {
  if (!accessToken) {
    return undefined;
  }

  return await axios
    .get(`${process.env.NEXT_PUBLIC_SERVER_URL}${url}`, {
      headers: {
        Authorization: `Bearer ${accessToken}`,
      },
    })
    // Validate and camelCase-transform the payload via the zod schema.
    .then((res: AxiosResponse) => schema.parse(res.data))
    .catch((err: AxiosError) => {
      console.error(`Error while fetching url ${url}: ${err}`);
      // 401 → expired session; 5xx → server-side failure; else client error.
      if (err.response?.status === 401) {
        toast("Session expired", { icon: "🔑" });
        logoutUser();
        window.location.reload();
      } else if (err.response?.status.toString().startsWith("5")) {
        toast("Server error", { icon: "🔥" });
        logoutUser();
        window.location.reload();
      } else {
        toast("Client error", { icon: "❓" });
        logoutUser();
        window.location.reload();
      }
      return undefined;
    });
}
56 |
/**
 * SWR hook exposing a network's sensors plus `createSensor` and
 * `updateSensor` mutations.
 *
 * `createSensor` POSTs the new sensor, optimistically appends it to the
 * cached list, and returns the server-assigned identifier. `updateSensor`
 * renames an existing sensor and patches the cached entry in place.
 */
export function useSensors(
  accessToken: string | undefined,
  logoutUser: () => void,
  networkIdentifier: string
) {
  const { data, mutate } = useSWR(
    [`/networks/${networkIdentifier}/sensors`, accessToken],
    ([url, accessToken]) => fetcher(url, accessToken, logoutUser)
  );

  const createSensor = async (sensorName: string): Promise => {
    const { data } = await axios.post(
      `${process.env.NEXT_PUBLIC_SERVER_URL}/networks/${networkIdentifier}/sensors`,
      {
        sensor_name: sensorName,
      },
      {
        headers: {
          Authorization: `Bearer ${accessToken}`,
        },
      }
    );
    // Reuse the list schema to normalize the single new entry; the POST
    // response does not echo the name back, so merge it in here.
    const newData = schema.parse([{ ...data, sensor_name: sensorName }]);
    mutate((prevData: SensorsType | undefined) => [
      ...(prevData || []),
      ...newData,
    ]);
    return newData[0].identifier;
  };

  const updateSensor = async (
    sensorIdentifier: string,
    newSensorName: string
  ): Promise => {
    await axios.put(
      `${process.env.NEXT_PUBLIC_SERVER_URL}/networks/${networkIdentifier}/sensors/${sensorIdentifier}`,
      {
        sensor_name: newSensorName,
      },
      {
        headers: {
          Authorization: `Bearer ${accessToken}`,
        },
      }
    );
    // Optimistic rename of the matching cached entry.
    mutate((prevData: SensorsType | undefined) =>
      (prevData || []).map((sensor) => {
        if (sensor.identifier === sensorIdentifier) {
          return { ...sensor, name: newSensorName };
        }
        return sensor;
      })
    );
  };

  return {
    sensorsData: data,
    createSensor,
    updateSensor,
  };
}
118 |
--------------------------------------------------------------------------------
/dashboard/src/requests/status.ts:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import useSWR from "swr";
4 | import axios, { AxiosError, AxiosResponse } from "axios";
5 | import { z } from "zod";
6 |
// Server status payload (build/deployment metadata and MQTT connection
// details); snake_case fields are transformed to camelCase.
const schema = z
  .object({
    environment: z.string(),
    commit_sha: z.string(),
    branch_name: z.string(),
    start_timestamp: z.number(),
    mqtt_hostname: z.string(),
    mqtt_port: z.number(),
  })
  .transform((data) => ({
    environment: data.environment,
    commitSha: data.commit_sha,
    branchName: data.branch_name,
    startTimestamp: data.start_timestamp,
    mqttHostname: data.mqtt_hostname,
    mqttPort: data.mqtt_port,
  }));
24 |
25 | export type StatusType = z.infer;
26 |
/**
 * Fetch the server status. Sent without an Authorization header —
 * presumably a public endpoint (verify against the server).
 *
 * On any failure the browser is redirected to the /offline page and the
 * promise resolves to `undefined`.
 */
async function fetcher(url: string): Promise {
  return await axios
    .get(`${process.env.NEXT_PUBLIC_SERVER_URL}${url}`)
    // Validate and camelCase-transform the payload via the zod schema.
    .then((res: AxiosResponse) => schema.parse(res.data))
    .catch((err: AxiosError) => {
      console.error(`Error while fetching url ${url}: ${err}`);

      // redirect to /offline
      window.location.href = "/offline";
      return undefined;
    });
}
39 |
/**
 * SWR hook returning the server status payload (or undefined while
 * loading / after a failed fetch).
 */
export function useStatus() {
  const { data } = useSWR(`/status`, (url) => fetcher(url));

  return data;
}
45 |
--------------------------------------------------------------------------------
/dashboard/src/requests/user.ts:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import useSWR from "swr";
4 | import axios from "axios";
5 | import { z } from "zod";
6 | import Cookies from "js-cookie";
7 |
// Authentication response payload (identifier + bearer token), transformed
// from snake_case to camelCase.
const userDataSchema = z
  .object({
    user_identifier: z.string(),
    access_token: z.string(),
  })
  .transform((data) => ({
    userIdentifier: data.user_identifier,
    accessToken: data.access_token,
  }));
17 |
18 | export type UserDataType = z.infer;
19 |
/**
 * Restore the signed-in user from cookies.
 *
 * Throws when either cookie is missing, which SWR surfaces as an error
 * state (used by `useUser` to compute `userDataIsloading`).
 */
async function userFetcher(): Promise {
  const userIdentifier = Cookies.get("userIdentifier");
  const accessToken = Cookies.get("accessToken");

  if (userIdentifier && accessToken) {
    return {
      userIdentifier,
      accessToken,
    };
  }

  throw new Error("Not authorized!");
}
33 |
/**
 * SWR hook managing the authentication state.
 *
 * Exposes the cached user data, a loading flag, and login/signup/logout
 * actions. Login and signup persist the credentials in cookies and push
 * the parsed user data straight into the SWR cache; logout clears both.
 *
 * NOTE(review): the returned key `userDataIsloading` (lowercase "l") is a
 * typo but part of the public interface — renaming would break callers.
 */
export function useUser() {
  const { data: userData, error, mutate } = useSWR("userData", userFetcher);

  const loginUser = async (username: string, password: string) => {
    const { data } = await axios.post(
      `${process.env.NEXT_PUBLIC_SERVER_URL}/authentication`,
      {
        user_name: username,
        password,
      }
    );
    const userData = userDataSchema.parse(data);
    Cookies.set("userIdentifier", userData.userIdentifier);
    Cookies.set("accessToken", userData.accessToken);
    mutate(userData);
  };

  const signupUser = async (username: string, password: string) => {
    const { data } = await axios.post(
      `${process.env.NEXT_PUBLIC_SERVER_URL}/users`,
      {
        user_name: username,
        password,
      }
    );
    const userData = userDataSchema.parse(data);
    Cookies.set("userIdentifier", userData.userIdentifier);
    Cookies.set("accessToken", userData.accessToken);
    mutate(userData);
  };

  return {
    userData,
    // Loading = no data yet and the cookie fetcher has not errored.
    userDataIsloading: !userData && !error,
    logoutUser: () => {
      Cookies.remove("userIdentifier");
      Cookies.remove("accessToken");
      mutate(undefined);
    },
    loginUser,
    signupUser,
  };
}
77 |
--------------------------------------------------------------------------------
/dashboard/tailwind.config.js:
--------------------------------------------------------------------------------
/** @type {import('tailwindcss').Config} */
// Tailwind setup for the dashboard: class-based dark mode, CSS-variable
// color tokens (shadcn/ui convention), and accordion animations provided
// by the tailwindcss-animate plugin.
module.exports = {
  darkMode: ["class"],
  content: ["./src/**/*.{ts,tsx}"],
  theme: {
    container: {
      center: true,
      padding: "2rem",
      screens: {
        "2xl": "1400px",
      },
    },
    extend: {
      colors: {
        // Semantic tokens resolved from CSS variables (defined elsewhere,
        // presumably in globals.css — verify).
        border: "hsl(var(--border))",
        input: "hsl(var(--input))",
        ring: "hsl(var(--ring))",
        background: "hsl(var(--background))",
        foreground: "hsl(var(--foreground))",
        primary: {
          DEFAULT: "hsl(var(--primary))",
          foreground: "hsl(var(--primary-foreground))",
        },
        secondary: {
          DEFAULT: "hsl(var(--secondary))",
          foreground: "hsl(var(--secondary-foreground))",
        },
        destructive: {
          DEFAULT: "hsl(var(--destructive))",
          foreground: "hsl(var(--destructive-foreground))",
        },
        muted: {
          DEFAULT: "hsl(var(--muted))",
          foreground: "hsl(var(--muted-foreground))",
        },
        accent: {
          DEFAULT: "hsl(var(--accent))",
          foreground: "hsl(var(--accent-foreground))",
        },
        popover: {
          DEFAULT: "hsl(var(--popover))",
          foreground: "hsl(var(--popover-foreground))",
        },
        card: {
          DEFAULT: "hsl(var(--card))",
          foreground: "hsl(var(--card-foreground))",
        },
        // Extra in-between slate shades used by the dashboard.
        "slate-150": "#e9eef4",
        "slate-250": "rgb(215, 223, 233)",
        "slate-350": "#afbccd",

        /*blue: {
          50: "#f1f5fd",
          100: "#dfe9fa",
          150: "#d3e0f8",
          200: "#c7d8f6",
          300: "#a0bff0",
          400: "#739de7",
          500: "#4a76de",
          600: "#3d60d3",
          700: "#344dc1",
          800: "#30409d",
          900: "#2b397d",
          950: "#1e254d",
        },*/
        /*blue: {
          100: "#a5e1ff",
          200: "#4bc3ff",
          300: "#00a0f1",
          400: "#006597",
          500: "#00293d",
          600: "#002131",
          700: "#001825",
          800: "#001018",
          900: "#00080c",
        },
        red: {
          100: "#f7d2d2",
          200: "#efa5a5",
          300: "#e77878",
          400: "#df4b4b",
          500: "#d02525",
          600: "#a61d1d",
          700: "#7d1616",
          800: "#530f0f",
          900: "#2a0707",
        },
        orange: {
          100: "#ffe5ca",
          200: "#ffcc95",
          300: "#ffb260",
          400: "#ff982b",
          500: "#f77f00",
          600: "#c46500",
          700: "#934c00",
          800: "#623300",
          900: "#311900",
        },
        yellow: {
          100: "#fff5e3",
          200: "#ffecc6",
          300: "#ffe2aa",
          400: "#fed98d",
          500: "#fecf72",
          600: "#feb728",
          700: "#db9301",
          800: "#926201",
          900: "#493100",
        },
        eggshell: {
          100: "#fbfaf2",
          200: "#f8f4e6",
          300: "#f4efd9",
          400: "#f1e9cc",
          500: "#ede4bf",
          600: "#dac77d",
          700: "#c7ab3a",
          800: "#867226",
          900: "#433913",
        },*/
      },
      borderRadius: {
        lg: "var(--radius)",
        md: "calc(var(--radius) - 2px)",
        sm: "calc(var(--radius) - 4px)",
      },
      keyframes: {
        "accordion-down": {
          from: { height: 0 },
          to: { height: "var(--radix-accordion-content-height)" },
        },
        "accordion-up": {
          from: { height: "var(--radix-accordion-content-height)" },
          to: { height: 0 },
        },
      },
      animation: {
        "accordion-down": "accordion-down 0.2s ease-out",
        "accordion-up": "accordion-up 0.2s ease-out",
      },
    },
  },
  plugins: [require("tailwindcss-animate")],
};
145 |
--------------------------------------------------------------------------------
/dashboard/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "es5",
4 | "lib": ["dom", "dom.iterable", "esnext"],
5 | "allowJs": true,
6 | "skipLibCheck": true,
7 | "strict": true,
8 | "noEmit": true,
9 | "esModuleInterop": true,
10 | "module": "esnext",
11 | "moduleResolution": "bundler",
12 | "resolveJsonModule": true,
13 | "isolatedModules": true,
14 | "jsx": "preserve",
15 | "incremental": true,
16 | "plugins": [
17 | {
18 | "name": "next"
19 | }
20 | ],
21 | "paths": {
22 | "@/*": ["./src/*"]
23 | }
24 | },
25 | "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
26 | "exclude": ["node_modules"]
27 | }
28 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
# Development stack: API server, dashboard, TimescaleDB, and a Mosquitto
# MQTT broker. Credentials below are for local development only.
services:
  server:
    build:
      context: ./server
      args:
        commit_sha: ${COMMIT_SHA:-development}
        branch_name: ${BRANCH_NAME:-development}
    environment:
      - ENVIRONMENT=development
      - POSTGRESQL_HOSTNAME=postgres
      - POSTGRESQL_PORT=5432
      - POSTGRESQL_IDENTIFIER=postgres
      - POSTGRESQL_PASSWORD=12345678
      - POSTGRESQL_DATABASE=database
      - MQTT_HOSTNAME=mosquitto
      - MQTT_PORT=1883
      - MQTT_IDENTIFIER=server
      - MQTT_PASSWORD=password
      - COMMIT_SHA=${COMMIT_SHA:-development}
      - BRANCH_NAME=${BRANCH_NAME:-development}
    volumes:
      # Mount the source so uvicorn's --reload picks up local edits.
      - ./server:/server
    ports:
      - "8421:8421"
    depends_on:
      postgres:
        condition: service_healthy
      mosquitto:
        condition: service_started
    # Install dependencies, seed the database, then run uvicorn with reload.
    command: bash -c "cd /server && poetry install --with dev && poetry run python -m scripts.initialize --populate && poetry run uvicorn app.main:app --host 0.0.0.0 --port 8421 --reload --reload-include './app/*.sql' --reload-exclude './tests/*' --reload-exclude='./scripts/*' --log-level debug"

  dashboard:
    build:
      context: ./dashboard
      args:
        # NEXT_PUBLIC_* values are baked into the client bundle at build time.
        NODE_VERSION: 20.6.1
        NEXT_PUBLIC_SERVER_URL: http://localhost:8421
        NEXT_PUBLIC_INSTANCE_TITLE: "Development Instance"
        NEXT_PUBLIC_BUILD_TIMESTAMP: ${NEXT_PUBLIC_BUILD_TIMESTAMP:-0}
        NEXT_PUBLIC_COMMIT_SHA: ${COMMIT_SHA:-development}
        NEXT_PUBLIC_BRANCH_NAME: ${BRANCH_NAME:-development}
    environment:
      - NODE_ENV=production
      - NODE_OPTIONS=--max-old-space-size=4096
      - NEXT_PUBLIC_SERVER_URL=http://localhost:8421
    ports:
      - "3000:3000"
    deploy:
      resources:
        limits:
          memory: 2G
        reservations:
          memory: 512M
    depends_on:
      - server

  postgres:
    # TimescaleDB on PostgreSQL 15; credentials match the server env above.
    image: timescale/timescaledb:latest-pg15
    environment:
      - POSTGRES_USER=postgres
      - POSTGRES_PASSWORD=12345678
      - POSTGRES_DB=database
    ports:
      - "5432:5432"
    volumes:
      - postgres_data:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U postgres"]
      interval: 5s
      timeout: 5s
      retries: 5

  mosquitto:
    image: eclipse-mosquitto:latest
    ports:
      - "1883:1883"
    volumes:
      # Reuse the test broker configuration for local development.
      - ./server/tests/mosquitto.conf:/mosquitto/config/mosquitto.conf

volumes:
  postgres_data:
82 |
--------------------------------------------------------------------------------
/docs/README.md:
--------------------------------------------------------------------------------
1 | Run the documentation with
2 |
3 | ```bash
4 | npm install
5 | npm run dev
6 | ```
7 |
8 | Create a production build with
9 |
10 | ```bash
11 | npm run build
12 | ```
13 |
--------------------------------------------------------------------------------
/docs/netlify.toml:
--------------------------------------------------------------------------------
1 | [build]
2 | base = "/docs"
3 | publish = "/out"
4 | command = "npm run build"
5 |
--------------------------------------------------------------------------------
/docs/next-env.d.ts:
--------------------------------------------------------------------------------
1 | ///
2 | ///
3 |
4 | // NOTE: This file should not be edited
5 | // see https://nextjs.org/docs/basic-features/typescript for more information.
6 |
--------------------------------------------------------------------------------
/docs/next.config.js:
--------------------------------------------------------------------------------
// Wrap the Next.js config with Nextra using the docs theme.
const withNextra = require("nextra")({
  theme: "nextra-theme-docs",
  themeConfig: "./theme.config.jsx",
});

// Static export ("output: export") with unoptimized images so the site can
// be served from a plain static host (Netlify, per docs/netlify.toml).
module.exports = withNextra({
  images: {
    unoptimized: true,
  },
  output: "export",
});
12 |
13 | // If you have other Next.js configurations, you can pass them as the parameter:
14 | // module.exports = withNextra({ /* other next.js config */ })
15 |
--------------------------------------------------------------------------------
/docs/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "repository": "https://github.com/iterize/tenta",
3 | "scripts": {
4 | "dev": "next dev",
5 | "build": "next build"
6 | },
7 | "dependencies": {
8 | "autoprefixer": "^10.4.15",
9 | "next": "^13.4.19",
10 | "nextra": "^2.12.3",
11 | "nextra-theme-docs": "^2.12.3",
12 | "postcss": "^8.4.29",
13 | "react": "^18.2.0",
14 | "react-dom": "^18.2.0",
15 | "sharp": "^0.32.6",
16 | "tailwindcss": "^3.3.3"
17 | },
18 | "devDependencies": {
19 | "@types/node": "^20.6.0",
20 | "typescript": "^5.2.2"
21 | },
22 | "engines": {
23 | "node": ">=16"
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/docs/pages/_app.mdx:
--------------------------------------------------------------------------------
1 | import '../style.css';
2 |
3 | export default function App({ Component, pageProps }) {
4 | return (
5 | <>
6 |
7 | >
8 | );
9 |
10 | }
11 |
--------------------------------------------------------------------------------
/docs/pages/_meta.json:
--------------------------------------------------------------------------------
1 | {
2 | "index": {
3 | "display": "hidden"
4 | },
5 | "introduction": "Introduction",
6 | "-- Guides": {
7 | "type": "separator",
8 | "title": "Guides"
9 | },
10 | "overview": "System overview",
11 | "connect": "Connecting the first sensor",
12 | "next": "Next steps",
13 | "export": "Working with the data",
14 | "deployment": "Deployment",
15 | "-- Reference": {
16 | "type": "separator",
17 | "title": "Reference"
18 | },
19 | "mqtt": "MQTT interface",
20 | "database": {
21 | "title": "Database schema ↗",
22 | "href": "https://github.com/iterize/tenta/blob/main/server/schema.sql"
23 | },
24 | "http": {
25 | "title": "REST API ↗",
26 | "href": "https://bump.sh/empicano/doc/tenta"
27 | },
28 | "design": "Design decisions",
29 | "-- More": {
30 | "type": "separator",
31 | "title": "More"
32 | },
33 | "releases": {
34 | "title": "Release notes ↗",
35 | "href": "https://github.com/iterize/tenta/releases"
36 | },
37 | "issues": {
38 | "title": "Issue tracker ↗",
39 | "href": "https://github.com/iterize/tenta/issues"
40 | },
41 | "discussions": {
42 | "title": "Discussions ↗",
43 | "href": "https://github.com/iterize/tenta/discussions"
44 | },
45 | "contribute": "How to contribute",
46 | "roadmap": "Roadmap",
47 | "community": "Community projects"
48 | }
--------------------------------------------------------------------------------
/docs/pages/community.mdx:
--------------------------------------------------------------------------------
1 | # Community projects
2 |
3 | We love seeing people build cool things with Tenta! These are some of the projects we know about. If you know of a project that's missing from this list, don't hesitate to open a pull request 🚀
4 |
- [Ivy](https://github.com/tum-esm/ivy): A Python template for a data acquisition system automating the operation of a sensor node. It was created based on the experience gained from developing Hermes (see below) and other data acquisition systems.
6 | - [Tenta Client](https://github.com/tum-esm/tenta-client): A Python client based on paho-mqtt implementing the full Tenta API.
7 | - [Hermes](https://github.com/tum-esm/hermes): CO2 sensor network developed within the [ICOS Cities PAUL project](https://www.icos-cp.eu/projects/icos-cities) at [Professorship of Environmental Sensing and Modeling at TUM](https://www.ee.cit.tum.de/en/esm/home/).
8 |
--------------------------------------------------------------------------------
/docs/pages/connect.mdx:
--------------------------------------------------------------------------------
1 | import { Callout } from "nextra/components";
2 |
3 | Let's start as simple as possible. In the following, we will learn how to run Tenta locally. We are then going to write a short Python script that simulates our first sensor. After we connect this script with Tenta, we will be able to see the sensor's measurements in the dashboard.
4 |
5 |
6 | You don't need any special hardware to follow this tutorial.
7 |
8 |
9 | ## Start Tenta locally
10 |
11 | Tenta consists of a server and a dashboard. Sensors communicate with the server via an intermediate MQTT broker. Data is stored in a PostgreSQL+TimescaleDB database.
12 |
13 | For this tutorial, we'll use a shell script that starts all these components for us.
14 |
15 | If you haven't already, clone the repository from GitHub with `git clone https://github.com/iterize/tenta.git`. Then you can follow the instructions in the [contributing guide](contribute#dashboard) to start the server and the dashboard locally.
16 |
17 | Our `./scripts/develop` script already initialized Tenta with some example data. You can log into the dashboard with the username `happy-un1c0rn` and the very secure password `12345678`. Don't hesitate to have a look around!
18 |
19 | ## Our first sensor
20 |
21 | Now that Tenta is running, let's connect our first sensor. Usually, our sensors and Tenta would run on different devices, but for simplicity, we use our local machine for both in this tutorial.
22 |
23 |
We use Python together with the [paho-mqtt](https://github.com/eclipse/paho.mqtt.python) MQTT library in this example. Your sensors can use any other programming language that has an MQTT client library (which most do). There is a [Tenta client library for Python](https://github.com/tum-esm/tenta-client) maintained by the community.
25 |
26 |
27 | For now, we only want our sensor to transmit some example measurements to Tenta at regular intervals. We are going to look at some more of what Tenta can do in the next chapter.
28 |
29 | Let's see the code!
30 |
31 | ```python
32 | import json
33 | import math
34 | import paho.mqtt.client as mqtt
35 | import time
36 |
37 |
38 | # Connect to the MQTT broker (here: our development instance)
39 | client = mqtt.Client()
40 | client.connect("localhost", port=1883)
41 |
42 | while True:
43 | # The current time as Unix timestamp
44 | timestamp = time.time()
45 | # Create a test measurement with temperature and humidity values
46 | measurement = {
47 | "value": {
48 | "temperature": math.sin(timestamp / (60*60*24)),
49 | "humidity": timestamp % (60*60*24*7),
50 | },
51 | "timestamp": timestamp,
52 | }
53 | # Publish our measurement
54 | client.publish(
55 | topic="measurements/81bf7042-e20f-4a97-ac44-c15853e3618f",
56 | payload=json.dumps([measurement]).encode(),
57 | qos=1,
58 | )
59 | # Wait a little while
60 | time.sleep(5)
61 | ```
62 |
63 |
64 | All of the examples in this documentation are self-contained and can be run as-is.
65 |
66 |
67 | Our sensor sends the measurements to the `measurements/81bf7042-e20f-4a97-ac44-c15853e3618f` topic. The `81bf7042-e20f-4a97-ac44-c15853e3618f` part is the sensor's identifier. You can find this identifier in the dashboard when you create a new sensor.
68 |
69 | Measurements are simple JSON documents. You can try to add or remove attributes; Tenta supports changing their format on the fly! Note that Tenta only supports numeric values.
70 |
71 | ## The results
72 |
73 | In the dashboard, you can see the measurements arriving in real-time now. Additionally, the dashboard shows charts of the measurement values over the last 4 weeks.
74 |
75 | That's it for this chapter. Well done! 🎉
76 |
--------------------------------------------------------------------------------
/docs/pages/contribute.mdx:
--------------------------------------------------------------------------------
1 | # How to contribute
2 |
3 | We're very happy about any contributions to Tenta! ✨
4 |
5 | ## Development setup
6 |
7 | ### Server
8 |
9 | - Clone the repository and switch into the `server` directory
10 | - Install the Python version noted in `.python-version` (e.g. with `pyenv`)
11 | - Install the [poetry package manager](https://github.com/python-poetry/poetry)
12 | - Install the dependencies with `./scripts/setup`
13 | - Make sure that you have Docker installed
14 | - Start the server in development mode with `./scripts/develop`; This automatically spins up local PostgreSQL and Mosquitto instances with Docker; The server will be available at `http://localhost:8421`
15 | - You can run the tests with `./scripts/test` and format and lint the code with `./scripts/check`
16 |
17 | ### Dashboard
18 |
19 | - Start the server in development mode, as described above
20 | - Switch into the `dashboard` directory
21 | - Have [NodeJS](https://nodejs.org/en) version >= 16 installed on your machine
22 | - Install the dependencies with `npm install`
23 | - Start the dashboard in development mode with `npm run dev`; Use the username `happy-un1c0rn` and the password `12345678` to log in
24 | - You can modify `.env.development` to point your development frontend to a different server
25 | - The directory structure is explained by [NextJS](https://nextjs.org/) (version 13, app router)
26 |
27 | ### Documentation
28 |
29 | - Clone the repository and switch into the `docs` directory
30 | - Have [NodeJS](https://nodejs.org/en) version >= 16 installed on your machine
31 | - Install the dependencies with `npm install`
32 | - Start the documentation in development mode with `npm run dev`
33 | - The docs are built using [Nextra](https://nextra.site/) which is again based on [NextJS](https://nextjs.org/)
34 |
35 | ## Making a pull request
36 |
37 | You can create a draft pull request if your contribution is not yet ready to merge. Please check if your changes call for updates to the documentation! Changes to the server should be accompanied by tests.
38 |
--------------------------------------------------------------------------------
/docs/pages/deployment.mdx:
--------------------------------------------------------------------------------
1 | import { Callout } from "nextra/components";
2 |
3 | # Deployment
4 |
5 | ## Server
6 |
7 | Tenta's server is backed by PostgreSQL and an MQTT broker. During development and testing, these services are automatically spun up locally for you. In production, it's better to deploy them independently from the server.
8 |
9 | You can get started with a free MQTT broker from [HiveMQ](https://www.hivemq.com). TimescaleDB's license restricts other cloud providers from offering it feature-complete as a service. You can either deploy and manage it yourself or use Timescale's own [Timescale Cloud](https://www.timescale.com/cloud) (the first 30 days are free).
10 |
11 | When you have your PostgreSQL instance and the MQTT broker ready:
12 |
13 | - Clone the repository and switch into the server directory
14 | - Specify your environment variables in a `.env` file (see `.env.example`)
15 | - Initialize the database via `(set -a && source .env && ./scripts/initialize)`
16 | - Make sure that you have Docker installed
17 | - Build the Docker image with `./scripts/build`
18 |
19 | You can run this Docker image locally with `docker run -p 127.0.0.1:8421:8421 --env-file .env tenta` or deploy it to your cloud provider of choice (e.g. [DigitalOcean's App Platform](https://www.digitalocean.com/products/app-platform)).
20 |
21 | ## Dashboard
22 |
23 | _All of the following paths are relative to the `dashboard/` directory of this repository._
24 |
25 | The dashboard can be hosted on any web server that supports NodeJS applications. Some hosting options are [fly.io](https://fly.io/), [Vercel](https://vercel.com/), [Render](https://render.com/) or [DigitalOcean](https://www.digitalocean.com/).
26 |
27 | The file `.env.template` contains all available environment variables. The command `npm run build` creates a production build in the `out/` directory. This directory can be served by running `npm run start`.
28 |
29 |
30 |
31 | The dashboard might be hostable as a static site in the future. See https://github.com/iterize/tenta/issues/26. Anyone is welcome to contribute!
32 |
33 |
34 |
--------------------------------------------------------------------------------
/docs/pages/design.mdx:
--------------------------------------------------------------------------------
1 | # Design decisions
2 |
3 | This document captures important design/architectural decisions along with their context and consequences.
4 |
5 | ## [`0.1`] Delegation of processing and visualization
6 |
7 | Tenta follows the Unix philosophy and aims to be lightweight and composable. Tenta collects data from sensors and allows supervising and configuring them remotely. Tenta does not process data and does not visualize anything more elaborate than real-time line charts over raw data. Instead, we design Tenta's interfaces to work well with other tools and provide documentation about, e.g., how to get data out of Tenta to process it.
8 |
9 | The open-source ecosystem already has excellent tools for processing and visualization, e.g., polars and Grafana. By not reinventing the wheel here, we can focus on Tenta's core functionality. Additionally, processing and visualization vary immensely between projects, which means that we'd probably only end up with a very complex system that still doesn't work for everyone.
10 |
11 | Tenta stores the raw data it receives from the sensors and does not allow it to be modified. This means that Tenta can function as a source of truth. When we export and process data outside of Tenta, bugs or accidental deletions and modifications during processing do not affect the raw data.
12 |
13 | Furthermore, it's often not possible to analyze data in real time. We might want to try out and compare different processing methods, or we might want to reprocess all of our data when we find a bug. The processing might depend on metadata or external sources that are only available at a later point in time, etc. Additionally, processing can sometimes take hours or days. By decoupling the processing, we can build arbitrarily complex processing pipelines without affecting Tenta's real-time functionality.
14 |
15 | ## [`0.1`] MQTT for communication
16 |
17 | Tenta uses MQTT to communicate with sensors. To configure sensors in real-time, communication needs to happen not only from the sensors to the server but also in the other direction. MQTT uses little bandwidth, which is important for sensors connected via the cellular network. MQTT is flexible and has a large ecosystem of tools and libraries. Going all in on a single protocol means that we can leverage features like retained messages and last will and testament, and it makes for a simpler and more robust system.
18 |
19 | LoRaWAN gateways can translate messages between the sensors and Tenta. Such gateways could also be implemented to translate from other protocols to MQTT and thus allow connecting sensors communicating with different protocols to a single Tenta instance.
20 |
21 | ## [`0.1`] Structure of measurements
22 |
23 | Measurements are stored in a single EAV table in the database. This table contains an `attribute` column (string) and a `value` column (double-precision floating-point). Measurements arrive as JSON documents at the server, which stores each key-value pair as a separate row. A single measurement consisting of values for temperature and humidity is stored as two rows in the database.
24 |
25 | This approach is simple and flexible. We do not have to define measurement schemas beforehand, and we can store different kinds of measurements in the same table. Additionally, measurements can evolve over time without downtime or changes to the database schema.
26 |
27 | In previous alpha versions, measurements were stored as JSON documents. However, this approach made it difficult to query and aggregate the data. Additionally, our processing pipeline had to deal with a multitude of different measurement formats, which added a lot of complexity. The EAV schema allows us to export, query, and aggregate the data in a structured way while keeping most of the flexibility of the JSON schema. Additionally, it allows us to show real-time charts and statistics on the dashboard.
28 |
29 | The downside of the EAV approach is that all measurements are stored as floating-point numbers. We can represent integers and booleans with doubles, but we cannot store strings or binary objects. We have not yet found this to be a problem in practice. We might be able to support strings and binary objects in the future with a `bytea` column in a separate table.
30 |
31 | ## [`0.1`] Configurations as JSON documents
32 |
33 | Configurations are JSON documents that are stored and relayed to the sensors without changes. This makes configurations highly flexible and allows them to evolve over time. Tenta does not validate configurations before relaying them to the sensors (except for the fact that they are valid JSON documents). Sensors should be implemented as self-sufficient systems that validate and accept/reject configurations themselves.
34 |
35 | What we need to know is whether a sensor has received a configuration and whether it could be implemented successfully. A structurally valid configuration is not guaranteed to be implemented successfully by a sensor: the MQTT broker might be offline, the sensor might have a bug in its firmware that makes the update fail, etc.
36 |
37 | Instead, Tenta implements an acknowledgment cycle. When a sensor receives a new configuration, it can reply with an acknowledgment stating whether the update was successful or not. Configurations are validated on the sensors, which is the only place where it can be done reliably.
38 |
39 | ## [`0.1`] Configuration revisions
40 |
41 | The server assigns each configuration a monotonically increasing revision number. This allows us to associate measurements and logs with the configuration that was active at the time. This is often important during analysis. Revision numbers are also used during the acknowledgment cycle to detect if a sensor has received a configuration and whether it could be implemented successfully.
42 |
43 | ## [`0.1`] PostgreSQL+TimescaleDB
44 |
45 | Tenta stores all data in a PostgreSQL database with the TimescaleDB extension. Apart from time series data like measurements, we need to store highly relational information, e.g., about users, networks, or session tokens.
46 |
47 | TimescaleDB allows us to optimize certain tables for time series data while still being able to use most of the features of PostgreSQL. Compared to other time-series databases (e.g., ClickHouse, InfluxDB), we can store time series and relational data in a single data store. This makes our setup simple and robust. We have heard of scaling issues with TimescaleDB; we have not yet encountered any and will only react to them when we do.
48 |
--------------------------------------------------------------------------------
/docs/pages/export.mdx:
--------------------------------------------------------------------------------
1 | import { Callout } from "nextra/components";
2 |
3 | # Working with the data
4 |
5 | Measurements make up the overwhelming majority of the data in the system. They must be flexible enough to allow for different sensors and measurement formats and must be able to evolve over time without downtime. At the same time, they must be efficient to store and query and structured enough for easy export and analysis.
6 |
7 | ## Messages, measurements, and data points
8 |
9 | Sensors send MQTT messages to the server. Each message contains one or more measurements. Measurements contain one or more data points, which consist of an attribute (e.g., `temperature`) and a value (e.g., `23.1`). Each data point is associated with the time Tenta received it, as well as the time it was measured and, optionally, the revision that was in use at the time.
10 |
11 | Values are sent as JSON numbers and stored as 64-bit double-precision floating-points. You cannot send strings or binary objects. This is a compromise between flexibility and efficiency that we've rarely found to be limiting in practice. Note that you can represent integers and booleans as floats.
12 |
13 | ## Evolving measurement formats
14 |
15 | You can freely evolve a sensor's measurement format by sending new or dropping old attributes. Changing a sensor's measurement format does not require changes to Tenta. Sensors can also use multiple different measurement formats at the same time. Similarly, sensors can send partial measurements, i.e., measurements that only contain a subset of attributes.
16 |
17 | Note that although measurements are very flexible, once they are stored in the database, they are immutable. You cannot rename attributes, but you can always add new attributes. If you need to update values retroactively, your best option is to do this during processing.
18 |
19 | ---
20 |
21 | ## Export
22 |
23 | The server stores measurements in the `measurement` table. This table has the following columns:
24 |
25 | - **`sensor_identifier`** (`UUID`): The identifier of the sensor that made the measurement.
26 | - **`attribute`** (`TEXT`): The name of the attribute that was measured.
27 | - **`value`** (`DOUBLE PRECISION`): The value of the measurement.
28 | - **`revision`** (`INT`): The revision of the configuration associated with the measurement.
29 | - **`creation_timestamp`** (`TIMESTAMPTZ`): The time at which the measurement was made.
30 | - **`receipt_timestamp`** (`TIMESTAMPTZ`): The time at which the server received the measurement.
31 |
32 | The most flexible way to process these measurements further is to download them locally. You can access the database from any programming language and with any PostgreSQL client library.
33 |
34 | For Python, the [connector-x](https://github.com/sfu-db/connector-x) and [pyarrow](https://github.com/apache/arrow) libraries are a powerful combination:
35 |
36 | ```python
37 | import connectorx as cx
38 | import pyarrow.parquet
39 | import pathlib
40 |
41 |
42 | # Read the data from PostgreSQL (here: our development instance)
43 | table = cx.read_sql(
44 | conn="postgresql://postgres:12345678@localhost:5432/database", # PostgreSQL connection string
45 | query="SELECT * FROM measurement ORDER BY creation_timestamp DESC LIMIT 256",
46 | return_type="arrow2",
47 | protocol="binary",
48 | )
49 | # Use the directory of the script as path for the file
50 | path = pathlib.Path(__file__).parent.resolve() / "measurements.parquet"
51 | # Write to parquet file
52 | pyarrow.parquet.write_table(table, where=path)
53 | ```
54 |
55 | You can then read and process this `parquet` file with your preferred tool, e.g., with [polars](https://github.com/pola-rs/polars):
56 |
57 | ```python
58 | import pathlib
59 | import polars
60 |
61 |
62 | # Use the directory of the script as path for the file
63 | path = pathlib.Path(__file__).parent.resolve() / "measurements.parquet"
64 | # Load the parquet file into polars
65 | dataframe = polars.read_parquet(path)
66 | # (optional) Transform the (attribute, value) columns into one column per attribute
67 | dataframe.pivot(
68 | values='value',
69 | index=['sensor_identifier', 'revision', 'creation_timestamp'],
70 | columns='attribute',
71 | aggregate_function='first',
72 | )
73 | ```
74 |
75 | You can access the other tables in the same way, e.g. to explore configurations and logs. Please refer to the [database schema](https://github.com/iterize/tenta/blob/main/server/schema.sql) for exact details on the available tables and columns.
76 |
77 |
78 | If your sensors send their current configuration's revision number with each
79 | measurement, you can join the `measurement` and `configuration` tables on
80 | `(sensor_identifier, revision)` to match each measurement with the associated
81 | configuration.
82 |
83 |
84 | ## Database access control
85 |
86 | The server should be the only user with write access to the database. If you want to give other people read access to the data, you should create a read-only user:
87 |
88 | ```sql
89 | CREATE ROLE reader WITH LOGIN PASSWORD '12345678';
90 | GRANT CONNECT ON DATABASE database TO reader;
91 | GRANT USAGE ON SCHEMA public TO reader;
92 | -- Grant read-only access to all tables in the public schema
93 | GRANT SELECT ON ALL TABLES IN SCHEMA public TO reader;
94 | ```
95 |
96 |
97 | See the [PostgreSQL documentation](https://www.postgresql.org/docs/) for more
98 | details on managing users and permissions.
99 |
100 |
101 | To restrict read-only access to certain networks, sensors, or attributes, you can use views. Instead of granting the `reader` user access to all tables, we can grant access only to measurements from a certain sensor:
102 |
103 | ```sql
104 | CREATE VIEW measurement_single_sensor AS
105 | SELECT *
106 | FROM measurement
107 | WHERE sensor_identifier = '81bf7042-e20f-4a97-ac44-c15853e3618f';
108 | -- Grant read-only access only to the view
109 | GRANT SELECT ON measurement_single_sensor TO reader;
110 | ```
111 |
--------------------------------------------------------------------------------
/docs/pages/index.mdx:
--------------------------------------------------------------------------------
1 | # Tenta
2 |
3 | 
4 |
5 | Tenta allows you to manage sensors remotely and in real-time:
6 |
7 | - Collect and store measurements and logs from sensors
8 | - Configure sensors remotely
9 | - Monitor sensors in real-time with an intuitive dashboard
10 |
11 | Tenta is lightweight and composable. It is designed to be used as a building block in your IoT stack, together with other awesome tools like [Grafana](https://grafana.com/), [DuckDB](https://duckdb.org/), or [polars](https://www.pola.rs/). Sensors connect to Tenta over a language-independent MQTT interface.
12 |
13 | _Read the documentation at [tenta.onrender.com](https://tenta.onrender.com/)_
14 |
15 | ## More
16 |
17 | **Publication:** [](https://joss.theoj.org/papers/5daf8d2d13c01da24e949c20a08d29d0)
18 |
19 | **License:** Tenta is licensed under the [MIT License](https://github.com/iterize/tenta/blob/main/LICENSE).
20 |
21 | **Research:** We are open for collaborations! If you want to use Tenta in your research, don't hesitate to reach out to contact@iterize.dev. We are happy to help you get started and provide support.
22 |
23 | **Contributing:** We are happy about contributions to Tenta! You can start by reading [our contribution guide](https://tenta.onrender.com/contribute).
24 |
25 | **Versioning:** Tenta's MQTT, HTTP, and database interfaces adhere to Semantic Versioning. Changes will be tracked in release notes. Please expect breaking changes until we reach version 1.0.0.
26 |
--------------------------------------------------------------------------------
/docs/pages/introduction.mdx:
--------------------------------------------------------------------------------
1 | # Introduction
2 |
3 | This documentation aims to cover everything you need to know to use Tenta in your projects.
4 |
5 | If you get stuck somewhere or if you have any questions, don't hesitate to open a discussion on GitHub! If you find a bug, we're happy if you open an issue. For more insight into MQTT than we can provide here, the [HiveMQ MQTT Essentials guide](https://www.hivemq.com/mqtt-essentials/) is a great introduction.
6 |
7 | When you're new to a project, you have a very valuable view of the documentation: You can spot ambiguities and unclear explanations much better than maintainers! If you find an error somewhere or if you feel there's anything that can improve these docs, please don't hesitate to open an issue or a pull request on GitHub.
8 |
9 | All right, enough chit-chat, let's dive in! 🤿
10 |
--------------------------------------------------------------------------------
/docs/pages/mqtt.mdx:
--------------------------------------------------------------------------------
1 | # MQTT API
2 |
3 | The communication between the sensors and the server runs over four MQTT topics:
4 |
5 | - `measurements/`: Measurements from sensors
6 | - `logs/`: Logs from sensors
7 | - `configurations/`: Configurations to sensors
8 | - `acknowledgments/`: Configuration acknowledgments from sensors
9 |
10 | To ensure that messages arrive reliably, sensors should publish and subscribe with Quality of Service (QoS) level `1`. This ensures that messages are delivered at least once. The server publishes configurations as retained messages. This means that when sensors are offline, the MQTT broker retains the latest configuration and relays it to the sensors when they reconnect and subscribe to their configurations topic.
11 |
12 | ## Payloads
13 |
14 | The payloads are JSON encoded and have the following structure:
15 |
16 | **`measurements/`:**
17 |
18 | ```json
19 | // Array structure allows batching messages
20 | [
21 | {
22 | "value": {
23 | // Data points have type double
24 | "temperature": 23.1,
25 | "humidity": 0.62
26 | },
27 | "timestamp": 1683645000.0,
28 | "revision": 0 // Optional
29 | }
30 | ]
31 | ```
32 |
33 | **`logs/`:**
34 |
35 | ```json
36 | // Array structure allows batching messages
37 | [
38 | {
39 | "message": "The CPU is burning; Please call the fire department.",
40 | "severity": "error", // One of: info, warning, error
41 | "timestamp": 1683645000.0,
42 | "revision": 0 // Optional
43 | }
44 | ]
45 | ```
46 |
47 | **`configurations/`:**
48 |
49 | ```json
50 | {
51 | "configuration": {}, // Can be any valid JSON object
52 | "revision": 0
53 | }
54 | ```
55 |
56 | **`acknowledgments/`:**
57 |
58 | ```json
59 | // Array structure allows batching messages
60 | [
61 | {
62 | "success": true, // Did the sensor successfully process the configuration?
63 | "timestamp": 1683645000.0,
64 | "revision": 0
65 | }
66 | ]
67 | ```
68 |
--------------------------------------------------------------------------------
/docs/pages/overview.mdx:
--------------------------------------------------------------------------------
1 | # System overview
2 |
3 | Tenta consists of a server and a dashboard. The server communicates with the sensors via an intermediate MQTT broker and exposes a REST API for the dashboard. Data is stored in a PostgreSQL+TimescaleDB database.
4 |
5 | {/* TODO: Introduce demo instance here that can be used for all following examples */}
6 |
7 | 
8 |
--------------------------------------------------------------------------------
/docs/pages/roadmap.mdx:
--------------------------------------------------------------------------------
1 | # Roadmap
2 |
3 |
4 |
5 | - [x] ~Live charts of measurements on the dashboard~
6 | - [x] ~Optimize dashboard for mobile~
7 | - [ ] Indicator if sensors are currently connected to the MQTT broker
8 | - [ ] Associate lon/lat/alt with each measurement and display this data in the dashboard
9 | - [ ] User management: add/remove users to/from a network, different roles
10 | - [ ] Demo instance to make Tenta easy to try out
11 | - [ ] Multi-tenancy: isolate data from different networks/sensors
12 | - [ ] Tagging system to record metadata (e.g., nearby construction work, changes to the hardware, etc.)
13 | - [ ] Compress data in the database to use less storage
14 |
15 | ---
16 |
17 | If you have a feature in mind that's not listed here, don't hesitate to open a [discussion on GitHub](https://github.com/iterize/tenta/discussions)! 🍰
18 |
--------------------------------------------------------------------------------
/docs/postcss.config.js:
--------------------------------------------------------------------------------
1 | // If you want to use other PostCSS plugins, see the following:
2 | // https://tailwindcss.com/docs/using-with-preprocessors
3 | /** @type {import('postcss').Postcss} */
4 | module.exports = {
5 | plugins: {
6 | "postcss-import": {},
7 | "tailwindcss/nesting": {},
8 | tailwindcss: {},
9 | autoprefixer: {},
10 | },
11 | };
12 |
--------------------------------------------------------------------------------
/docs/public/architecture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iterize/tenta/48274dd016049a9cb4202c7cb7aebf861a8d50ce/docs/public/architecture.png
--------------------------------------------------------------------------------
/docs/public/banner.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iterize/tenta/48274dd016049a9cb4202c7cb7aebf861a8d50ce/docs/public/banner.png
--------------------------------------------------------------------------------
/docs/style.css:
--------------------------------------------------------------------------------
1 | @tailwind base;
2 | @tailwind components;
3 | @tailwind utilities;
4 |
5 | :root {
6 | --nextra-navbar-height: 3.25rem;
7 |
8 | .nextra-banner-container {
9 | background: #f59e0b !important;
10 | }
11 | }
12 |
13 | .nx-duration-500 {
14 | transition-duration: 100ms;
15 | }
16 |
17 | .nx-transition-colors {
18 | transition-duration: 50ms;
19 | }
20 |
21 | footer div.nx-py-12 {
22 | @apply py-3 font-semibold text-sm text-gray-800 items-center justify-center;
23 | }
24 |
--------------------------------------------------------------------------------
/docs/tailwind.config.js:
--------------------------------------------------------------------------------
1 | /** @type {import('tailwindcss').Config} */
2 | module.exports = {
3 | darkMode: "class",
4 | content: [
5 | "./pages/**/*.{js,ts,jsx,tsx,mdx}",
6 | "./theme.config.jsx",
7 | "./style.css",
8 | ],
9 | theme: {
10 | extend: {
11 | fontFamily: {
12 | serif: ["var(--next-font-google-crimson-pro)", "serif"],
13 | },
14 | fontSize: {
15 | "2xs": ["0.625rem", "0.75rem"],
16 | },
17 | },
18 | },
19 | plugins: [],
20 | darkMode: "class",
21 | };
22 |
--------------------------------------------------------------------------------
/docs/theme.config.jsx:
--------------------------------------------------------------------------------
1 | export default {
2 | logo: (
3 |
4 |
5 |
20 |
21 |
22 | Tenta
23 | Documentation
24 |
25 |
26 | ),
27 | project: {
28 | link: "https://github.com/iterize/tenta",
29 | icon: (
30 |
49 | ),
50 | },
51 | docsRepositoryBase: "https://github.com/iterize/tenta/tree/main/docs",
52 | //primaryHue: 43,
53 | navigation: true,
54 | useNextSeoProps() {
55 | return {
56 | titleTemplate: "%s – Tenta",
57 | };
58 | },
59 | head: (
60 | <>
61 |
62 |
63 |
67 | >
68 | ),
69 | footer: {
70 | text: (
71 | © Felix Böhm and Moritz Makowski, {new Date().getFullYear()}
72 | ),
73 | },
74 | faviconGlyph: "🏔️",
75 | /*banner: {
76 | key: "v1.0.0-release",
77 | text: "🎉 Tenta v0.1.0 has been released",
78 | },*/
79 | toc: {
80 | float: true,
81 | },
82 | };
83 |
--------------------------------------------------------------------------------
/docs/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "lib": ["dom", "dom.iterable", "esnext"],
4 | "allowJs": true,
5 | "skipLibCheck": true,
6 | "strict": false,
7 | "forceConsistentCasingInFileNames": true,
8 | "noEmit": true,
9 | "incremental": true,
10 | "esModuleInterop": true,
11 | "module": "esnext",
12 | "moduleResolution": "node",
13 | "resolveJsonModule": true,
14 | "isolatedModules": true,
15 | "jsx": "preserve"
16 | },
17 | "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx"],
18 | "exclude": ["node_modules"]
19 | }
--------------------------------------------------------------------------------
/publication/images/architecture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iterize/tenta/48274dd016049a9cb4202c7cb7aebf861a8d50ce/publication/images/architecture.png
--------------------------------------------------------------------------------
/publication/images/configurations.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iterize/tenta/48274dd016049a9cb4202c7cb7aebf861a8d50ce/publication/images/configurations.png
--------------------------------------------------------------------------------
/publication/images/screenshot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iterize/tenta/48274dd016049a9cb4202c7cb7aebf861a8d50ce/publication/images/screenshot.png
--------------------------------------------------------------------------------
/publication/paper.bib:
--------------------------------------------------------------------------------
1 | @article{dietrich2021muccnet,
2 | title={MUCCnet: Munich urban carbon column network},
3 | author={Dietrich, Florian and Chen, Jia and Voggenreiter, Benno and Aigner, Patrick and Nachtigall, Nico and Reger, Bj{\"o}rn},
4 | journal={Atmospheric Measurement Techniques},
5 | volume={14},
6 | number={2},
7 | pages={1111--1126},
8 | year={2021},
9 | publisher={Copernicus GmbH},
10 | doi={10.5194/amt-14-1111-2021}
11 | }
12 | @proceedings{midcost,
13 | title={Advancing Urban Greenhouse Gas Monitoring: Development and Evaluation of a High-Density CO\textsubscript{2} Sensor Network in {Munich}},
14 | author={Aigner, Patrick and K{\"u}bacher, Daniel and Wenzel, Adrian and Schmitt, Adrian and B{\"o}hm, Felix and Makowski, Moritz and Angleitner, Moritz and K{\"u}rzinger, Klaus and Laurent, Olivier and Rubli, Pascal and Grange, Stuart and Emmenegger, Lukas and Chen, Jia},
15 | booktitle={ICOS Science Conference},
16 | year={2024},
17 | }
18 | @article{muller2020integration,
19 | title={Integration and calibration of non-dispersive infrared (NDIR) CO\textsubscript{2} low-cost sensors and their operation in a sensor network covering {Switzerland}},
20 | author={M{\"u}ller, Michael and Graf, Peter and Meyer, Jonas and Pentina, Anastasia and Brunner, Dominik and Perez-Cruz, Fernando and H{\"u}glin, Christoph and Emmenegger, Lukas},
21 | journal={Atmospheric Measurement Techniques},
22 | volume={13},
23 | number={7},
24 | pages={3815--3834},
25 | year={2020},
26 | publisher={Copernicus GmbH},
27 | doi={10.5194/amt-13-3815-2020}
28 | }
29 | @article{zweifel2021trees,
30 | title={Why trees grow at night},
31 | author={Zweifel, Roman and Sterck, Frank and Braun, Sabine and Buchmann, Nina and Eugster, Werner and Gessler, Arthur and H{\"a}ni, Matthias and Peters, Richard L and Walthert, Lorenz and Wilhelm, Micah and others},
32 | journal={New Phytologist},
33 | volume={231},
34 | number={6},
35 | pages={2174--2185},
36 | year={2021},
37 | publisher={Wiley Online Library},
38 | doi={10.1111/nph.17552}
39 | }
40 | @inproceedings{zhang2004hardware,
41 | title={Hardware design experiences in ZebraNet},
42 | author={Zhang, Pei and Sadler, Christopher M and Lyon, Stephen A and Martonosi, Margaret},
43 | booktitle={Proceedings of the 2nd international conference on Embedded networked sensor systems},
44 | pages={227--238},
45 | year={2004},
46 | doi={10.1145/1031495.1031522}
47 | }
48 | @article{werner2006deploying,
49 | title={Deploying a wireless sensor network on an active volcano},
50 | author={Werner-Allen, Geoffrey and Lorincz, Konrad and Ruiz, Mario and Marcillo, Omar and Johnson, Jeff and Lees, Jonathan and Welsh, Matt},
51 | journal={IEEE internet computing},
52 | volume={10},
53 | number={2},
54 | pages={18--25},
55 | year={2006},
56 | publisher={IEEE},
57 | doi={10.1109/MIC.2006.26}
58 | }
59 | @inproceedings{wenzel2021stand,
60 | title={Stand-alone low-cost sensor network in the inner city of {Munich} for modeling urban air pollutants},
61 | author={Wenzel, Adrian and Chen, Jia and Dietrich, Florian and Thekkekara, Sebastian T and Zollitsch, Daniel and Voggenreiter, Benno and Setili, Luca and Wenig, Mark and Keutsch, Frank N},
62 | booktitle={EGU General Assembly Conference Abstracts},
63 | pages={EGU21--15182},
64 | year={2021},
65 | doi={10.5194/egusphere-egu21-15182}
66 | }
67 | @software{frost,
68 | title = {FROST Server},
69 | author = {van der Schaaf, Hylke},
70 | journal = {GitHub repository},
71 | publisher = {GitHub},
72 | url = {https://github.com/FraunhoferIOSB/FROST-Server},
73 | version = {2.3.1},
74 | date = {2024-03-08}
75 | }
76 | @software{polars,
77 | doi = {10.5281/zenodo.7697217},
78 | url = {https://zenodo.org/doi/10.5281/zenodo.7697217},
79 | author = {Ritchie Vink, and Stijn de Gooijer, and Alexander Beedie, and Marco Edward Gorelli, and nameexhaustion, and Orson Peters, and Gijs Burghoorn, and Weijie Guo, and J van Zundert, and Gert Hulselmans, and Cory Grinstead, and Marshall, and chielP, and Itamar Turner-Trauring, and Lawrence Mitchell, and Luke Manley, and Matteo Santamaria, and Daniël Heres, and Henry Harbeck, and Josh Magarick, and Karl Genockey, and ibENPC, and deanm0000, and Ion Koutsouris, and Moritz Wilksch, and eitsupi, and Jorge Leitao, and Mick van Gelderen, and Rodrigo Girão Serrão, },
80 | title = {pola-rs/polars: Python Polars 1.19.0},
81 | publisher = {Zenodo},
82 | year = {2025},
83 | month = {jan},
84 | version = {py-1.19.0}
85 | }
86 | @software{grafana,
87 | title = {Grafana},
88 | author = {Torkel {\"O}degaard},
89 | journal = {GitHub repository},
90 | publisher = {GitHub},
91 | url = {https://github.com/grafana/grafana},
92 | version = {10.4.1},
93 | date = {2024-03-21}
94 | }
95 | @software{timescale,
96 | title = {TimescaleDB},
97 | author = {{Timescale}},
98 | journal = {GitHub repository},
99 | publisher = {GitHub},
100 | url = {https://github.com/timescale/timescaledb},
101 | version = {2.14.2},
102 | date = {2024-02-20}
103 | }
104 | @software{thingsboard,
105 | title = {ThingsBoard},
106 | author = {{ThingsBoard}},
107 | journal = {GitHub repository},
108 | publisher = {GitHub},
109 | url = {https://github.com/thingsboard/thingsboard},
110 | version = {3.6.3},
111 | date = {2024-03-18}
112 | }
113 | @software{thingspeak,
114 | title = {ThingSpeak},
115 | author = {Lee Lawlor},
116 | journal = {GitHub repository},
117 | publisher = {GitHub},
118 | url = {https://github.com/iobridge/thingspeak},
119 | year = {2011}
120 | }
121 | @software{postgres,
122 | title = {PostgreSQL},
123 | author = {{PostgreSQL Global Development Group}},
124 | journal = {GitHub repository},
125 | publisher = {GitHub},
126 | url = {https://github.com/postgres/postgres},
127 | version = {16.2.0},
128 | date = {2024-02-08}
129 | }
130 | @article{bart2014high,
131 | title={High density ozone monitoring using gas sensitive semi-conductor sensors in the {Lower Fraser Valley, British Columbia}},
132 | author={Bart, Mark and Williams, David E and Ainslie, Bruce and McKendry, Ian and Salmond, Jennifer and Grange, Stuart K and Alavi-Shoshtari, Maryam and Steyn, Douw and Henshaw, Geoff S},
133 | journal={Environmental science \& technology},
134 | volume={48},
135 | number={7},
136 | pages={3970--3977},
137 | year={2014},
138 | publisher={ACS Publications},
139 | doi={10.1021/es404610t}
140 | }
141 | @inproceedings{tolle2005design,
142 | title={Design of an application-cooperative management system for wireless sensor networks},
143 | author={Tolle, Gilman and Culler, David},
 144 | booktitle={Proceedings of the Second European Workshop on Wireless Sensor Networks, 2005.},
145 | pages={121--132},
146 | year={2005},
147 | organization={IEEE},
148 | doi={10.1109/EWSN.2005.1462004}
149 | }
150 | @article{burri2019did,
151 | title={How did {Swiss} forest trees respond to the hot summer 2015?},
152 | author={Burri, Susanne and Haeler, Elena and Eugster, Werner and Haeni, Matthias and Etzold, Sophia and Walthert, Lorenz and Braun, Sabine and Zweifel, Roman},
153 | journal={Die Erde},
154 | volume={150},
155 | number={4},
156 | pages={214--229},
157 | year={2019},
158 | publisher={Gesellschaft f{\"u}r Erdkunde zu Berlin},
159 | doi={10.12854/erde-2019-420}
160 | }
161 | @article{tavakolizadeh2021thing,
162 | title={Thing Directory: Simple and lightweight registry of IoT device metadata},
163 | author={Tavakolizadeh, Farshid and Devasya, Shreekantha},
164 | journal={Journal of Open Source Software},
165 | volume={6},
166 | number={60},
167 | pages={3075},
168 | year={2021},
169 | doi={10.21105/joss.03075}
170 | }
171 | @software{ivy,
172 | author = {Makowski, Moritz and Böhm, Felix and Chen, Jia and Aigner, Patrick},
173 | title = {Ivy: A Data Acquisition System for Distributed Sensor Networks Supporting Remote Configuration and Software Updates},
174 | month = dec,
175 | year = 2024,
176 | publisher = {Zenodo},
177 | version = {1.0.0},
178 | doi = {10.5281/zenodo.14562882},
179 | url = {https://doi.org/10.5281/zenodo.14562882},
180 | }
181 |
--------------------------------------------------------------------------------
/server/.env.example:
--------------------------------------------------------------------------------
1 | ENVIRONMENT=production
2 |
3 | # PostgreSQL credentials
4 | POSTGRESQL_HOSTNAME=www.example.com
5 | POSTGRESQL_PORT=5432
6 | POSTGRESQL_IDENTIFIER=username
7 | POSTGRESQL_PASSWORD=12345678
8 | POSTGRESQL_DATABASE=database
9 |
10 | # MQTT credentials
11 | MQTT_HOSTNAME=www.example.com
12 | MQTT_PORT=8883
13 | MQTT_IDENTIFIER=username
14 | MQTT_PASSWORD=12345678
15 |
--------------------------------------------------------------------------------
/server/.gitignore:
--------------------------------------------------------------------------------
1 | notebooks
2 | scripts/deploy
3 |
--------------------------------------------------------------------------------
/server/.python-version:
--------------------------------------------------------------------------------
1 | 3.11
2 |
--------------------------------------------------------------------------------
/server/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.11.0-slim
2 |
3 | LABEL maintainer="Felix Böhm "
 4 | LABEL source="https://github.com/iterize/tenta"
5 |
6 | # Install poetry
7 | ENV POETRY_VERSION=1.4.2 \
8 | POETRY_HOME=/opt/poetry
9 | RUN python -m venv $POETRY_HOME && \
10 | $POETRY_HOME/bin/pip install poetry==${POETRY_VERSION}
11 | ENV PATH="${PATH}:${POETRY_HOME}/bin"
12 |
13 | # Copy dependency information
14 | COPY pyproject.toml poetry.lock /
15 |
16 | # Install dependencies
17 | RUN poetry config virtualenvs.create false && \
18 | poetry install --no-root --no-ansi --no-interaction
19 |
20 | EXPOSE 8421
21 |
22 | # Read commit hash and branch name as build arguments
23 | ARG commit_sha branch_name
24 | LABEL commit_sha=${commit_sha} branch_name=${branch_name}
25 | ENV COMMIT_SHA=${commit_sha} BRANCH_NAME=${branch_name}
26 |
27 | COPY /app /app
28 |
29 | CMD poetry run uvicorn app.main:app --host 0.0.0.0 --port 8421 --log-level info
30 |
--------------------------------------------------------------------------------
/server/app/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iterize/tenta/48274dd016049a9cb4202c7cb7aebf861a8d50ce/server/app/__init__.py
--------------------------------------------------------------------------------
/server/app/auth.py:
--------------------------------------------------------------------------------
1 | import enum
2 | import hashlib
3 | import logging
4 | import secrets
5 |
6 | import passlib.context
7 | import starlette.authentication
8 | import starlette.requests
9 |
10 | import app.database as database
11 | import app.errors as errors
12 |
13 |
14 | logger = logging.getLogger(__name__)
15 |
16 |
17 | ########################################################################################
18 | # Password Utilities
19 | ########################################################################################
20 |
21 |
22 | _CONTEXT = passlib.context.CryptContext(schemes=["argon2"], deprecated="auto")
23 |
24 |
def hash_password(password):
    """Return the argon2 hash of the given plaintext password as a string."""
    hashed = _CONTEXT.hash(password)
    return hashed
28 |
29 |
def verify_password(password, password_hash):
    """Return True if the password matches the given hash, else False."""
    matches = _CONTEXT.verify(password, password_hash)
    return matches
33 |
34 |
35 | ########################################################################################
36 | # Token Utilities
37 | ########################################################################################
38 |
39 |
def generate_token():
    """Return a cryptographically random hex string for use as an access token."""
    # 32 random bytes -> 64 hex characters
    return secrets.token_hex(nbytes=32)
43 |
44 |
def hash_token(token):
    """Return the hex-encoded SHA-512 digest of the given token string."""
    digest = hashlib.sha512(token.encode("utf-8"))
    return digest.hexdigest()
48 |
49 |
50 | ########################################################################################
51 | # Authentication middleware
52 | ########################################################################################
53 |
54 |
class AuthenticationMiddleware:
    """Validates Bearer authorization headers and provides the requester's identity.

    The structure is adapted from Starlette's own AuthenticationMiddleware class.
    """

    def __init__(self, app):
        self.app = app

    async def _authenticate(self, request):
        """Return the user identifier for a valid access token, else None."""
        header = request.headers.get("authorization")
        if header is None:
            return None
        parts = header.split()
        if len(parts) != 2:
            logger.warning("Malformed authorization header")
            return None
        scheme, access_token = parts
        if scheme.lower() != "bearer":
            logger.warning("Malformed authorization header")
            return None
        # Look up the hashed access token in the database
        query, arguments = database.parametrize(
            identifier="authenticate",
            arguments={"access_token_hash": hash_token(access_token)},
        )
        elements = database.dictify(
            await request.state.dbpool.fetch(query, *arguments)
        )
        # An empty result set means the access token is invalid
        if not elements:
            logger.warning("Invalid access token")
            return None
        return elements[0]["user_identifier"]

    async def __call__(self, scope, receive, send):
        # Non-HTTP events (e.g. websockets) pass through untouched
        if scope["type"] != "http":
            await self.app(scope, receive, send)
            return
        # Authenticate and pass the result through to the route
        request = starlette.requests.Request(scope)
        request.state.identity = await self._authenticate(request)
        await self.app(scope, receive, send)
99 |
100 |
101 | ########################################################################################
102 | # Authorization helpers
103 | ########################################################################################
104 |
105 |
@enum.unique
class Relationship(enum.IntEnum):
    # IntEnum: members compare as plain integers, ordered by increasing privilege
    NONE = 0  # The requester is not authenticated
    DEFAULT = 1  # The requester is authenticated, but no relationship exists
    OWNER = 2  # The requester is authenticated and owns the resource
111 |
112 |
class Resource:
    """Base class for resources that need authorization checks."""

    def __init__(self, identifier):
        self.identifier = identifier

    async def _authorize(self, request):
        """Return the relationship between the requester and the resource.

        Subclasses must override this with their resource-specific check.
        """
        raise NotImplementedError
122 |
123 |
class User(Resource):
    """A user account; the requester owns it iff it is their own account."""

    async def _authorize(self, request):
        identity = request.state.identity
        if identity is None:
            return Relationship.NONE
        return (
            Relationship.OWNER
            if identity == self.identifier
            else Relationship.DEFAULT
        )
131 |
132 |
class Network(Resource):
    """A sensor network; ownership is resolved via a database lookup."""

    async def _authorize(self, request):
        if request.state.identity is None:
            return Relationship.NONE
        query, arguments = database.parametrize(
            identifier="authorize-resource-network",
            arguments={
                "user_identifier": request.state.identity,
                "network_identifier": self.identifier,
            },
        )
        records = database.dictify(
            await request.state.dbpool.fetch(query, *arguments)
        )
        # An empty result set means the network does not exist at all
        if not records:
            raise errors.NotFoundError
        # A NULL user identifier in the row means no ownership relationship
        if records[0]["user_identifier"] is None:
            return Relationship.DEFAULT
        return Relationship.OWNER
153 |
154 |
class Sensor(Resource):
    """A sensor; `identifier` is a dict with network and sensor identifiers."""

    async def _authorize(self, request):
        if request.state.identity is None:
            return Relationship.NONE
        query, arguments = database.parametrize(
            identifier="authorize-resource-sensor",
            arguments={
                "user_identifier": request.state.identity,
                "network_identifier": self.identifier["network_identifier"],
                "sensor_identifier": self.identifier["sensor_identifier"],
            },
        )
        records = database.dictify(
            await request.state.dbpool.fetch(query, *arguments)
        )
        # An empty result set means the sensor does not exist at all
        if not records:
            raise errors.NotFoundError
        # A NULL user identifier in the row means no ownership relationship
        if records[0]["user_identifier"] is None:
            return Relationship.DEFAULT
        return Relationship.OWNER
176 |
177 |
async def authorize(request, resource):
    """Check what relationship (ReBAC) the requester has with the resource.

    Delegates to the resource's own `_authorize` check and logs the outcome.
    """
    relationship = await resource._authorize(request)
    # Lazy %-formatting: the message is only rendered if DEBUG is enabled
    logger.debug("Requester has %s relationship", relationship.name)
    return relationship
183 |
--------------------------------------------------------------------------------
/server/app/database.py:
--------------------------------------------------------------------------------
1 | import contextlib
2 | import json
3 | import os
4 | import string
5 |
6 | import asyncpg
7 | import pendulum
8 |
9 | import app.settings as settings
10 |
11 |
def prepare():
    """Load SQL queries from the adjacent `queries.sql` file.

    Returns a dict mapping each query's name to its SQL text. Statements are
    separated by two blank lines and begin with a `-- name: <identifier>` line.
    """
    path = os.path.join(os.path.dirname(__file__), "queries.sql")
    with open(path, "r") as file:
        statements = file.read().split("\n\n\n")
    # Validate format: every statement carries exactly one name header
    assert all(statement.startswith("-- name: ") for statement in statements)
    assert not any("\n-- name: " in statement for statement in statements)
    result = {}
    for statement in statements:
        header, sql = statement.split("\n", 1)
        result[header[len("-- name: "):]] = sql
    return result
23 |
24 |
25 | queries = prepare()
26 |
27 |
def parametrize(identifier, arguments):
    """Return the query and translate named arguments into valid PostgreSQL.

    `arguments` is either a single dict (single execution) or a list of dicts
    (batch execution). Named `$key` placeholders in the SQL template are
    rewritten to asyncpg's positional `$1, $2, ...` style.

    Returns a `(query, arguments)` pair where `arguments` is a tuple for a
    single execution or a list of tuples for a batch. Missing keys are filled
    with None; unknown keys raise a ValueError.
    """
    template = string.Template(queries[identifier])
    single = isinstance(arguments, dict)
    # Get a list of the query argument names from the template
    keys = template.get_identifiers()
    # Raise an error if unknown arguments are passed; an empty batch list used
    # to crash here with an IndexError, so skip the check in that case
    provided = arguments.keys() if single else (arguments[0].keys() if arguments else keys)
    if diff := set(provided) - set(keys):
        raise ValueError(f"Unknown query arguments: {diff}")
    # Replace named arguments with native numbered arguments
    query = template.substitute({key: f"${i+1}" for i, key in enumerate(keys)})
    # Build argument tuple(s) and fill missing arguments with None
    if single:
        return query, tuple(arguments.get(key) for key in keys)
    return query, [tuple(x.get(key) for key in keys) for x in arguments]
46 |
47 |
def dictify(elements):
    """Cast an asyncpg SELECT query result into a list of dictionaries."""
    # TODO: implement this as a custom asyncpg record_class on pool?
    # see https://magicstack.github.io/asyncpg/current/api/index.html#connection-pools
    return list(map(dict, elements))
53 |
54 |
async def initialize(connection):
    """Register custom type codecs on a new asyncpg connection (pool `init` hook)."""
    # Automatically encode/decode TIMESTAMPTZ fields to/from unix timestamps
    await connection.set_type_codec(
        typename="timestamptz",
        schema="pg_catalog",
        encoder=lambda x: pendulum.from_timestamp(x).isoformat(),  # float -> ISO 8601
        decoder=lambda x: pendulum.parse(x).float_timestamp,  # ISO 8601 -> float
    )
    # Automatically encode/decode JSONB fields to/from str
    await connection.set_type_codec(
        typename="jsonb",
        schema="pg_catalog",
        encoder=json.dumps,
        decoder=json.loads,
    )
    # Automatically encode/decode UUID fields to/from str
    await connection.set_type_codec(
        typename="uuid",
        schema="pg_catalog",
        encoder=str,
        decoder=str,
    )
77 |
78 |
@contextlib.asynccontextmanager
async def pool():
    """Context manager for asyncpg database pool with custom settings."""
    async with asyncpg.create_pool(
        host=settings.POSTGRESQL_HOSTNAME,
        port=settings.POSTGRESQL_PORT,
        user=settings.POSTGRESQL_IDENTIFIER,
        password=settings.POSTGRESQL_PASSWORD,
        database=settings.POSTGRESQL_DATABASE,
        min_size=2,
        max_size=4,
        max_queries=16384,  # asyncpg replaces a connection after this many queries
        max_inactive_connection_lifetime=300,  # close idle connections after 300s
        init=initialize,  # register the custom type codecs on every new connection
    ) as x:
        yield x
95 |
--------------------------------------------------------------------------------
/server/app/errors.py:
--------------------------------------------------------------------------------
1 | import starlette.exceptions
2 | import starlette.responses
3 |
4 |
5 | ########################################################################################
6 | # Custom starlette error handlers
7 | ########################################################################################
8 |
9 |
async def handle(request, exc):
    """Return JSON instead of the default text/plain for handled exceptions."""
    body = {"details": exc.detail}
    return starlette.responses.JSONResponse(
        content=body,
        status_code=exc.status_code,
        headers=exc.headers,
    )
17 |
18 |
async def panic(request, exc):
    """Return JSON instead of the default text/plain for unhandled errors."""
    return starlette.responses.JSONResponse(  # pragma: no cover
        content={"details": "Internal Server Error"},
        status_code=500,
    )
25 |
26 |
27 | ########################################################################################
28 | # Custom error class to reduce duplication when raising errors
29 | ########################################################################################
30 |
31 |
class _CustomError(starlette.exceptions.HTTPException):
    """Base class; subclasses provide STATUS_CODE and DETAILS class attributes."""

    def __init__(self):
        super().__init__(self.STATUS_CODE, self.DETAILS)
35 |
36 |
37 | ########################################################################################
38 | # Standard HTTP errors
39 | ########################################################################################
40 |
41 |
class BadRequestError(_CustomError):
    """Raised to return HTTP 400 Bad Request."""

    STATUS_CODE = 400
    DETAILS = "Bad Request"
45 |
46 |
class UnauthorizedError(_CustomError):
    """Raised to return HTTP 401 Unauthorized."""

    STATUS_CODE = 401
    DETAILS = "Unauthorized"
50 |
51 |
class ForbiddenError(_CustomError):
    """Raised to return HTTP 403 Forbidden."""

    STATUS_CODE = 403
    DETAILS = "Forbidden"
55 |
56 |
class NotFoundError(_CustomError):
    """Raised to return HTTP 404 Not Found."""

    STATUS_CODE = 404
    DETAILS = "Not Found"
60 |
61 |
class ConflictError(_CustomError):
    """Raised to return HTTP 409 Conflict."""

    STATUS_CODE = 409
    DETAILS = "Conflict"
65 |
--------------------------------------------------------------------------------
/server/app/logs.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import time
3 |
4 | import app.settings as settings
5 |
6 |
class Color:
    """ANSI foreground color escape codes used in log formats."""

    red = "\x1b[31m"
    yellow = "\x1b[33m"
    blue = "\x1b[34m"
    magenta = "\x1b[35m"
    cyan = "\x1b[36m"
    white = "\x1b[38m"
14 |
15 |
class Style:
    """ANSI text style escape codes used in log formats."""

    bold = "\x1b[1m"
    reset = "\x1b[0m"
19 |
20 |
class CustomFormatter(logging.Formatter):
    """Colored log formatter with a per-level format; timestamps are in UTC.

    Layout: timestamp | level | logger:lineno - message; only the level's
    color differs between entries.
    """

    _FMTS = {
        logging.DEBUG: (
            f"{Color.cyan}%(asctime)s{Style.reset} | "
            f"{Style.bold}{Color.blue}%(levelname)-8s{Style.reset} | "
            f"{Color.magenta}%(name)s:%(lineno)s{Style.reset} - %(message)s"
        ),
        logging.INFO: (
            f"{Color.cyan}%(asctime)s{Style.reset} | "
            f"{Style.bold}{Color.white}%(levelname)-8s{Style.reset} | "
            f"{Color.magenta}%(name)s:%(lineno)s{Style.reset} - %(message)s"
        ),
        logging.WARNING: (
            f"{Color.cyan}%(asctime)s{Style.reset} | "
            f"{Style.bold}{Color.yellow}%(levelname)-8s{Style.reset} | "
            f"{Color.magenta}%(name)s:%(lineno)s{Style.reset} - %(message)s"
        ),
        logging.ERROR: (
            f"{Color.cyan}%(asctime)s{Style.reset} | "
            f"{Style.bold}{Color.red}%(levelname)-8s{Style.reset} | "
            f"{Color.magenta}%(name)s:%(lineno)s{Style.reset} - %(message)s"
        ),
        # Was missing: formatting a CRITICAL record raised a KeyError
        logging.CRITICAL: (
            f"{Color.cyan}%(asctime)s{Style.reset} | "
            f"{Style.bold}{Color.red}%(levelname)-8s{Style.reset} | "
            f"{Color.magenta}%(name)s:%(lineno)s{Style.reset} - %(message)s"
        ),
    }

    def __init__(self):
        super().__init__()
        self._formatters = dict()
        for key, value in self._FMTS.items():
            formatter = logging.Formatter(fmt=value, datefmt="%a %Y-%m-%d %H:%M:%S")
            formatter.converter = time.gmtime  # Log in UTC rather than local time
            self._formatters[key] = formatter

    def format(self, record):
        """Format the record with its level's formatter.

        Custom/unknown levels fall back to the ERROR style instead of crashing.
        """
        formatter = self._formatters.get(record.levelno)
        if formatter is None:
            formatter = self._formatters[logging.ERROR]
        return formatter.format(record)
55 |
56 |
def configure():
    """Remove library handlers and handle messages at the root logger for consistency.

    Note that this does not affect the logs of watchfiles, which reloads the server on
    file changes during development. This is because uvicorn cannot load watchfiles
    at runtime, so it's loaded before the server starts. watchfiles is not active in
    production.

    """
    for name in logging.root.manager.loggerDict.keys():
        # Remove all existing handlers and let messages propagate to the root logger
        logging.getLogger(name).handlers = []
        logging.getLogger(name).propagate = True
    # Initialize our handler and custom formatter
    formatter = CustomFormatter()
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    # Assign our handler to the root logger
    logging.root.handlers = [handler]
    # Be more verbose outside of production (test/development environments)
    logging.root.setLevel(
        logging.INFO if settings.ENVIRONMENT == "production" else logging.DEBUG
    )
79 |
--------------------------------------------------------------------------------
/server/app/settings.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import app.utils as utils
4 |
5 |
# All values are read once at import time; a missing environment variable
# raises a KeyError immediately, so misconfiguration fails fast at startup.

# Environment: test, development, production
ENVIRONMENT = os.environ["ENVIRONMENT"]
# Git commit hash
COMMIT_SHA = os.environ["COMMIT_SHA"]
# Git branch name
BRANCH_NAME = os.environ["BRANCH_NAME"]
# Timestamp of server startup
START_TIMESTAMP = utils.timestamp()

# PostgreSQL connection details
POSTGRESQL_HOSTNAME = os.environ["POSTGRESQL_HOSTNAME"]
POSTGRESQL_PORT = int(os.environ["POSTGRESQL_PORT"])
POSTGRESQL_IDENTIFIER = os.environ["POSTGRESQL_IDENTIFIER"]
POSTGRESQL_PASSWORD = os.environ["POSTGRESQL_PASSWORD"]
POSTGRESQL_DATABASE = os.environ["POSTGRESQL_DATABASE"]

# MQTT connection details
MQTT_HOSTNAME = os.environ["MQTT_HOSTNAME"]
MQTT_PORT = int(os.environ["MQTT_PORT"])
MQTT_IDENTIFIER = os.environ["MQTT_IDENTIFIER"]
MQTT_PASSWORD = os.environ["MQTT_PASSWORD"]
27 |
--------------------------------------------------------------------------------
/server/app/utils.py:
--------------------------------------------------------------------------------
1 | import time
2 | import random
3 |
4 |
def timestamp():
    """Return the current UTC time as a unix timestamp (float seconds)."""
    now = time.time()
    return now
8 |
9 |
def backoff():
    """Yield exponentially growing retry intervals with random jitter.

    Starts around 1 second, doubles up to a cap of 256 seconds (roughly five
    minutes), and adds +/- 0.5 seconds of uniform jitter to every interval.
    """
    interval = 1
    while True:
        jitter = random.random() - 0.5
        yield interval + jitter
        interval = min(interval * 2, 256)
17 |
--------------------------------------------------------------------------------
/server/app/validation/__init__.py:
--------------------------------------------------------------------------------
1 | from .mqtt import (
2 | Acknowledgment,
3 | AcknowledgmentsValidator,
4 | Log,
5 | LogsValidator,
6 | Measurement,
7 | MeasurementsValidator,
8 | )
9 | from .routes import (
10 | CreateConfigurationRequest,
11 | CreateNetworkRequest,
12 | CreateSensorRequest,
13 | CreateSessionRequest,
14 | CreateUserRequest,
15 | ReadConfigurationsRequest,
16 | ReadLogsAggregatesRequest,
17 | ReadLogsRequest,
18 | ReadMeasurementsRequest,
19 | ReadNetworksRequest,
20 | ReadSensorsRequest,
21 | ReadStatusRequest,
22 | UpdateSensorRequest,
23 | validate,
24 | )
25 |
26 |
# Explicit public API of `app.validation`; keep in sync with the re-exports above
__all__ = [
    "Acknowledgment",
    "Measurement",
    "Log",
    "AcknowledgmentsValidator",
    "MeasurementsValidator",
    "LogsValidator",
    "CreateSensorRequest",
    "CreateUserRequest",
    "CreateSessionRequest",
    "CreateConfigurationRequest",
    "ReadLogsAggregatesRequest",
    "ReadLogsRequest",
    "ReadConfigurationsRequest",
    "CreateNetworkRequest",
    "ReadMeasurementsRequest",
    "ReadStatusRequest",
    "ReadSensorsRequest",
    "ReadNetworksRequest",
    "UpdateSensorRequest",
    "validate",
]
49 |
--------------------------------------------------------------------------------
/server/app/validation/constants.py:
--------------------------------------------------------------------------------
1 | import enum
2 |
3 |
class Limit(int, enum.Enum):
    """Size limits shared by the validation models."""

    SMALL = 2**6  # 64
    MEDIUM = 2**8  # 256
    LARGE = 2**14  # 16384
    MAXINT4 = 2**31  # Maximum value signed 32-bit integer + 1
9 |
10 |
class Pattern(str, enum.Enum):
    """Regular expressions for identifier-like strings."""

    NAME = r"^[a-z0-9](-?[a-z0-9])*$"  # lowercase alphanumerics, single dashes inside
    KEY = r"^[a-z0-9](_?[a-z0-9])*$"  # lowercase alphanumerics, single underscores inside
    IDENTIFIER = (  # Version 4 UUID regex
        r"^[a-f0-9]{8}-[a-f0-9]{4}-4[a-f0-9]{3}-[89ab][a-f0-9]{3}-[a-f0-9]{12}$"
    )
17 |
--------------------------------------------------------------------------------
/server/app/validation/mqtt.py:
--------------------------------------------------------------------------------
1 | import typing
2 |
3 | import pydantic
4 |
5 | import app.validation.constants as constants
6 | import app.validation.types as types
7 |
8 |
9 | ########################################################################################
10 | # Base types
11 | ########################################################################################
12 |
13 |
class Acknowledgment(types.StrictModel):
    """A sensor's MQTT response to receiving a configuration."""

    success: bool  # Records if the configuration was processed successfully
    timestamp: types.Timestamp
    revision: types.Revision  # The configuration revision being acknowledged
18 |
19 |
class Measurement(types.StrictModel):
    """A single measurement message from a sensor."""

    value: types.Measurement  # Non-empty mapping of measurement keys to floats
    timestamp: types.Timestamp
    revision: types.Revision | None = None  # Configuration revision, if known
24 |
25 |
class Log(types.StrictModel):
    """A log message from a sensor."""

    message: str  # Can be empty, but must not be None
    severity: typing.Literal["info", "warning", "error"]
    timestamp: types.Timestamp
    revision: types.Revision | None = None  # Configuration revision, if known

    @pydantic.field_validator("message")
    def trim(cls, v):
        # Truncate overly long messages instead of rejecting them outright
        return v[: constants.Limit.LARGE]
35 |
36 |
37 | ########################################################################################
38 | # Validators for the batched messages
39 | ########################################################################################
40 |
41 |
# MQTT messages arrive in batches; each batch must contain at least one item
AcknowledgmentsValidator = pydantic.TypeAdapter(
    pydantic.conlist(item_type=Acknowledgment, min_length=1),
)
MeasurementsValidator = pydantic.TypeAdapter(
    pydantic.conlist(item_type=Measurement, min_length=1),
)
LogsValidator = pydantic.TypeAdapter(
    pydantic.conlist(item_type=Log, min_length=1),
)
51 |
--------------------------------------------------------------------------------
/server/app/validation/types.py:
--------------------------------------------------------------------------------
1 | import typing
2 |
3 | import pydantic
4 |
5 | import app.validation.constants as constants
6 |
7 |
8 | ########################################################################################
9 | # Base model
10 | ########################################################################################
11 |
12 |
class StrictModel(pydantic.BaseModel):
    """Base model: no type coercion, no extra fields, currently mutable."""

    # TODO Set frozen=True, see routes.py for details
    model_config = pydantic.ConfigDict(strict=True, frozen=False, extra="forbid")
16 |
17 |
class LooseModel(pydantic.BaseModel):
    """Base model: coerces compatible types, immutable, no extra fields."""

    model_config = pydantic.ConfigDict(strict=False, frozen=True, extra="forbid")
20 |
21 |
class Configuration(StrictModel, extra="allow"):
    """Free-form configuration payload; arbitrary extra fields are accepted."""

    # TODO Validate the values more thoroughly for min and max limits/lengths
    # number of JSON fields or nesting depth could be interesting as well
    # Or, check the actual size of the JSON / length of the JSON string
    pass
27 |
28 |
29 | ########################################################################################
30 | # Types
31 | ########################################################################################
32 |
33 |
# Constrained field types shared by route and MQTT validation models
Name = pydantic.constr(max_length=64, pattern=constants.Pattern.NAME.value)
Identifier = pydantic.constr(pattern=constants.Pattern.IDENTIFIER.value)  # UUIDv4
Password = pydantic.constr(min_length=8, max_length=constants.Limit.MEDIUM)
Key = pydantic.constr(max_length=64, pattern=constants.Pattern.KEY.value)

# PostgreSQL errors if an integer is out of range, so we must validate
Revision = pydantic.conint(ge=0, lt=constants.Limit.MAXINT4)

# PostgreSQL rounds if it cannot store a float in full precision, so we do not need to
# validate min/max values here
Timestamp = float
Measurement = typing.Annotated[dict[Key, float], pydantic.Field(min_length=1)]
46 |
--------------------------------------------------------------------------------
/server/migrations/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iterize/tenta/48274dd016049a9cb4202c7cb7aebf861a8d50ce/server/migrations/.gitkeep
--------------------------------------------------------------------------------
/server/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "tenta"
3 | version = "0.1.0"
4 | description = ""
5 | authors = ["Felix Böhm "]
6 |
7 | [tool.poetry.dependencies]
8 | python = "~3.11"
9 | starlette = "^0.37.2"
10 | uvicorn = {extras = ["standard"], version = "^0.30.1"}
11 | asyncpg = "^0.29.0"
12 | pydantic = "^2.7.3"
13 | passlib = {extras = ["argon2"], version = "^1.7.4"}
14 | pendulum = "^3.0.0"
15 | aiomqtt = "^2.1.0"
16 |
17 | [tool.poetry.group.dev]
18 | optional = true
19 |
20 | [tool.poetry.group.dev.dependencies]
21 | jupyterlab = "^4.0.2"
22 | black = "^23.3.0"
23 | sqlfluff = "^2.1.1"
24 | httpx = "^0.24.0"
25 | ruff = "^0.0.275"
26 | pytest = "^7.4.0"
27 | pytest-cov = "^4.1.0"
28 | asgi-lifespan = "^2.1.0"
29 | pytest-asyncio = "^0.21.1"
30 |
31 | [tool.pytest.ini_options]
32 | asyncio_mode = "auto"
33 |
34 | [tool.black]
35 | preview = true
36 |
37 | [tool.ruff]
38 | line-length = 88
39 | select = ["E", "W", "F", "I", "N"]
40 | ignore = ["E501"]
41 | target-version = "py311"
42 |
43 | [tool.ruff.isort]
44 | lines-after-imports = 2
45 |
46 | [tool.ruff.pep8-naming]
47 | classmethod-decorators = ["classmethod", "pydantic.field_validator"]
48 |
49 | [tool.sqlfluff.core]
50 | dialect = "postgres"
51 | templater = "placeholder"
52 | exclude_rules = ["L029", "L032"]
53 |
54 | [tool.sqlfluff.templater.placeholder]
55 | param_style = "dollar"
56 | user_identifier = "'016d56bc-029a-4fbc-86ea-d0b8c8a8dfd9'"
57 | network_identifier = "'016d56bc-029a-4fbc-86ea-d0b8c8a8dfd9'"
58 | sensor_identifier = "'016d56bc-029a-4fbc-86ea-d0b8c8a8dfd9'"
59 | user_name = "'user'"
60 | network_name = "'network'"
61 | sensor_name = "'sensor'"
62 | password_hash = "'9c8cf1d1ca883388'"
63 | access_token_hash = "'9c8cf1d1ca883388'"
64 | attribute = "'attribute'"
65 | value = 3.14
66 | configuration = "'{}'"
67 | revision = 0
68 | creation_timestamp = "'1970-01-01T00:00:00+00:00'"
69 | acknowledgment_timestamp = "'1970-01-01T00:00:00+00:00'"
70 | severity = "'info'"
71 | message= "'message'"
72 | direction = "'next'"
73 | success = "TRUE"
74 |
75 | [build-system]
76 | requires = ["poetry-core"]
77 | build-backend = "poetry.core.masonry.api"
78 |
--------------------------------------------------------------------------------
/server/schema.sql:
--------------------------------------------------------------------------------
-- Database schema; requires PostgreSQL with the TimescaleDB extension
-- (hypertables, continuous aggregates, and retention policies are used below)
CREATE EXTENSION IF NOT EXISTS "pg_stat_statements";
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
CREATE EXTENSION IF NOT EXISTS "timescaledb";


-- Quoted because "user" is a reserved word in PostgreSQL
CREATE TABLE "user" (
    identifier UUID PRIMARY KEY,
    name TEXT UNIQUE NOT NULL,
    creation_timestamp TIMESTAMPTZ NOT NULL,
    password_hash TEXT NOT NULL
);


CREATE TABLE network (
    identifier UUID PRIMARY KEY,
    name TEXT UNIQUE NOT NULL,
    creation_timestamp TIMESTAMPTZ NOT NULL
);


CREATE TABLE sensor (
    identifier UUID PRIMARY KEY,
    name TEXT NOT NULL,
    network_identifier UUID NOT NULL REFERENCES network (identifier) ON DELETE CASCADE,
    creation_timestamp TIMESTAMPTZ NOT NULL,

    -- Add more parameters here? e.g. description (that do not get relayed to the sensor)

    -- Sensor names only need to be unique within their network
    UNIQUE (network_identifier, name)
);


-- Many-to-many association granting a user access to a network
CREATE TABLE permission (
    user_identifier UUID NOT NULL REFERENCES "user" (identifier) ON DELETE CASCADE,
    network_identifier UUID NOT NULL REFERENCES network (identifier) ON DELETE CASCADE,
    creation_timestamp TIMESTAMPTZ NOT NULL,
    PRIMARY KEY (user_identifier, network_identifier)
);


CREATE TABLE session (
    access_token_hash TEXT PRIMARY KEY,
    user_identifier UUID NOT NULL REFERENCES "user" (identifier) ON DELETE CASCADE,
    creation_timestamp TIMESTAMPTZ NOT NULL
);


-- Contains only values that are actually sent to the sensor, not metadata
CREATE TABLE configuration (
    sensor_identifier UUID NOT NULL REFERENCES sensor (identifier) ON DELETE CASCADE,
    value JSONB NOT NULL,
    revision INT NOT NULL,
    creation_timestamp TIMESTAMPTZ NOT NULL,
    publication_timestamp TIMESTAMPTZ,
    acknowledgment_timestamp TIMESTAMPTZ,
    receipt_timestamp TIMESTAMPTZ,
    success BOOLEAN
);

-- Instead of declaring a primary key, we define the unique index manually with
-- an explicit sort order, which makes the query for the latest revision faster
CREATE UNIQUE INDEX ON configuration (sensor_identifier ASC, revision DESC);


-- Measurements don't have a unique primary key. Enforcing that the combination of
-- (sensor_identifier, creation_timestamp, attribute) is unique filters out duplicates
-- but having these duplicates usually means that something is wrong on the sensor.
-- In this case, the server should store everything it receives and duplicates should be
-- filtered out during processing with manual oversight. The keyset pagination over the
-- measurements chooses arbitrarily between duplicates.
CREATE TABLE measurement (
    sensor_identifier UUID NOT NULL REFERENCES sensor (identifier) ON DELETE CASCADE,
    receipt_timestamp TIMESTAMPTZ NOT NULL,
    attribute TEXT NOT NULL,
    value DOUBLE PRECISION NOT NULL,
    creation_timestamp TIMESTAMPTZ NOT NULL,
    revision INT
);

CREATE INDEX ON measurement (sensor_identifier ASC, creation_timestamp ASC);

-- Partition measurements by time for efficient ingestion and retention
SELECT create_hypertable(
    relation => 'measurement',
    time_column_name => 'creation_timestamp');


-- Hourly per-sensor, per-attribute averages, maintained by TimescaleDB
CREATE MATERIALIZED VIEW measurement_aggregation_1_hour
WITH (timescaledb.continuous, timescaledb.materialized_only = true, timescaledb.create_group_indexes = false) AS
    SELECT
        sensor_identifier,
        attribute,
        avg(value)::DOUBLE PRECISION AS average,
        time_bucket('1 hour', creation_timestamp) AS bucket_timestamp
    FROM measurement
    GROUP BY sensor_identifier, attribute, bucket_timestamp
WITH DATA;


CREATE INDEX ON measurement_aggregation_1_hour (sensor_identifier ASC, bucket_timestamp ASC, attribute ASC);

-- Refresh the aggregate every hour, one hour behind real time
SELECT add_continuous_aggregate_policy(
    continuous_aggregate => 'measurement_aggregation_1_hour',
    start_offset => '10 days',
    end_offset => '1 hour',
    schedule_interval => '1 hour');


-- Logs don't have a unique primary key. Enforcing uniqueness over the combination
-- of (sensor_identifier, creation_timestamp) could filter out duplicates, but also
-- incorrectly reject valid logs with the same timestamp. The keyset pagination's cursor
-- is thus not unique. This means that elements can potentially be skipped when they
-- are at the edges of a page. We accept this trade-off in favor of performance.
-- If this ever becomes a problem, we can generate a column that reliably makes the
-- combination unique and use that for the keyset pagination.
CREATE TABLE log (
    sensor_identifier UUID NOT NULL REFERENCES sensor (identifier) ON DELETE CASCADE,
    message TEXT NOT NULL,
    severity TEXT NOT NULL,
    revision INT,
    creation_timestamp TIMESTAMPTZ NOT NULL,
    receipt_timestamp TIMESTAMPTZ NOT NULL
);

SELECT create_hypertable(
    relation => 'log',
    time_column_name => 'creation_timestamp');

-- Logs are only kept for a limited time window
SELECT add_retention_policy(
    relation => 'log',
    drop_after => INTERVAL '8 weeks');
131 |
--------------------------------------------------------------------------------
/server/scripts/README.md:
--------------------------------------------------------------------------------
1 | # Development scripts
2 |
3 | - `build`: Build the Docker image
4 | - `check`: Format and lint the code
5 | - `develop`: Start a development instance with pre-populated example data
6 | - `initialize`: Initialize the database; Use `--populate` option to populate with example data
7 | - `jupyter`: Start a Jupyter server in the current environment
8 | - `setup`: Set up or update the dependencies after a `git clone` or `git pull`
9 | - `test`: Run the tests
10 |
11 | Styled after GitHub's ["Scripts to Rule Them All"](https://github.com/github/scripts-to-rule-them-all).
12 |
--------------------------------------------------------------------------------
/server/scripts/build:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

# Build the Docker image, tagged with the current git state.

# Safety first
set -o errexit -o pipefail -o nounset
# Change into the project's directory
cd "$(dirname "$0")/.."

# Get the current commit hash and branch name
COMMIT_SHA="$(git rev-parse --verify HEAD)"
BRANCH_NAME="$(git branch --show-current)"

# Build the docker image
docker build --build-arg commit_sha="${COMMIT_SHA}" --build-arg branch_name="${BRANCH_NAME}" --tag tenta .
# Use --force instead of piping from `yes`: with pipefail+errexit, `yes` dying
# of SIGPIPE (exit 141) once prune stops reading would fail the whole script
docker image prune --force
15 |
--------------------------------------------------------------------------------
/server/scripts/check:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

# Format and lint the code; order matters: format first, then lint.

# Safety first
set -o errexit -o pipefail -o nounset
# Change into the project's directory
cd "$(dirname "$0")/.."

# Format with black, autofix lint findings with ruff, then lint the SQL queries
poetry run black ./app ./tests ./scripts
poetry run ruff --fix ./app ./tests ./scripts
poetry run sqlfluff lint -v --disable-progress-bar ./app/queries.sql
11 |
--------------------------------------------------------------------------------
/server/scripts/develop:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

# Start a development instance with pre-populated example data. Spins up
# PostgreSQL+TimescaleDB and Mosquitto in Docker, then runs uvicorn with
# auto-reload. The containers are stopped again when uvicorn exits.

# Safety first
set -o errexit -o pipefail -o nounset
# Change into the project's directory
cd "$(dirname "$0")/.."

# Set our environment variables
export ENVIRONMENT="development"
export COMMIT_SHA=$(git rev-parse --verify HEAD)
export BRANCH_NAME=$(git branch --show-current)
export POSTGRESQL_HOSTNAME="localhost"
export POSTGRESQL_PORT="5432"
export POSTGRESQL_IDENTIFIER="postgres"
export POSTGRESQL_PASSWORD="12345678"
export POSTGRESQL_DATABASE="database"
export MQTT_HOSTNAME="localhost"
export MQTT_PORT="1883"
export MQTT_IDENTIFIER="server"
export MQTT_PASSWORD="password"

# Path to our Mosquitto configuration
MOSQUITTO_CONFIGURATION="$(pwd)/tests/mosquitto.conf"

# Start PostgreSQL via docker in the background
echo "Starting PostgreSQL+TimescaleDB with Docker"
docker run -td --rm --name postgres -p 127.0.0.1:5432:5432 --hostname default -e POSTGRES_USER="${POSTGRESQL_IDENTIFIER}" -e POSTGRES_PASSWORD="${POSTGRESQL_PASSWORD}" -e POSTGRES_DB="${POSTGRESQL_DATABASE}" timescale/timescaledb:latest-pg15 >/dev/null
# Start the Mosquitto MQTT broker via docker in the background
echo "Starting Mosquitto with Docker"
docker run -td --rm --name mosquitto -p 127.0.0.1:1883:1883 --volume "${MOSQUITTO_CONFIGURATION}:/mosquitto/config/mosquitto.conf" eclipse-mosquitto:latest >/dev/null
# Wait for PostgreSQL to be ready; Avoid the double start problem by passing the external IP address with the -h option, see: https://github.com/docker-library/postgres/issues/146#issuecomment-561557320
echo "Waiting for PostgreSQL+TimescaleDB to be ready"
until docker exec postgres pg_isready --host default >/dev/null; do sleep 0.1; done
# Wait for Mosquitto to be ready
echo "Waiting for Mosquitto to be ready"
until docker exec mosquitto mosquitto_sub -E --topic "#"; do sleep 0.1; done

# Run the database initialization script; "||:" swallows a failure (e.g. an
# already initialized database) so errexit does not abort the script here
echo "Initializing the database"
./scripts/initialize --populate ||:

# Run in development mode; capture a non-zero exit status instead of aborting
# so that the containers below are still stopped
poetry run uvicorn app.main:app --host 127.0.0.1 --port 8421 --reload --reload-include "./app/*.sql" --reload-exclude "./tests/*" --reload-exclude="./scripts/*" --log-level debug || status=$?
# Stop and remove the Mosquitto docker container
docker stop mosquitto >/dev/null
# Stop and remove the PostgreSQL docker container
docker stop postgres >/dev/null
# Exit with captured status code; ${status=0} defaults to 0 when uvicorn exited cleanly
exit "${status=0}"
50 |
--------------------------------------------------------------------------------
/server/scripts/initialize:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

# Thin wrapper that runs the Python database initialization module inside the
# poetry environment, forwarding all arguments (e.g. --populate).

# Safety first
set -o errexit -o pipefail -o nounset
# Change into the project's directory
cd "$(dirname "$0")/.."

# Initialize the database
poetry run python -m scripts.initialize "$@"
10 |
--------------------------------------------------------------------------------
/server/scripts/initialize.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import asyncio
3 |
4 | import tests.conftest
5 |
6 |
async def initialize(populate=False):
    """Create the database schema and optionally load the example data.

    Reads schema.sql relative to the working directory and executes its
    statements one by one (statements are separated by two blank lines).
    """
    async with tests.conftest._connection() as connection:
        with open("schema.sql") as file:
            statements = file.read().split("\n\n\n")
        for statement in statements:
            await connection.execute(statement)
        if populate:
            offset = tests.conftest._offset()
            await tests.conftest._populate(connection, offset)
15 |
16 |
17 | if __name__ == "__main__":
18 | parser = argparse.ArgumentParser()
19 | parser.add_argument("--populate", action="store_true")
20 | args = parser.parse_args()
21 | asyncio.run(initialize(populate=args.populate))
22 |
--------------------------------------------------------------------------------
/server/scripts/jupyter:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

# Start a Jupyter server in the current environment with the same environment
# variables as the development setup, so local modules can talk to the
# development services.

# Safety first
set -o errexit -o pipefail -o nounset
# Change into the project's directory
cd "$(dirname "$0")/.."

# Set our environment variables
export ENVIRONMENT="development"
export COMMIT_SHA=$(git rev-parse --verify HEAD)
export BRANCH_NAME=$(git branch --show-current)
export POSTGRESQL_HOSTNAME="localhost"
export POSTGRESQL_PORT="5432"
export POSTGRESQL_IDENTIFIER="postgres"
export POSTGRESQL_PASSWORD="12345678"
export POSTGRESQL_DATABASE="database"
export MQTT_HOSTNAME="localhost"
export MQTT_PORT="1883"
export MQTT_IDENTIFIER="server"
export MQTT_PASSWORD="password"

# Enable importing local modules
export PYTHONPATH=$(pwd)

# Start jupyter server
poetry run python -m jupyterlab --no-browser --port 8532
27 |
--------------------------------------------------------------------------------
/server/scripts/setup:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

# Install or update the project dependencies after a git clone or git pull.

# Safety first
set -o errexit -o pipefail -o nounset
# Change into the project's directory
cd "$(dirname "$0")/.."

# Install the dependencies; --sync removes packages no longer in the lock file
poetry install --with dev --sync --no-root
10 |
--------------------------------------------------------------------------------
/server/scripts/test:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

# Run the test suite. Spins up PostgreSQL+TimescaleDB and Mosquitto in Docker,
# initializes the database, runs pytest with coverage, and tears the
# containers down again. Extra arguments are forwarded to pytest.

# Safety first
set -o errexit -o pipefail -o nounset
# Change into the project's directory
cd "$(dirname "$0")/.."

# Set our environment variables
export ENVIRONMENT="test"
export COMMIT_SHA=$(git rev-parse --verify HEAD)
export BRANCH_NAME=$(git branch --show-current)
export POSTGRESQL_HOSTNAME="localhost"
export POSTGRESQL_PORT="5432"
export POSTGRESQL_IDENTIFIER="postgres"
export POSTGRESQL_PASSWORD="12345678"
export POSTGRESQL_DATABASE="database"
export MQTT_HOSTNAME="localhost"
export MQTT_PORT="1883"
export MQTT_IDENTIFIER="server"
export MQTT_PASSWORD="password"

# Path to our Mosquitto configuration
MOSQUITTO_CONFIGURATION="$(pwd)/tests/mosquitto.conf"

# Start PostgreSQL via docker in the background
echo "Starting PostgreSQL+TimescaleDB with Docker"
docker run -td --rm --name postgres -p 127.0.0.1:5432:5432 --hostname default -e POSTGRES_USER="${POSTGRESQL_IDENTIFIER}" -e POSTGRES_PASSWORD="${POSTGRESQL_PASSWORD}" -e POSTGRES_DB="${POSTGRESQL_DATABASE}" timescale/timescaledb:latest-pg15 >/dev/null
# Start the Mosquitto MQTT broker via docker in the background
echo "Starting Mosquitto with Docker"
docker run -td --rm --name mosquitto -p 127.0.0.1:1883:1883 --volume "${MOSQUITTO_CONFIGURATION}:/mosquitto/config/mosquitto.conf" eclipse-mosquitto:latest >/dev/null
# Wait for PostgreSQL to be ready; Avoid the double start problem by passing the external IP address with the -h option, see: https://github.com/docker-library/postgres/issues/146#issuecomment-561557320
echo "Waiting for PostgreSQL+TimescaleDB to be ready"
until docker exec postgres pg_isready --host default >/dev/null; do sleep 0.1; done
# Wait for Mosquitto to be ready
echo "Waiting for Mosquitto to be ready"
until docker exec mosquitto mosquitto_sub -E --topic "#"; do sleep 0.1; done

# Run the database initialization script; "||:" swallows a failure (e.g. an
# already initialized database) so errexit does not abort the script here
echo "Initializing the database"
./scripts/initialize ||:

# Run the tests; capture a non-zero exit status instead of aborting so that
# the containers below are still stopped
poetry run pytest --cov=app --cov-report=term-missing tests "$@" || status=$?
# Stop and remove the Mosquitto docker container
docker stop mosquitto >/dev/null
# Stop and remove the PostgreSQL docker container
docker stop postgres >/dev/null
# Exit with captured status code; ${status=0} defaults to 0 when pytest exited cleanly
exit "${status=0}"
50 |
--------------------------------------------------------------------------------
/server/tests/README.md:
--------------------------------------------------------------------------------
1 | - The `data.json` file contains the example data that is loaded into the database during tests. It is also used to populate the database when you run the `./scripts/develop` script. Editing this file will break the tests but can be useful during development.
2 | - All timestamps in `data.json` are adjusted to the current time when they are written to the database. This ensures that the tests match the continuous aggregations and retention policies. A timestamp of `0` represents the current time rounded down to the nearest hour.
3 | - The tests expect available PostgreSQL+TimescaleDB and Mosquitto instances. This is consistent with the production environment. These services are automatically spun up locally inside the `./scripts/test` and `./scripts/develop` scripts.
4 |
--------------------------------------------------------------------------------
/server/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iterize/tenta/48274dd016049a9cb4202c7cb7aebf861a8d50ce/server/tests/__init__.py
--------------------------------------------------------------------------------
/server/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import contextlib
3 | import json
4 | import os
5 |
6 | import asyncpg
7 | import pytest
8 |
9 | import app.database as database
10 | import app.utils as utils
11 |
12 |
13 | ########################################################################################
14 | # Configure pytest-asyncio
15 | ########################################################################################
16 |
17 |
18 | @pytest.fixture(scope="session")
19 | def event_loop():
20 | loop = asyncio.get_event_loop_policy().new_event_loop()
21 | yield loop
22 | loop.close()
23 |
24 |
25 | ########################################################################################
26 | # Test data
27 | ########################################################################################
28 |
29 |
30 | @pytest.fixture(scope="session")
31 | def identifier():
32 | return "00000000-0000-4000-8000-000000000000"
33 |
34 |
35 | @pytest.fixture(scope="session")
36 | def user_identifier():
37 | return "575a7328-4e2e-4b88-afcc-e0b5ed3920cc"
38 |
39 |
40 | @pytest.fixture(scope="session")
41 | def network_identifier():
42 | return "1f705cc5-4242-458b-9201-4217455ea23c"
43 |
44 |
45 | @pytest.fixture(scope="session")
46 | def sensor_identifier():
47 | return "81bf7042-e20f-4a97-ac44-c15853e3618f"
48 |
49 |
50 | ########################################################################################
51 | # Database setup
52 | ########################################################################################
53 |
54 |
@contextlib.asynccontextmanager
async def _connection():
    """Provide a connection to the database that's properly closed afterwards.

    Connection parameters are read from the POSTGRESQL_* environment
    variables; the connection is prepared with database.initialize before
    being yielded.
    """
    # Connect outside the try block: if connecting fails there is nothing to
    # close yet, and placing it inside would make the finally clause raise
    # UnboundLocalError on `connection`, masking the original error
    connection = await asyncpg.connect(
        host=os.environ["POSTGRESQL_HOSTNAME"],
        port=os.environ["POSTGRESQL_PORT"],
        user=os.environ["POSTGRESQL_IDENTIFIER"],
        password=os.environ["POSTGRESQL_PASSWORD"],
        database=os.environ["POSTGRESQL_DATABASE"],
    )
    try:
        await database.initialize(connection)
        yield connection
    finally:
        await connection.close()
70 |
71 |
72 | @pytest.fixture(scope="session")
73 | async def connection():
74 | """Provide a database connection (persistent across tests)."""
75 | async with _connection() as connection:
76 | yield connection
77 |
78 |
def _offset():
    """Return the current unix timestamp rounded down to the nearest hour."""
    seconds_per_hour = 3600
    return (utils.timestamp() // seconds_per_hour) * seconds_per_hour
82 |
83 |
84 | @pytest.fixture(scope="session")
85 | def offset():
86 | """Provide the offset added to test timestamps in seconds."""
87 | return _offset()
88 |
89 |
async def _populate(connection, offset):
    """Insert the example data from tests/data.json into the database.

    Timestamp fields (keys ending in "_timestamp") are shifted by `offset`
    so the data is relative to the current hour; afterwards the continuous
    aggregate is refreshed to include the new rows.
    """
    with open("tests/data.json") as file:
        tables = json.load(file)
    for table_name, elements in tables.items():
        # The first record determines the column list for the whole table
        keys = tuple(elements[0].keys())
        columns = ", ".join(keys)
        identifiers = ", ".join(f"${position + 1}" for position in range(len(keys)))
        # Shift every timestamp field by the offset (None stays None)
        for element in elements:
            for key, value in element.items():
                if key.endswith("_timestamp"):
                    element[key] = None if value is None else value + offset
        # Write to the database
        rows = [tuple(element[key] for key in keys) for element in elements]
        await connection.executemany(
            f'INSERT INTO "{table_name}" ({columns}) VALUES ({identifiers});',
            rows,
        )
    # Refresh the materialized views
    await connection.execute("CALL refresh_continuous_aggregate('measurement_aggregation_1_hour', NULL, NULL);")  # fmt: skip
111 |
112 |
113 | @pytest.fixture(scope="function")
114 | async def reset(connection, offset):
115 | """Reset the database to contain the initial test data for each test."""
116 | async with connection.transaction():
117 | # Delete all the data in the database but keep the structure
118 | await connection.execute('DELETE FROM "user";')
119 | await connection.execute("DELETE FROM network;")
120 | # Populate with the initial test data again
121 | await _populate(connection, offset)
122 |
--------------------------------------------------------------------------------
/server/tests/data.json:
--------------------------------------------------------------------------------
1 | {
2 | "user": [
3 | {
4 | "identifier": "575a7328-4e2e-4b88-afcc-e0b5ed3920cc",
5 | "name": "happy-un1c0rn",
6 | "creation_timestamp": -7200,
7 | "password_hash": "$argon2id$v=19$m=65536,t=3,p=4$JaT0nnOuVYqx9h6jtLa21g$iRkdLMm6Pigqop5+DVzLwNRIxyE2uP3ZdYKJjHo4LhM"
8 | },
9 | {
10 | "identifier": "79690605-8241-44f5-9c05-f70d63c6b144",
11 | "name": "deerclops",
12 | "creation_timestamp": -3600,
13 | "password_hash": "$argon2id$v=19$m=65536,t=3,p=4$U+o955zz3ru39t5by9lbyw$pePvVo0ZNPLFIhcYurEzyxgCZP1QojKEucQSAcg0NdE"
14 | }
15 | ],
16 | "network": [
17 | {
18 | "identifier": "1f705cc5-4242-458b-9201-4217455ea23c",
19 | "name": "default",
20 | "creation_timestamp": -7200
21 | },
22 | {
23 | "identifier": "a9727106-63d2-4a2e-9bbc-3203742d0d55",
24 | "name": "airquality",
25 | "creation_timestamp": -3600
26 | },
27 | {
28 | "identifier": "2f9a5285-4ce1-4ddb-a268-0164c70f4826",
29 | "name": "example",
30 | "creation_timestamp": 0
31 | }
32 | ],
33 | "sensor": [
34 | {
35 | "identifier": "81bf7042-e20f-4a97-ac44-c15853e3618f",
36 | "name": "bulbasaur",
37 | "network_identifier": "1f705cc5-4242-458b-9201-4217455ea23c",
38 | "creation_timestamp": -7200
39 | },
40 | {
41 | "identifier": "2d2a3794-2345-4500-8baa-493f88123087",
42 | "name": "squirtle",
43 | "network_identifier": "1f705cc5-4242-458b-9201-4217455ea23c",
44 | "creation_timestamp": -3600
45 | },
46 | {
47 | "identifier": "23825517-4631-4beb-acd4-5545c57a9928",
48 | "name": "charmander",
49 | "network_identifier": "2f9a5285-4ce1-4ddb-a268-0164c70f4826",
50 | "creation_timestamp": -3600
51 | }
52 | ],
53 | "permission": [
54 | {
55 | "user_identifier": "575a7328-4e2e-4b88-afcc-e0b5ed3920cc",
56 | "network_identifier": "1f705cc5-4242-458b-9201-4217455ea23c",
57 | "creation_timestamp": -7200
58 | },
59 | {
60 | "user_identifier": "575a7328-4e2e-4b88-afcc-e0b5ed3920cc",
61 | "network_identifier": "a9727106-63d2-4a2e-9bbc-3203742d0d55",
62 | "creation_timestamp": -3600
63 | },
64 | {
65 | "user_identifier": "79690605-8241-44f5-9c05-f70d63c6b144",
66 | "network_identifier": "2f9a5285-4ce1-4ddb-a268-0164c70f4826",
67 | "creation_timestamp": 0
68 | }
69 | ],
70 | "session": [
71 | {
72 | "access_token_hash": "88a2419d0d862e44e44cdf98abda5e99b7655d12c7b7d4e8a4f3731a1095eac0709e798a75b8d669158e03e5bd13ad91e100c525ec90fb619f4b575d7f219a1a",
73 | "user_identifier": "575a7328-4e2e-4b88-afcc-e0b5ed3920cc",
74 | "creation_timestamp": -7200
75 | }
76 | ],
77 | "configuration": [
78 | {
79 | "sensor_identifier": "81bf7042-e20f-4a97-ac44-c15853e3618f",
80 | "value": {},
81 | "revision": 0,
82 | "creation_timestamp": -7200,
83 | "publication_timestamp": -7200,
84 | "acknowledgment_timestamp": -7200,
85 | "receipt_timestamp": -7200,
86 | "success": true
87 | },
88 | {
89 | "sensor_identifier": "81bf7042-e20f-4a97-ac44-c15853e3618f",
90 | "value": {
91 | "measurement_interval": 10.0,
92 | "strategy": "default"
93 | },
94 | "revision": 1,
95 | "creation_timestamp": -5400,
96 | "publication_timestamp": -5400,
97 | "acknowledgment_timestamp": -5400,
98 | "receipt_timestamp": -5400,
99 | "success": false
100 | },
101 | {
102 | "sensor_identifier": "81bf7042-e20f-4a97-ac44-c15853e3618f",
103 | "value": {
104 | "measurement_interval": 8.5,
105 | "cache": true,
106 | "strategy": "default"
107 | },
108 | "revision": 2,
109 | "creation_timestamp": -3600,
110 | "publication_timestamp": -3600,
111 | "acknowledgment_timestamp": null,
112 | "receipt_timestamp": null,
113 | "success": null
114 | }
115 | ],
116 | "measurement": [
117 | {
118 | "sensor_identifier": "81bf7042-e20f-4a97-ac44-c15853e3618f",
119 | "attribute": "temperature",
120 | "value": 7000.0,
121 | "revision": null,
122 | "creation_timestamp": -7200,
123 | "receipt_timestamp": -7200
124 | },
125 | {
126 | "sensor_identifier": "81bf7042-e20f-4a97-ac44-c15853e3618f",
127 | "attribute": "humidity",
128 | "value": -0.4,
129 | "revision": null,
130 | "creation_timestamp": -7200,
131 | "receipt_timestamp": -7200
132 | },
133 | {
134 | "sensor_identifier": "81bf7042-e20f-4a97-ac44-c15853e3618f",
135 | "attribute": "temperature",
136 | "value": 9000.0,
137 | "revision": null,
138 | "creation_timestamp": -5400,
139 | "receipt_timestamp": -5400
140 | },
141 | {
142 | "sensor_identifier": "81bf7042-e20f-4a97-ac44-c15853e3618f",
143 | "attribute": "humidity",
144 | "value": 0.0,
145 | "revision": 1,
146 | "creation_timestamp": -5400,
147 | "receipt_timestamp": -5400
148 | },
149 | {
150 | "sensor_identifier": "81bf7042-e20f-4a97-ac44-c15853e3618f",
151 | "attribute": "temperature",
152 | "value": 6000.0,
153 | "revision": 1,
154 | "creation_timestamp": -3600,
155 | "receipt_timestamp": -3600
156 | }
157 | ],
158 | "log": [
159 | {
160 | "sensor_identifier": "81bf7042-e20f-4a97-ac44-c15853e3618f",
161 | "message": "Everything is fine.",
162 | "severity": "info",
163 | "revision": null,
164 | "creation_timestamp": -7200,
165 | "receipt_timestamp": -7200
166 | },
167 | {
168 | "sensor_identifier": "81bf7042-e20f-4a97-ac44-c15853e3618f",
169 | "message": "Everything is fine.",
170 | "severity": "info",
171 | "revision": 0,
172 | "creation_timestamp": -5400,
173 | "receipt_timestamp": -5400
174 | },
175 | {
176 | "sensor_identifier": "81bf7042-e20f-4a97-ac44-c15853e3618f",
177 | "message": "The CPU is toasty; Get the marshmallows ready!",
178 | "severity": "warning",
179 | "revision": 0,
180 | "creation_timestamp": -3600,
181 | "receipt_timestamp": -3600
182 | },
183 | {
184 | "sensor_identifier": "81bf7042-e20f-4a97-ac44-c15853e3618f",
185 | "message": "The CPU is toasty; Get the marshmallows ready!",
186 | "severity": "warning",
187 | "revision": 1,
188 | "creation_timestamp": -1800,
189 | "receipt_timestamp": -1800
190 | },
191 | {
192 | "sensor_identifier": "81bf7042-e20f-4a97-ac44-c15853e3618f",
193 | "message": "The CPU is burning; Please call the fire department.",
194 | "severity": "error",
195 | "revision": 1,
196 | "creation_timestamp": 0,
197 | "receipt_timestamp": 0
198 | }
199 | ]
200 | }
--------------------------------------------------------------------------------
/server/tests/mosquitto.conf:
--------------------------------------------------------------------------------
# Mosquitto broker configuration for local development and tests:
# plain MQTT on the default port, no persistence, no authentication
listener 1883
protocol mqtt
persistence false
log_dest stderr
allow_anonymous true
6 |
--------------------------------------------------------------------------------
/server/tests/test_mqtt.py:
--------------------------------------------------------------------------------
1 | import app.mqtt as mqtt
2 | import app.validation as validation
3 |
4 |
5 | ########################################################################################
6 | # Acknowledgments
7 | ########################################################################################
8 |
9 |
async def test_handle_acknowledgments(reset, connection, sensor_identifier):
    """Test that a single acknowledgment message is handled."""
    acknowledgment = validation.Acknowledgment(success=True, timestamp=0, revision=0)
    await mqtt._handle_acknowledgments(sensor_identifier, [acknowledgment], connection)
17 |
18 |
async def test_handle_acknowledgments_with_multiple(
    reset, connection, sensor_identifier
):
    """Test that a batch of acknowledgments is handled in one call."""
    acknowledgment = validation.Acknowledgment(success=True, timestamp=0, revision=0)
    await mqtt._handle_acknowledgments(
        sensor_identifier, [acknowledgment, acknowledgment], connection
    )
28 |
29 |
async def test_handle_acknowledgments_with_nonexistent_sensor(
    reset, connection, identifier
):
    """Test that acknowledgments for an unknown sensor do not raise."""
    acknowledgment = validation.Acknowledgment(success=True, timestamp=0, revision=0)
    await mqtt._handle_acknowledgments(identifier, [acknowledgment], connection)
39 |
40 |
41 | ########################################################################################
42 | # Measurements
43 | ########################################################################################
44 |
45 |
async def test_handle_measurements(reset, connection, sensor_identifier):
    """Test that a single measurements message is handled."""
    measurement = validation.Measurement(value={"temperature": 0}, timestamp=0)
    await mqtt._handle_measurements(sensor_identifier, [measurement], connection)
53 |
54 |
async def test_handle_measurements_with_multiple(reset, connection, sensor_identifier):
    """Test that a batch of measurements is handled in one call."""
    measurement = validation.Measurement(value={"temperature": 0}, timestamp=0)
    await mqtt._handle_measurements(
        sensor_identifier, [measurement, measurement], connection
    )
62 |
63 |
async def test_handle_measurements_with_nonexistent_sensor(
    reset, connection, identifier
):
    """Test that measurements for an unknown sensor do not raise."""
    measurement = validation.Measurement(value={"temperature": 0}, timestamp=0)
    await mqtt._handle_measurements(identifier, [measurement], connection)
73 |
74 |
75 | ########################################################################################
76 | # Logs
77 | ########################################################################################
78 |
79 |
async def test_handle_logs(reset, connection, sensor_identifier):
    """Test that a single log message is handled."""
    log = validation.Log(message="", severity="info", timestamp=0)
    await mqtt._handle_logs(sensor_identifier, [log], connection)
87 |
88 |
async def test_handle_logs_with_multiple(reset, connection, sensor_identifier):
    """Test that a batch of logs is handled in one call."""
    log = validation.Log(message="", severity="info", timestamp=0)
    await mqtt._handle_logs(sensor_identifier, [log, log], connection)
96 |
97 |
async def test_handle_logs_with_nonexistent_sensor(reset, connection, identifier):
    """Test that logs for an unknown sensor do not raise."""
    log = validation.Log(message="", severity="info", timestamp=0)
    await mqtt._handle_logs(identifier, [log], connection)
105 |
--------------------------------------------------------------------------------