82 |
83 | {% endblock content %}
84 |
85 | {% block javascripts %}
86 | {{ super()}}
87 |
89 |
104 | {% endblock javascripts %}
105 |
--------------------------------------------------------------------------------
/dtbase/frontend/app/sensors/templates/readings.html:
--------------------------------------------------------------------------------
1 | {% extends "base_site.html" %}
2 |
3 | {% block title %} {{sensor_type}} Sensors {% endblock title %}
4 |
5 | {% block stylesheets %}
6 | {{ super() }}
7 |
8 |
9 |
10 |
11 | {% endblock stylesheets %}
12 |
13 | {% block content %}
14 |
Sensor readings
15 |
45 |
46 | {% if selected_sensor %}
47 |
48 |
49 |
50 |
51 |
Uploaded {{selected_sensor_type}} Sensor Data for {{ selected_sensor }} (showing max {{num_records}} records)
52 |
53 |
54 |
55 |
56 |
57 | | Timestamp |
{% for m in measures %}
59 | {{m["name"]}} ({{m["units"]}}) |
60 | {% endfor %}
61 |
62 |
63 |
{% for sd in sensor_data %}
65 |
66 | | {{ sd['timestamp'] }} |
{% for m in measures %}
68 | {{ '%0.2f' | format(sd[m["id"]]|float) }} |
69 | {% endfor %}
70 |
71 | {% endfor %}
72 |
73 |
74 |
75 |
76 | {% endif %}
77 | {% endblock content %}
78 |
79 | {% block javascripts %}
80 | {{ super() }}
81 |
82 |
83 |
97 |
98 | {% endblock javascripts %}
99 |
--------------------------------------------------------------------------------
/dtbase/models/utils/sensor_data.py:
--------------------------------------------------------------------------------
1 | """
2 | Data access module for models.
3 | """
4 | import datetime as dt
5 | import logging
6 | from typing import Any, Optional
7 |
8 | import pandas as pd
9 | from dateutil.parser import parse
10 |
11 | from dtbase.core.exc import BackendCallError
12 | from dtbase.core.utils import auth_backend_call, login
13 |
14 | logger = logging.getLogger(__name__)
15 |
16 |
def get_sensor_data(
    sensors: list[str],
    measures: list[dict[str, str]],
    dt_from: dt.datetime,
    dt_to: dt.datetime,
    token: Optional[str] = None,
) -> list[dict[str, Any]]:
    """Fetch data from one or more measures/sensors.

    Args:
        sensors (list[str]): A list of sensor unique ids.
        measures (list[dict[str, str]]): A list of dictionaries, each containing the
            measure name and units.
        dt_from (dt.datetime): The start date and time for the data.
        dt_to (dt.datetime): The end date and time for the data.
        token (str): An authentication token for the backend. Optional.

    Returns:
        list[dict[str, Any]]: A list of dictionaries, each containing the sensor unique
            id, the measure name and units, and the data as a DataFrame indexed by
            timestamp. Sensor/measure pairs with no readings in the window are omitted.

    Raises:
        BackendCallError: If the backend returns a non-200 response.
    """
    if token is None:
        token = login()[0]

    data = []
    for measure in measures:
        for sensor in sensors:
            response = auth_backend_call(
                "post",
                "/sensor/sensor-readings",
                {
                    "measure": measure,
                    "unique_identifier": sensor,
                    "dt_from": dt_from.isoformat(),
                    "dt_to": dt_to.isoformat(),
                },
                token=token,
            )
            if response.status_code != 200:
                raise BackendCallError(response)
            readings = response.json()
            # A DataFrame built from an empty list has no "timestamp" column, so
            # set_index would raise a KeyError. Skip empty result sets up front
            # (this also replaces the old post-construction `len(df) > 0` check).
            if not readings:
                continue
            df = pd.DataFrame(
                [
                    {
                        "value": r["value"],
                        "timestamp": parse(r["timestamp"]),
                    }
                    for r in readings
                ]
            ).set_index("timestamp")
            data.append(
                {
                    "sensor_unique_id": sensor,
                    "measure": measure,
                    "data": df,
                }
            )
    return data
76 |
77 |
78 | def downsample_data(
79 | data: pd.DataFrame, frequency: dt.timedelta | pd.Timedelta
80 | ) -> pd.DataFrame:
81 | """
82 | Downsample the sensor readings to the specified frequency.
83 |
84 | For instance, if the frequency is 1 hour, the function will average the sensor
85 | readings from for instance 12:30 to 13:30, and label the average as a reading for
86 | 13:00. If there are no readings in the interval, the value will be None.
87 |
88 | Args:
89 | data: A pandas DataFrame with a datetime index and a column
90 | called "value".
91 | frequency: A pandas or datetime timedelta object specifying the time interval to
92 | resample the data to.
93 |
94 | Returns:
95 | A pandas DataFrame with a datetime index, with the values from the "value"
96 | column resampled to the specified frequency.
97 | """
98 | resampled_data = (
99 | data["value"]
100 | # The offset by frequency / 2 makes the bins be centered around the full hour.
101 | # So a bin goes from e.g. 12:30 to 13:30, rather than 12:00 to 13:00.
102 | .resample(frequency, offset=frequency / 2).mean()
103 | )
104 | # The index is shifted by frequency / 2 to make the bin labels centered around the
105 | # full hour. For instance the bin 12:30 to 13:30 is labeled as 13:00.
106 | resampled_data.index = resampled_data.index + frequency / 2
107 | return resampled_data
108 |
--------------------------------------------------------------------------------
/tests/test_db.py:
--------------------------------------------------------------------------------
1 | """
2 | Test the basic structure of the database, creating rows and relations via
3 | the SQLAlchemy ORM.
4 | """
5 |
6 | from sqlalchemy.orm import Session
7 |
8 | from dtbase.backend.database.structure import (
9 | Location,
10 | LocationBooleanValue,
11 | LocationFloatValue,
12 | LocationIdentifier,
13 | LocationIntegerValue,
14 | LocationSchema,
15 | LocationSchemaIdentifierRelation,
16 | LocationStringValue,
17 | )
18 |
19 |
def test_add_zadf_location(session: Session) -> None:
    """A location with string/integer/float/boolean identifiers round-trips."""
    string_id = LocationIdentifier(name="zone", datatype="string")
    integer_id = LocationIdentifier(name="aisle", datatype="integer")
    float_id = LocationIdentifier(name="distance", units="m", datatype="float")
    boolean_id = LocationIdentifier(name="upper shelf", datatype="boolean")
    identifiers = [string_id, integer_id, float_id, boolean_id]
    session.add_all(identifiers)
    session.commit()
    for identifier in identifiers:
        assert isinstance(identifier.id, int)

    schema = LocationSchema(name="zone-aisle-distance-shelf")
    session.add(schema)
    session.commit()
    assert isinstance(schema.id, int)

    relations = [
        LocationSchemaIdentifierRelation(
            schema_id=schema.id, identifier_id=identifier.id
        )
        for identifier in identifiers
    ]
    session.add_all(relations)
    session.commit()
    for relation in relations:
        assert isinstance(relation.id, int)

    loc = Location(schema_id=schema.id)
    session.add(loc)
    session.commit()
    assert isinstance(loc.id, int)

    values = [
        LocationStringValue(value="Zone A", identifier_id=string_id.id, location_id=loc.id),
        LocationIntegerValue(value=23, identifier_id=integer_id.id, location_id=loc.id),
        LocationFloatValue(value=3.1, identifier_id=float_id.id, location_id=loc.id),
        LocationBooleanValue(value=True, identifier_id=boolean_id.id, location_id=loc.id),
    ]
    session.add_all(values)
    session.commit()
    for value in values:
        assert isinstance(value.id, int)
    session.close()
73 |
def test_add_xyz_location(session: Session) -> None:
    """A location with float x/y/z identifiers can be created and read back."""
    identifiers = [
        LocationIdentifier(name=axis, units="m", datatype="float")
        for axis in ("x", "y", "z")
    ]
    session.add_all(identifiers)

    schema = LocationSchema(name="xyz")
    session.add(schema)
    session.flush()

    session.add_all(
        LocationSchemaIdentifierRelation(schema_id=schema.id, identifier_id=ident.id)
        for ident in identifiers
    )
    session.commit()

    loc = Location(schema_id=schema.id)
    session.add(loc)
    session.commit()
    session.add_all(
        LocationFloatValue(value=coord, identifier_id=ident.id, location_id=loc.id)
        for ident, coord in zip(identifiers, (2.0, 3.0, 4.0))
    )
    session.commit()
    # See if we can retrieve the x, y, z values from the location.
    coords = loc.float_values_relationship
    assert coords[0].value == 2.0
    assert coords[1].value == 3.0
    assert coords[2].value == 4.0
    session.close()
110 |
--------------------------------------------------------------------------------
/dtbase/frontend/app/base/templates/site_template/sidebar.html:
--------------------------------------------------------------------------------
1 |
100 |
--------------------------------------------------------------------------------
/dtbase/backend/auth.py:
--------------------------------------------------------------------------------
1 | """
2 | Module (routes.py) to handle API endpoints related to authentication
3 | """
4 | import datetime as dt
5 |
6 | import jwt
7 | from fastapi import Depends, HTTPException, status
8 | from fastapi.security import OAuth2PasswordBearer
9 | from pydantic import ValidationError
10 | from sqlalchemy.orm import Session
11 |
12 | from dtbase.backend.database import users
13 | from dtbase.backend.database.utils import db_session
14 | from dtbase.backend.models import ParsedToken, TokenPair
15 | from dtbase.core.constants import (
16 | JWT_ACCESS_TOKEN_EXPIRES,
17 | JWT_REFRESH_TOKEN_EXPIRES,
18 | JWT_SECRET_KEY,
19 | )
20 |
21 | JWT_ALGORITHM = "HS256"
22 |
23 | # Creating tokens
24 |
25 |
def _create_access_token(data: dict, expires_delta: dt.timedelta) -> str:
    """Create access token with an expiry limit.

    Returns `data` plus an "exp" claim, encoded and signed as a JWT.
    """
    if JWT_SECRET_KEY is None:
        raise ValueError("JWT_SECRET_KEY is not set")
    expiry = dt.datetime.now(dt.timezone.utc) + expires_delta
    payload = {**data, "exp": expiry}
    return jwt.encode(payload, JWT_SECRET_KEY, algorithm=JWT_ALGORITHM)
35 |
36 |
def create_token_pair(email: str) -> TokenPair:
    """Create a new authentication token pair for a user.

    The access token is what the user needs to authenticate themselves to the API.
    The refresh token has a somewhat longer expiry time, and can be used to get a
    new access token. So if your access token expires, you can still keep your
    login alive by refreshing, without resending the username and password.
    """
    access_token = _create_access_token(
        {"sub": email, "token_type": "access"}, expires_delta=JWT_ACCESS_TOKEN_EXPIRES
    )
    refresh_token = _create_access_token(
        {"sub": email, "token_type": "refresh"}, expires_delta=JWT_REFRESH_TOKEN_EXPIRES
    )
    return TokenPair(access_token=access_token, refresh_token=refresh_token)
52 |
53 |
# Authenticating tokens

# FastAPI dependency that extracts a bearer token from the Authorization header.
# tokenUrl names the endpoint where clients obtain tokens.
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")
57 |
58 |
def _authenticate_token(
    token: str = Depends(oauth2_scheme), session: Session = Depends(db_session)
) -> ParsedToken:
    """Get the authentication token and validate it.

    Checks that the token belongs to a valid user and isn't expired.

    Args:
        token (str): The authentication token.
        session (Session): The database session.

    Returns:
        ParsedToken: The parsed payload of the token.

    Raises:
        ValueError: If JWT_SECRET_KEY is not set.
        HTTPException: If the token is invalid, expired, or doesn't belong to an
            existing user.
    """
    if JWT_SECRET_KEY is None:
        raise ValueError("JWT_SECRET_KEY is not set")
    credentials_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
    )
    try:
        # Required claims must be passed via `options`; PyJWT does not accept a
        # bare `require=` keyword, so passing it that way silently skips the
        # claim-presence checks.
        payload = jwt.decode(
            token,
            JWT_SECRET_KEY,
            algorithms=[JWT_ALGORITHM],
            options={"require": ["exp", "sub", "token_type"]},
        )
        parsed_token = ParsedToken(**payload)
    except jwt.ExpiredSignatureError:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED, detail="Token has expired"
        )
    except (jwt.InvalidTokenError, ValidationError):
        raise credentials_exception
    if not users.user_exists(parsed_token.sub, session=session):
        raise credentials_exception
    return parsed_token
100 |
101 |
def authenticate_access(
    parsed_token: ParsedToken = Depends(_authenticate_token),
) -> ParsedToken:
    """Check that a token is a valid access token.

    Return the parsed token, or raise a 401 if the token is of any other type.
    """
    if parsed_token.token_type == "access":
        return parsed_token
    raise HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Token of wrong type",
    )
115 |
116 |
def authenticate_refresh(
    parsed_token: ParsedToken = Depends(_authenticate_token),
) -> ParsedToken:
    """Check that a token is a valid refresh token.

    Return the parsed token, or raise a 401 if the token is of any other type.
    """
    if parsed_token.token_type == "refresh":
        return parsed_token
    raise HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Token of wrong type",
    )
130 |
--------------------------------------------------------------------------------
/tests/test_frontend_users.py:
--------------------------------------------------------------------------------
1 | """
2 | Test that the DTBase users pages load
3 | """
4 | import re
5 |
6 | import requests_mock
7 | from flask.testing import FlaskClient
8 |
9 |
def test_users_index_backend(auth_frontend_client: FlaskClient) -> None:
    """The users index page renders for an authenticated client."""
    with auth_frontend_client as client:
        response = client.get("/users/index", follow_redirects=True)
        assert response.status_code == 200
        html_content = response.data.decode("utf-8")
        assert "List of all users" in html_content

        # Find the password toggle button. The original pattern was corrupted in
        # the file; reconstructed as a <button> tag carrying id="show-password".
        eye_button = re.search(
            r"<button[^>]*id=\"show-password\"[^>]*>", html_content
        )
        assert eye_button is not None, "Password toggle button not found"
23 |
def test_users_index_get_mock(mock_auth_frontend_client: FlaskClient) -> None:
    """The users index lists every email the mocked backend returns."""
    with mock_auth_frontend_client as client, requests_mock.Mocker() as mock:
        mock.get(
            "http://localhost:5000/user/list-users",
            json=["user1@example.com", "user2@example.com"],
        )
        response = client.get("/users/index")
        assert response.status_code == 200
        page = response.data.decode("utf-8")
        assert "List of all users" in page
        assert "user1@example.com" in page
        assert "user2@example.com" in page
38 |
def test_users_index_post_mock_create_user(
    mock_auth_frontend_client: FlaskClient,
) -> None:
    """Submitting the new-user form reports success when the backend accepts it."""
    with mock_auth_frontend_client as client, requests_mock.Mocker() as mock:
        mock.get("http://localhost:5000/user/list-users", json=[])
        mock.post("http://localhost:5000/user/create-user", status_code=201)
        form = {
            "email": "newuser@example.com",
            "password": "password",
            "submitNewUser": "",
        }
        response = client.post("/users/index", data=form)
        assert response.status_code == 200
        assert "User created successfully" in response.data.decode("utf-8")
57 |
58 |
def test_users_index_post_mock_create_user_fail(
    mock_auth_frontend_client: FlaskClient,
) -> None:
    """Submitting the new-user form reports failure when the backend errors."""
    with mock_auth_frontend_client as client, requests_mock.Mocker() as mock:
        mock.get("http://localhost:5000/user/list-users", json=[])
        mock.post("http://localhost:5000/user/create-user", status_code=500)
        form = {
            "email": "newuser@example.com",
            "password": "password",
            "submitNewUser": "",
        }
        response = client.post("/users/index", data=form)
        assert response.status_code == 200
        assert "Failed to create user" in response.data.decode("utf-8")
77 |
78 |
def test_users_index_post_mock_delete_user(
    mock_auth_frontend_client: FlaskClient,
) -> None:
    """Submitting the delete form reports success when the backend accepts it."""
    with mock_auth_frontend_client as client, requests_mock.Mocker() as mock:
        mock.get("http://localhost:5000/user/list-users", json=["user1@example.com"])
        mock.post("http://localhost:5000/user/delete-user", status_code=200)
        form = {
            "email": "user1@example.com",
            "submitDelete": "",
        }
        response = client.post("/users/index", data=form)
        assert response.status_code == 200
        assert "User deleted successfully" in response.data.decode("utf-8")
96 |
97 |
def test_users_index_post_mock_delete_user_fail(
    mock_auth_frontend_client: FlaskClient,
) -> None:
    """Submitting the delete form reports failure when the backend errors."""
    with mock_auth_frontend_client as client, requests_mock.Mocker() as mock:
        mock.get("http://localhost:5000/user/list-users", json=["user1@example.com"])
        mock.post("http://localhost:5000/user/delete-user", status_code=500)
        form = {
            "email": "user1@example.com",
            "submitDelete": "",
        }
        response = client.post("/users/index", data=form)
        assert response.status_code == 200
        assert "Failed to delete user" in response.data.decode("utf-8")
115 |
--------------------------------------------------------------------------------
/dtbase/backend/database/queries.py:
--------------------------------------------------------------------------------
1 | """Functions for building various database queries.
2 |
3 | Each function returns a SQLAlchemy Query object. Turning these into subqueries or CTEs
4 | or executing them is the responsibility of the caller.
5 | """
6 | from typing import Any
7 |
8 | import sqlalchemy as sqla
9 | from sqlalchemy.orm import aliased
10 | from sqlalchemy.sql.selectable import Select
11 |
12 | from dtbase.backend.database import utils
13 | from dtbase.backend.database.structure import (
14 | Location,
15 | LocationIdentifier,
16 | LocationSchema,
17 | LocationSchemaIdentifierRelation,
18 | SensorMeasure,
19 | SensorType,
20 | SensorTypeMeasureRelation,
21 | )
22 | from dtbase.backend.database.utils import Session
23 |
24 |
def location_identifiers_by_schema() -> Select:
    """Query for identifiers of locations by schema.

    Produces one row per schema/identifier pair with columns schema_id,
    schema_name, identifier_id, identifier_name, identifier_units, and
    identifier_datatype.
    """
    return (
        sqla.select(
            LocationSchema.id.label("schema_id"),
            LocationSchema.name.label("schema_name"),
            LocationIdentifier.id.label("identifier_id"),
            LocationIdentifier.name.label("identifier_name"),
            LocationIdentifier.units.label("identifier_units"),
            LocationIdentifier.datatype.label("identifier_datatype"),
        )
        .join(
            LocationSchemaIdentifierRelation,
            LocationSchema.id == LocationSchemaIdentifierRelation.schema_id,
        )
        .join(
            LocationIdentifier,
            LocationSchemaIdentifierRelation.identifier_id == LocationIdentifier.id,
        )
    )
46 |
47 |
def select_location_by_coordinates(
    schema_name: str, session: Session, **kwargs: Any
) -> Select:
    """Query for locations and their coordinates.

    Return a query with the column `id` and one column for each location identifier for
    this location schema. Each row is a location. Keyword arguments can be used to
    filter by the location identifiers, e.g. with no keyword arguments the query will
    return all locations in this schema, and with all location identifiers specified in
    the keyword arguments the query will return a single location.

    For instance, `select_location_by_coordinates("latlong", session, latitude=0)` will
    return a query for "latlong" locations that have latitude=0.

    Note that in the process of constructing this query another query needs to be
    executed, hence the `session` argument.

    Args:
        schema_name: Name of the location schema to query.
        session: Database session, used to look up the schema's identifiers.
        **kwargs: identifier_name=value pairs to filter the locations by.

    Returns:
        A SQLAlchemy Select over locations in the schema.

    Raises:
        ValueError: If a keyword argument is not an identifier of this schema.
    """
    # Find the identifiers for this schema.
    schema_sq = location_identifiers_by_schema().subquery()
    schema_q = sqla.select(
        schema_sq.c.identifier_id,
        schema_sq.c.identifier_name,
        schema_sq.c.identifier_datatype,
    ).where(schema_sq.c.schema_name == schema_name)
    identifiers = session.execute(schema_q).fetchall()

    # Check that no extraneous keyword arguments are given.
    # Rows are (id, name, datatype) tuples; x[1] is the identifier name.
    identifier_names = set(x[1] for x in identifiers)
    for key in kwargs:
        if key not in identifier_names:
            msg = f"Location identifier '{key}' not valid for schema '{schema_name}'"
            raise ValueError(msg)

    # Create the query for locations: one join (on a freshly aliased value table,
    # since several identifiers may share a datatype) per identifier, each
    # contributing one labeled column.
    columns = [Location.id]
    joins = []
    for id_id, id_name, id_datatype in identifiers:
        value_class = aliased(utils.location_value_class_dict[id_datatype])
        columns.append(value_class.value.label(id_name))
        join_conditions = [
            value_class.location_id == Location.id,
            value_class.identifier_id == id_id,
        ]
        # Filtering by an identifier is done in the join condition itself.
        if id_name in kwargs:
            join_conditions.append(value_class.value == kwargs[id_name])
        joins.append((value_class, sqla.and_(*join_conditions)))
    location_q = sqla.select(*columns)
    for join in joins:
        location_q = location_q.join(*join)
    return location_q
98 |
99 |
def sensor_measures_by_type() -> Select:
    """Query for measures of sensors by sensor type.

    Produces one row per type/measure pair with columns type_id, type_name,
    measure_id, measure_name, measure_units, and measure_datatype.
    """
    return (
        sqla.select(
            SensorType.id.label("type_id"),
            SensorType.name.label("type_name"),
            SensorMeasure.id.label("measure_id"),
            SensorMeasure.name.label("measure_name"),
            SensorMeasure.units.label("measure_units"),
            SensorMeasure.datatype.label("measure_datatype"),
        )
        .join(
            SensorTypeMeasureRelation,
            SensorType.id == SensorTypeMeasureRelation.type_id,
        )
        .join(
            SensorMeasure,
            SensorTypeMeasureRelation.measure_id == SensorMeasure.id,
        )
    )
121 |
--------------------------------------------------------------------------------
/tests/test_api_user.py:
--------------------------------------------------------------------------------
1 | """
2 | Test API endpoints for users
3 | """
4 | from collections.abc import Collection
5 |
6 | import pytest
7 | from fastapi import FastAPI
8 | from fastapi.testclient import TestClient
9 | from httpx import Response
10 | from starlette.routing import Route
11 |
12 | from dtbase.core.constants import DEFAULT_USER_EMAIL
13 |
14 | from .conftest import check_for_docker
15 | from .utils import TEST_USER_EMAIL, assert_unauthorized, can_login
16 |
17 | DOCKER_RUNNING = check_for_docker()
18 |
19 |
20 | # Some example values used for testing.
21 | EMAIL = "hubby@hobbob.bubbly"
22 | PASSWORD = "iknowsecurity"
23 |
24 |
def create_user(client: TestClient) -> Response:
    """POST the module-level test user to the backend and return the response."""
    payload = {"email": EMAIL, "password": PASSWORD}
    return client.post("/user/create-user", json=payload)
30 |
31 |
def assert_list_users(client: TestClient, users: Collection[str]) -> None:
    """Assert that `/user/list-users` succeeds and returns exactly `users`."""
    response = client.get("/user/list-users")
    assert response.status_code == 200
    listed = response.json()
    assert listed is not None
    assert set(listed) == set(users)
40 |
41 |
@pytest.mark.skipif(not DOCKER_RUNNING, reason="requires docker")
def test_create_user(auth_client: TestClient) -> None:
    """A new user can be created, shows up in the listing, and can log in."""
    response = create_user(auth_client)
    assert response.status_code == 201
    assert response.json() == {"detail": "User created"}
    assert_list_users(auth_client, {TEST_USER_EMAIL, DEFAULT_USER_EMAIL, EMAIL})
    assert can_login(auth_client, email=EMAIL, password=PASSWORD)
49 |
50 |
@pytest.mark.skipif(not DOCKER_RUNNING, reason="requires docker")
def test_create_user_duplicate(auth_client: TestClient) -> None:
    """Creating the same user twice yields a 409 conflict."""
    create_user(auth_client)
    duplicate_response = create_user(auth_client)
    assert duplicate_response.status_code == 409
    assert duplicate_response.json() == {"detail": "User already exists"}
57 |
58 |
@pytest.mark.skipif(not DOCKER_RUNNING, reason="requires docker")
def test_delete_user(auth_client: TestClient) -> None:
    """A created user can be deleted and disappears from the listing."""
    create_user(auth_client)
    response = auth_client.post("/user/delete-user", json={"email": EMAIL})
    assert response.status_code == 200
    assert response.json() == {"detail": "User deleted"}
    assert_list_users(auth_client, {TEST_USER_EMAIL, DEFAULT_USER_EMAIL})
66 |
67 |
@pytest.mark.skipif(not DOCKER_RUNNING, reason="requires docker")
def test_delete_user_nonexistent(auth_client: TestClient) -> None:
    """Deleting a user that was never created fails with a 400."""
    response = auth_client.post("/user/delete-user", json={"email": EMAIL})
    assert response.status_code == 400
    assert response.json() == {"detail": "User doesn't exist"}
73 |
74 |
@pytest.mark.skipif(not DOCKER_RUNNING, reason="requires docker")
def test_change_password(auth_client: TestClient) -> None:
    """After a password change, the user can log in with the new password."""
    create_user(auth_client)
    new_password = "new kids on the block"
    payload = {"email": EMAIL, "password": new_password}
    response = auth_client.post("/user/change-password", json=payload)
    assert response.status_code == 200
    assert response.json() == {"detail": "Password changed"}
    assert can_login(auth_client, EMAIL, new_password)
85 |
86 |
@pytest.mark.skipif(not DOCKER_RUNNING, reason="requires docker")
def test_change_password_nonexistent(auth_client: TestClient) -> None:
    """Changing the password of a nonexistent user fails with a 400."""
    new_password = "new kids on the block"
    payload = {"email": EMAIL, "password": new_password}
    response = auth_client.post("/user/change-password", json=payload)
    assert response.status_code == 400
    assert response.json() == {"detail": "User doesn't exist"}
95 |
96 |
@pytest.mark.skipif(not DOCKER_RUNNING, reason="requires docker")
def test_unauthorized(client: TestClient, app: FastAPI) -> None:
    """Check that we aren't able to access any of the end points if we don't have an
    authorization token.

    Note that this one, unlike all the others, uses the `client` rather than the
    `auth_client` fixture.
    """
    # loop through all endpoints
    for route in app.routes:
        if not isinstance(route, Route):
            # For some reason, FastAPI type-annotates app.routes as Sequence[BaseRoute],
            # rather than Sequence[Route]. In case we ever encounter a route that isn't
            # a Route, raise an error.
            raise ValueError(f"route {route} is not a Route")
        methods = route.methods
        # `methods` is a set (or None). Truth-test the set itself: the old code
        # wrapped it in iter() first, and an iterator is always truthy, so
        # next() could raise StopIteration on a route with no methods.
        if methods and route.path.startswith("/user"):
            method = next(iter(methods))
            assert_unauthorized(client, method.lower(), route.path)
119 |
--------------------------------------------------------------------------------
/dtbase/frontend/utils.py:
--------------------------------------------------------------------------------
1 | """Utilities for the front end."""
2 | import unicodedata
3 | import urllib
4 | from collections.abc import Collection
5 | from typing import Any, Dict, List, Optional
6 | from urllib.parse import urlparse
7 |
8 | from flask import Request
9 |
10 |
def parse_url_parameter(request: Request, parameter: str) -> Optional[str]:
    """Parse a URL parameter, doing any unquoting as necessary. Return None if the
    parameter doesn't exist.
    """
    raw_value = request.args.get(parameter)
    return urllib.parse.unquote(raw_value) if raw_value is not None else None
21 |
22 |
def convert_form_values(
    variables: List[Dict[str, Any]], form: dict, prefix: str = "identifier"
) -> Dict[str, Any]:
    """
    Prepares the form and converts values to their respective datatypes as defined in
    the schema. Returns a dictionary of converted values.

    Args:
        variables: Dictionaries describing the expected fields; each needs a "name"
            and a "datatype" ("integer", "float", "string", or "boolean").
        form: Submitted form data; each variable is looked up as "<prefix>_<name>".
        prefix: Prefix of the form field names. Defaults to "identifier".

    Returns:
        A dictionary mapping each variable name to its converted value.

    Raises:
        ValueError: If a datatype is unknown, or a value is missing or cannot be
            converted to the declared datatype.
    """

    # Define a dictionary mapping type names to conversion functions
    conversion_functions = {
        "integer": int,
        "float": float,
        "string": str,
        "boolean": lambda x: x.lower() == "true",
    }

    converted_values = {}

    for variable in variables:
        name = variable["name"]
        value = form.get(f"{prefix}_{name}")
        datatype = variable["datatype"]

        # Get the conversion function for this datatype
        conversion_function = conversion_functions.get(datatype)

        if not conversion_function:
            raise ValueError(f"Unknown datatype '{datatype}' for variable '{name}'")

        try:
            # Convert the value to the correct datatype. A missing form field gives
            # value=None, for which int()/float() raise TypeError and the boolean
            # lambda raises AttributeError; report all of these uniformly as the
            # ValueError below rather than crashing with an unrelated exception.
            converted_value = conversion_function(value)
        except (ValueError, TypeError, AttributeError):
            raise ValueError(
                f"Invalid value '{value}' for variable '{name}' "
                f"(expected {datatype})"
            )

        converted_values[name] = converted_value

    return converted_values
65 |
66 |
67 | # The following two functions mimic similar ones from Django.
68 |
69 |
def url_has_allowed_host_and_scheme(
    url: Optional[str],
    allowed_hosts: Collection[str] | str | None,
    require_https: bool = False,
) -> bool:
    """
    Return `True` if the url uses an allowed host and a safe scheme.

    Always return `False` on an empty url.

    If `require_https` is `True`, only 'https' will be considered a valid scheme, as
    opposed to 'http' and 'https' with the default, `False`.
    """
    url = url.strip() if url is not None else None
    if not url:
        return False
    # Normalise allowed_hosts to a collection of host names.
    if allowed_hosts is None:
        hosts: Collection[str] = set()
    elif isinstance(allowed_hosts, str):
        hosts = {allowed_hosts}
    else:
        hosts = allowed_hosts
    # Chrome treats \ completely as / in paths but it could be part of some
    # basic auth credentials so we need to check both URLs.
    forward_slashed = url.replace("\\", "/")
    return _url_has_allowed_host_and_scheme(
        url, hosts, require_https=require_https
    ) and _url_has_allowed_host_and_scheme(
        forward_slashed, hosts, require_https=require_https
    )
98 |
99 |
100 | def _url_has_allowed_host_and_scheme(
101 | url: str, allowed_hosts: Collection[str], require_https: bool = False
102 | ) -> bool:
103 | # Chrome considers any URL with more than two slashes to be absolute, but
104 | # urlparse is not so flexible. Treat any url with three slashes as unsafe.
105 | if url.startswith("///"):
106 | return False
107 | try:
108 | url_info = urlparse(url)
109 | except ValueError: # e.g. invalid IPv6 addresses
110 | return False
111 | # Forbid URLs like http:///example.com - with a scheme, but without a hostname.
112 | # In that URL, example.com is not the hostname but, a path component. However,
113 | # Chrome will still consider example.com to be the hostname, so we must not
114 | # allow this syntax.
115 | if not url_info.netloc and url_info.scheme:
116 | return False
117 | # Forbid URLs that start with control characters. Some browsers (like
118 | # Chrome) ignore quite a few control characters at the start of a
119 | # URL and might consider the URL as scheme relative.
120 | if unicodedata.category(url[0])[0] == "C":
121 | return False
122 | scheme = url_info.scheme
123 | # Consider URLs without a scheme (e.g. //example.com/p) to be http.
124 | if not url_info.scheme and url_info.netloc:
125 | scheme = "http"
126 | valid_schemes = ["https"] if require_https else ["http", "https"]
127 | return (not url_info.netloc or url_info.netloc in allowed_hosts) and (
128 | not scheme or scheme in valid_schemes
129 | )
130 |
--------------------------------------------------------------------------------
/dtbase/frontend/app/locations/routes.py:
--------------------------------------------------------------------------------
from typing import Optional

from flask import flash, redirect, render_template, request, url_for
from flask_login import current_user, login_required
from werkzeug.wrappers import Response

from dtbase.frontend import utils
from dtbase.frontend.app.locations import blueprint
8 |
# NOTE: @blueprint.route must be the outermost decorator. In the previous
# order (@login_required on top) Flask registered the raw, unwrapped view,
# leaving the route unprotected.
@blueprint.route("/new-location-schema", methods=["GET"])
@login_required
def new_location_schema(form_data: Optional[str] = None) -> str:
    """Render the form for creating a new location schema.

    Args:
        form_data: Optional previously-submitted form data used to
            re-populate the form fields.

    Returns:
        The rendered location-schema form page.
    """
    # Fetch identifiers already known to the backend so the form can offer
    # them for reuse.
    existing_identifiers_response = current_user.backend_call(
        "get", "/location/list-location-identifiers"
    )
    existing_identifiers = existing_identifiers_response.json()
    return render_template(
        "location_schema_form.html",
        form_data=form_data,
        existing_identifiers=existing_identifiers,
    )
21 |
22 |
# NOTE: @blueprint.route must be the outermost decorator so that
# @login_required actually wraps the registered view.
@blueprint.route("/new-location-schema", methods=["POST"])
@login_required
def submit_location_schema() -> Response:
    """Handle submission of the new-location-schema form.

    Reads the schema name, description, and the parallel lists of identifier
    names/units/datatypes from the form, posts them to the backend, flashes
    the outcome, and redirects back to the form.

    Returns:
        A redirect back to the new-location-schema page.
    """
    name = request.form.get("name")
    description = request.form.get("description")
    identifier_names = request.form.getlist("identifier_name[]")
    identifier_units = request.form.getlist("identifier_units[]")
    identifier_datatypes = request.form.getlist("identifier_datatype[]")

    # The three lists are parallel: one entry per identifier row in the form.
    # Loop variable renamed from "name" to avoid shadowing the schema name.
    identifiers = [
        {
            "name": identifier_name,
            "units": units,
            "datatype": datatype,
        }
        for identifier_name, units, datatype in zip(
            identifier_names,
            identifier_units,
            identifier_datatypes,
        )
    ]

    form_data = {
        "name": name,
        "description": description,
        "identifiers": identifiers,
    }

    try:
        response = current_user.backend_call(
            "post", "/location/insert-location-schema", form_data
        )
    except Exception as e:
        flash(f"Error communicating with the backend: {e}", "error")
        return redirect(url_for(".new_location_schema"))

    # 409 means the schema name is already taken; 201 is the success code.
    if response.status_code == 409:
        flash(f"The schema '{name}' already exists.", "error")
    elif response.status_code != 201:
        flash(
            f"An error occurred while adding the location schema: {response}", "error"
        )
    else:
        flash("Location schema added successfully", "success")

    return redirect(url_for(".new_location_schema"))
69 |
70 |
# NOTE: @blueprint.route must be the outermost decorator so that
# @login_required actually wraps the registered view.
@blueprint.route("/new-location", methods=["GET"])
@login_required
def new_location() -> str:
    """Render the form for creating a new location.

    The available location schemas are fetched from the backend; an optional
    ``schema_name`` query parameter pre-selects one of them.

    Returns:
        The rendered location form page.
    """
    response = current_user.backend_call("get", "/location/list-location-schemas")
    schemas = response.json()
    selected_schema = request.args.get("schema_name")
    return render_template(
        "location_form.html", schemas=schemas, selected_schema=selected_schema
    )
80 |
81 |
# NOTE: @blueprint.route must be the outermost decorator so that
# @login_required actually wraps the registered view.
@blueprint.route("/new-location", methods=["POST"])
@login_required
def submit_location() -> Response:
    """Handle submission of the new-location form.

    Looks up the selected schema, converts the submitted identifier values to
    the datatypes the schema declares, posts the new location to the backend,
    flashes the outcome, and redirects back to the form.

    Returns:
        A redirect back to the new-location page (schema pre-selected).
    """
    # Retrieve the name of the schema
    schema_name = request.form.get("schema")
    # Once all is said and done handling the POST request, we redirect the user here.
    redirected_destination = redirect(url_for(".new_location", schema_name=schema_name))
    # Retrieve the identifiers and values based on the schema
    payload = {"schema_name": schema_name}
    response = current_user.backend_call(
        "post", "/location/get-schema-details", payload
    )
    schema = response.json()
    try:
        # Convert form values to their respective datatypes as defined in the schema
        form_data = utils.convert_form_values(schema["identifiers"], request.form)
    except ValueError as e:
        flash(str(e), "error")
        return redirected_destination

    try:
        # Send a POST request to the backend
        payload = {"schema_name": schema_name, "coordinates": form_data}
        response = current_user.backend_call(
            "post", "/location/insert-location-for-schema", payload
        )
    except Exception as e:
        flash(f"Error communicating with the backend: {e}", "error")
        return redirected_destination

    # 409 means this location already exists; 201 is the success code.
    if response.status_code == 409:
        flash("Location already exists", "error")
    elif response.status_code != 201:
        flash(
            f"An error occurred while adding the location: {response.json()}", "error"
        )
    else:
        flash("Location added successfully", "success")
    return redirected_destination
121 |
122 |
# NOTE: @blueprint.route must be the outermost decorator so that
# @login_required actually wraps the registered view.
@blueprint.route("/locations-table", methods=["GET"])
@login_required
def locations_table() -> str:
    """Render a table of all locations, grouped by schema.

    Fetches every location schema from the backend, then lists the locations
    registered under each one.

    Returns:
        The rendered locations-table page.
    """
    schemas_response = current_user.backend_call(
        "get", "/location/list-location-schemas"
    )

    schemas = schemas_response.json()
    # Map schema name -> list of locations registered under that schema.
    locations_for_each_schema = {}

    for schema in schemas:
        payload = {"schema_name": schema["name"]}
        locations_response = current_user.backend_call(
            "post", "/location/list-locations", payload
        )
        locations_for_each_schema[schema["name"]] = locations_response.json()

    return render_template(
        "locations_table.html",
        schemas=schemas,
        locations_for_each_schema=locations_for_each_schema,
    )
145 |
--------------------------------------------------------------------------------
/tests/test_ingress_weather.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime, timedelta
2 |
3 | import pytest
4 | import requests_mock
5 | from fastapi.testclient import TestClient
6 | from freezegun import freeze_time
7 |
8 | from dtbase.ingress.ingress_weather import (
9 | SENSOR_OPENWEATHERMAPFORECAST,
10 | SENSOR_OPENWEATHERMAPHISTORICAL,
11 | OpenWeatherDataIngress,
12 | )
13 |
14 | from .resources.data_for_tests import (
15 | EXPECTED_OPENWEATHERMAP_FORECAST_GET_DATA_RESPONSE,
16 | EXPECTED_OPENWEATHERMAP_HISTORICAL_GET_DATA_RESPONSE,
17 | MOCKED_CONST_OPENWEATHERMAP_FORECAST_URL_RESPONSE,
18 | MOCKED_CONST_OPENWEATHERMAP_HISTORICAL_URL_RESPONSE,
19 | )
20 |
# Captured once at import time; all parametrize windows are offsets from it.
NOW = datetime.now()
# Shared ingress instance; used for its `present` sentinel and URL resolution.
open_weather_data_ingress = OpenWeatherDataIngress()

# OpenWeatherMap endpoints plus dummy credentials/coordinates used to build
# the expected request URLs in the tests below.
FORECAST_BASE_URL = "https://api.openweathermap.org/data/3.0/onecall"
HISTORICAL_BASE_URL = "https://api.openweathermap.org/data/2.5/onecall/timemachine"
API_KEY = "api_key"
LATITUDE = 0
LONGITUDE = 1
29 |
30 |
@pytest.mark.parametrize(
    "dt_from, dt_to, expected",
    [
        # A window ending at "present" (entirely in the past) should resolve
        # to the historical API and its sensor.
        (
            NOW - timedelta(hours=72),
            "present",
            (
                (
                    HISTORICAL_BASE_URL
                    + f"?lat={LATITUDE}&lon={LONGITUDE}&units=metric&appid={API_KEY}"
                ),
                SENSOR_OPENWEATHERMAPHISTORICAL,
                NOW - timedelta(hours=72),
                open_weather_data_ingress.present,
            ),
        ),
        # A window starting at "present" (entirely in the future) should
        # resolve to the forecast API and its sensor.
        (
            "present",
            NOW + timedelta(days=1),
            (
                (
                    FORECAST_BASE_URL
                    + f"?lat={LATITUDE}&lon={LONGITUDE}&units=metric&appid={API_KEY}"
                ),
                SENSOR_OPENWEATHERMAPFORECAST,
                open_weather_data_ingress.present,
                NOW + timedelta(days=1),
            ),
        ),
    ],
)
def test_get_api_base_url_and_sensor(
    dt_from: datetime,
    dt_to: datetime,
    expected: tuple[str, dict[str, str], datetime, datetime],
) -> None:
    """Test that get_api_base_url_and_sensor returns the correct API base
    URL, sensor, and resolved datetimes for valid date windows."""
    assert (
        open_weather_data_ingress.get_api_base_url_and_sensor(
            dt_from, dt_to, API_KEY, LATITUDE, LONGITUDE
        )
        == expected
    )
75 |
76 |
@pytest.mark.parametrize(
    "dt_from, dt_to, expected",
    [
        # Window too far in the past — presumably beyond the historical
        # API's lookback limit (TODO confirm against the ingress module).
        (NOW - timedelta(days=21), "present", ValueError),
        # Window too far in the future — presumably beyond the forecast range.
        ("present", NOW + timedelta(days=12), ValueError),
        # Zero-length window.
        ("present", "present", ValueError),
        # dt_from after dt_to.
        (NOW + timedelta(days=2), "present", ValueError),
        # Window straddling past and future; neither API alone can serve it.
        (NOW - timedelta(days=2), NOW + timedelta(days=2), ValueError),
    ],
)
def test_get_api_base_url_and_sensor_raises(
    dt_from: datetime, dt_to: datetime, expected: ValueError
) -> None:
    """Test that get_api_base_url_and_sensor raises for date windows it
    cannot serve."""
    with pytest.raises(expected):
        open_weather_data_ingress.get_api_base_url_and_sensor(
            dt_from, dt_to, API_KEY, LATITUDE, LONGITUDE
        )
96 |
97 |
@freeze_time("2024-01-06")
def test_get_data_historical_api(conn_backend: TestClient) -> None:
    """Test the get_data method for a scenario where the historical API would be used"""
    weather_ingress = OpenWeatherDataIngress()
    # The whole window lies before the frozen "now" (2024-01-06), so the
    # ingress should pick the historical API.
    dt_to = datetime(2024, 1, 5, 16, 1, 1, 1)
    dt_from = dt_to - timedelta(hours=2)
    with requests_mock.Mocker() as m:
        # Mock the OpenWeatherMap endpoint so no real HTTP request is made.
        m.get(
            HISTORICAL_BASE_URL,
            json=MOCKED_CONST_OPENWEATHERMAP_HISTORICAL_URL_RESPONSE,
        )
        response = weather_ingress.get_service_data(
            dt_from, dt_to, API_KEY, LATITUDE, LONGITUDE
        )
        assert response == EXPECTED_OPENWEATHERMAP_HISTORICAL_GET_DATA_RESPONSE

        # Calling the ingress itself presumably pushes the data to the test
        # backend (conn_backend fixture); every call should succeed.
        # NOTE(review): the expected count of 9 responses is not derivable
        # from this file — verify against the ingress implementation.
        responses = weather_ingress(
            dt_from=dt_from,
            dt_to=dt_to,
            api_key=API_KEY,
            latitude=LATITUDE,
            longitude=LONGITUDE,
        )
        for response in responses:
            assert response.status_code < 300
        assert len(responses) == 9
124 |
125 |
@freeze_time("2024-01-04")
def test_get_data_forecast_api(conn_backend: TestClient) -> None:
    """Check that data retrieval uses the forecast API when the requested
    window lies entirely after the (frozen) current time."""
    ingress = OpenWeatherDataIngress()
    # Window starts the day after the frozen clock (2024-01-04).
    start = datetime(2024, 1, 5, 16, 1, 1, 1)
    end = start + timedelta(hours=2)
    with requests_mock.Mocker() as mocker:
        # Stub the forecast endpoint so no real HTTP request is made.
        mocker.get(
            FORECAST_BASE_URL,
            json=MOCKED_CONST_OPENWEATHERMAP_FORECAST_URL_RESPONSE,
        )
        data = ingress.get_service_data(start, end, API_KEY, LATITUDE, LONGITUDE)
        assert data == EXPECTED_OPENWEATHERMAP_FORECAST_GET_DATA_RESPONSE

        # Run the full ingress pipeline and check every backend call succeeded.
        backend_responses = ingress(
            dt_from=start,
            dt_to=end,
            api_key=API_KEY,
            latitude=LATITUDE,
            longitude=LONGITUDE,
        )
        assert len(backend_responses) == 9
        for backend_response in backend_responses:
            assert backend_response.status_code < 300
152 |
--------------------------------------------------------------------------------