├── .dockerignore ├── .github ├── ISSUE_TEMPLATE.md ├── PULL_REQUEST_TEMPLATE.md └── workflows │ ├── main.yml │ └── scripts │ ├── doc_aglio_api.sh │ └── translate_docker_tag.sh ├── .gitignore ├── DeviceManager ├── BackendHandler.py ├── DatabaseHandler.py ├── DatabaseModels.py ├── DeviceHandler.py ├── ErrorManager.py ├── ImportHandler.py ├── KafkaNotifier.py ├── Logger.py ├── LoggerHandler.py ├── SerializationModels.py ├── TemplateHandler.py ├── TenancyManager.py ├── __init__.py ├── app.py ├── conf.py ├── main.py ├── migrations └── utils.py ├── Dockerfile ├── LICENSE ├── NOTICE ├── README.md ├── createMigration.sh ├── docker ├── entrypoint.sh └── waitForDb.py ├── docs └── apiary.apib ├── dredd.yml ├── local └── compose.yml ├── migrations ├── README ├── alembic.ini ├── env.py ├── script.py.mako └── versions │ ├── 6beff7876a3a_.py │ └── fabf2ca39860_.py ├── requirements └── requirements.txt ├── setup.py └── tests ├── .dockerignore ├── Dockerfile ├── __init__.py ├── docker-compose.yaml ├── dredd-hooks ├── authentication_hook.py ├── operation_hook.py └── token_generator.py ├── requirements.txt ├── start-test.sh ├── test_app.py ├── test_backend_handler.py ├── test_database_handler.py ├── test_device_handler.py ├── test_error_manager.py ├── test_import_handler.py ├── test_kafka_notifier.py ├── test_logger_handler.py ├── test_template_handler.py ├── test_tenancy_manager.py ├── test_utils.py └── token_test_generator.py /.dockerignore: -------------------------------------------------------------------------------- 1 | *.md 2 | *.rst 3 | Dockerfile 4 | LICENSE 5 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | * **I'm submitting a ...** 2 | - [ ] bug report 3 | - [ ] feature request 4 | - [ ] support request 5 | 6 | 7 | * **Do you want to request a *feature* or report a *bug*?** 8 | 9 | 10 | 11 | * **What is the current 
behavior?** 12 | 13 | 14 | 15 | * **If the current behavior is a bug, please provide the steps to reproduce and if possible a minimal demo of the problem** 16 | 17 | 18 | 19 | * **What is the expected behavior?** 20 | 21 | 22 | 23 | * **What is the motivation / use case for changing the behavior?** 24 | 25 | 26 | 27 | * **Please tell us about your environment:** 28 | 29 | - Version: 0.2.x 30 | - Environment: [kubernetes, docker-compose, standalone] 31 | - Operating system: [Ubuntu 16.04, Fedora 28, Ubuntu on VirtualBox (Windows 10), etc.] 32 | 33 | 34 | * **Other information** (e.g. detailed explanation, stacktraces, related issues, suggestions how to fix, links for us to have context, eg. stackoverflow, gitter, etc) 35 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | * **Please check if the PR fulfills these requirements** 2 | - [ ] Tests for the changes have been added (for bug fixes / features) 3 | - [ ] Docs have been added / updated (for bug fixes / features) 4 | 5 | 6 | * **What kind of change does this PR introduce?** (Bug fix, feature, docs update, ...) 7 | 8 | 9 | 10 | * **What is the current behavior?** (You can also link to an open issue here) 11 | 12 | 13 | 14 | * **What is the new behavior (if this is a feature change)?** 15 | 16 | 17 | 18 | * **Does this PR introduce a breaking change?** (What changes might users need to make in their application due to this PR?) 
19 | 20 | 21 | * **Is there any issue related to this PR in other repository?** (such as dojot/dojot) 22 | 23 | 24 | * **Other information**: 25 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | name: Device-manager 2 | 3 | # Triggered after a branch is created, a tag or a branch is pushed or a PR 4 | on: 5 | create: 6 | push: 7 | pull_request: 8 | 9 | jobs: 10 | 11 | python-tests: 12 | name: Python Tests 13 | if: contains(github.event.head_commit.message, 'ci skip') == false # if the commit message has ci skip, doesn't run anything because needs 14 | runs-on: ubuntu-18.04 15 | strategy: 16 | matrix: 17 | python-version: [3.6.x] 18 | steps: 19 | - name: checkout 20 | uses: actions/checkout@v2 21 | 22 | - name: Use Python ${{ matrix.python-version }} 23 | uses: actions/setup-python@v1 24 | with: 25 | python-version: ${{ matrix.python-version }} 26 | 27 | - name: pip install requirements 28 | run: | 29 | pip install pip==19.0.3 -r requirements/requirements.txt 30 | pip install pip==19.0.3 -r tests/requirements.txt 31 | pip install codecov 32 | 33 | - name: node modules cache 34 | uses: actions/cache@v2 35 | with: 36 | path: '**/node_modules' 37 | key: ${{ runner.os }}-modules 38 | 39 | 40 | - name: Build the stack and test 41 | run: | 42 | docker build -t dojot/device-manager . 43 | docker build -t dredd/test -f tests/Dockerfile . 
44 | docker-compose -p test -f tests/docker-compose.yaml up -d kafka data-broker postgres device-manager postgres-users 45 | docker-compose -p test -f tests/docker-compose.yaml run --rm test-runner 46 | 47 | - name: pytest 48 | env: 49 | DEV_MNGR_CRYPTO_PASS: "kamehameHA" 50 | DEV_MNGR_CRYPTO_IV: 1234567890123456 51 | DEV_MNGR_CRYPTO_SALT: "shuriken" 52 | run: | 53 | export DEV_MNGR_CRYPTO_IV=1234567890123456 54 | python3 -m pytest --cov-report=html --cov=DeviceManager tests/ 55 | coverage run -m pytest tests && coverage xml 56 | 57 | 58 | - name: Upload coverage to Codecov 59 | uses: codecov/codecov-action@v1 60 | with: 61 | file: coverage.xml 62 | flags: Device-manager 63 | name: device-manager 64 | 65 | docker-hub: 66 | needs: python-tests 67 | name: Build and Publish image to DockerHub 68 | runs-on: ubuntu-18.04 69 | # If this is a PR, it won't run 70 | if: contains(github.ref, 'refs/pull/') == false 71 | steps: 72 | 73 | - name: Owner Repository 74 | id: get_owner 75 | run: | 76 | export OWNER=$(echo ${{ github.repository }} | cut -f1 -d"/") 77 | echo Current Owner is $OWNER 78 | echo ::set-output name=owner::$OWNER 79 | - name: Checkout 80 | uses: actions/checkout@v1 81 | 82 | - name: Build and Push 83 | uses: elgohr/Publish-Docker-Github-Action@2.16 84 | with: 85 | name: ${{ steps.get_owner.outputs.owner }}/device-manager 86 | username: ${{ secrets.DOCKER_USERNAME }} 87 | password: ${{ secrets.DOCKER_PASSWORD }} 88 | # Label with the built image's commit 89 | buildoptions: " --label commit_sha_dojot=${{ github.sha }} " 90 | # Whether tag will be based in the branch name or in the tag name 91 | tag_names: true 92 | 93 | gh-pages: 94 | needs: docker-hub 95 | 96 | name: Generate documentation and Publish in gh-pages 97 | runs-on: ubuntu-18.04 98 | if: contains(github.ref, 'refs/pull/') == false # if this a PR doesn't run 99 | steps: 100 | 101 | - name: Checkout 102 | uses: actions/checkout@v1 103 | 104 | - name: Version Doc 105 | id: version_doc 106 | env: 107 
| GITHUB_REF: ${{ github.ref }} 108 | GITHUB_SHA: ${{ github.sha }} 109 | run: | 110 | export TAG_VERSION=$(sh .github/workflows/scripts/translate_docker_tag.sh) 111 | echo Documentation Version $TAG_VERSION 112 | echo ::set-output name=version::$TAG_VERSION 113 | - name: Generate docs 114 | env: 115 | FOLDER_TO_GH_PAGES: / 116 | PATH_APIB: ./docs/apiary.apib 117 | VERSION_NAME: ${{ steps.version_doc.outputs.version }} 118 | run: | 119 | sh .github/workflows/scripts/doc_aglio_api.sh 120 | - name: Publish Documentation 121 | uses: peaceiris/actions-gh-pages@v3 122 | with: 123 | github_token: ${{ secrets.GITHUB_TOKEN }} 124 | publish_branch: gh-pages 125 | publish_dir: ./build-docs #fix when use .github/workflows/scripts/doc_aglio_api.sh 126 | keep_files: true 127 | allow_empty_commit: false -------------------------------------------------------------------------------- /.github/workflows/scripts/doc_aglio_api.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | mkdir -p ./build-docs 4 | 5 | cp $PATH_APIB ./build-docs/api.apib 6 | 7 | cd ./build-docs 8 | 9 | mkdir -p $FOLDER_TO_GH_PAGES 10 | 11 | docker run --volume $(pwd):/temp:Z dojot/aglio -i /temp/api.apib -o - > ./$FOLDER_TO_GH_PAGES/apiary_${VERSION_NAME}.html 12 | 13 | rm -rf api.apib 14 | 15 | git add $FOLDER_TO_GH_PAGES/apiary_${VERSION_NAME}.html 16 | 17 | if [ -f "$FOLDER_TO_GH_PAGES/apiary_${VERSION_NAME}.html" ]; then 18 | echo "Doc API create " 19 | else 20 | echo "Error, couldn't create file for doc" 21 | fi -------------------------------------------------------------------------------- /.github/workflows/scripts/translate_docker_tag.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | BRANCH=$(echo ${GITHUB_REF} | sed -e "s/refs\/heads\///g" | sed -e "s/\//-/g") 5 | 6 | if [ "${BRANCH}" = "master" ]; then 7 | TAG="latest" 8 | elif [ $(echo "${GITHUB_REF}" | sed -e 
"s/refs\/tags\///g") != "${GITHUB_REF}" ]; then 9 | #is Git Tag 10 | TAG=$(echo ${GITHUB_REF} | sed -e "s/refs\/tags\///g") 11 | elif [ $(echo "${GITHUB_REF}" | sed -e "s/refs\/pull\///g") != "${GITHUB_REF}" ]; then 12 | #isPullRequest 13 | TAG="${GITHUB_SHA}" 14 | else 15 | TAG="${BRANCH}" 16 | fi; 17 | 18 | #tag for version 19 | echo $TAG -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | *.svg 3 | .eggs/ 4 | .vscode/ 5 | .idea/ 6 | docs/build/* 7 | *egg-info 8 | .coverage 9 | htmlcov/ 10 | venv -------------------------------------------------------------------------------- /DeviceManager/BackendHandler.py: -------------------------------------------------------------------------------- 1 | """ 2 | Defines common handler interface and implementations for devices 3 | """ 4 | 5 | import json 6 | import traceback 7 | import requests 8 | from DeviceManager.utils import HTTPRequestError 9 | from DeviceManager.KafkaNotifier import KafkaNotifier, DeviceEvent 10 | import logging 11 | from datetime import datetime 12 | import time 13 | from DeviceManager.Logger import Log 14 | 15 | 16 | LOGGER = Log().color_log() 17 | 18 | 19 | # TODO: this actually is a symptom of bad responsability management. 20 | # All device bookkeeping should be performed on a single (perhaps this) service, with the 21 | # services that implement specific features referring back to the single device management 22 | # service for their transient data. 23 | class BackendHandler(object): 24 | """ 25 | Abstract class that represents an implementation backend on the internal middleware 26 | infrastructure. 27 | """ 28 | 29 | def create(self, device): 30 | """ 31 | Creates the given device on the implemented backend. 
32 | :param device: Dictionary with the full device configuration 33 | :returns: True if operation succeeded 34 | :raises HTTPRequestError 35 | """ 36 | raise NotImplementedError('Abstract method called') 37 | 38 | def remove(self, device_id): 39 | """ 40 | Removes the device identified by the given id 41 | :param device_id: unique identifier of the device to be removed 42 | :raises HTTPRequestError 43 | """ 44 | raise NotImplementedError('Abstract method called') 45 | 46 | def update(self, device): 47 | """ 48 | Updates the given device on the implemented backend. 49 | :param device: Dictionary with the full device configuration. Must contain an 'id' 50 | field with the unique identifier of the device to be updated. That 51 | field must not be changed. 52 | :raises HTTPRequestError 53 | """ 54 | raise NotImplementedError('Abstract method called') 55 | 56 | 57 | class KafkaHandler: 58 | 59 | def __init__(self): 60 | self.kafkaNotifier = KafkaNotifier() 61 | 62 | def create(self, device, meta): 63 | """ 64 | Publishes event to kafka broker, notifying device creation 65 | """ 66 | 67 | LOGGER.info(f" Publishing create event to Kafka") 68 | self.kafkaNotifier.send_notification(DeviceEvent.CREATE, device, meta) 69 | 70 | def remove(self, device, meta): 71 | """ 72 | Publishes event to kafka broker, notifying device removal 73 | """ 74 | 75 | LOGGER.info(f" Publishing remove event to Kafka") 76 | self.kafkaNotifier.send_notification(DeviceEvent.REMOVE, device, meta) 77 | 78 | def update(self, device, meta): 79 | """ 80 | Publishes event to kafka broker, notifying device update 81 | """ 82 | 83 | LOGGER.info(f" Publishing create update to Kafka") 84 | self.kafkaNotifier.send_notification(DeviceEvent.UPDATE, device, meta) 85 | 86 | def configure(self, device, meta): 87 | """ 88 | Publishes event to kafka broker, notifying device configuration 89 | """ 90 | LOGGER.info(f" Publishing configure event to Kafka") 91 | self.kafkaNotifier.send_notification(DeviceEvent.CONFIGURE, 
device, meta) 92 | 93 | class KafkaInstanceHandler: 94 | 95 | kafkaNotifier = None 96 | 97 | def __init__(self): 98 | pass 99 | 100 | def getInstance(self, kafka_instance): 101 | """ 102 | Instantiates a connection with Kafka, was created because 103 | previously the connection was being created in KafkaNotifier 104 | once time every import. 105 | 106 | :param kafka_instance: An instance of KafkaHandler. 107 | :return An instance of KafkaHandler used to notify 108 | """ 109 | 110 | if kafka_instance is None: 111 | self.kafkaNotifier = KafkaHandler() 112 | 113 | return self.kafkaNotifier 114 | -------------------------------------------------------------------------------- /DeviceManager/DatabaseHandler.py: -------------------------------------------------------------------------------- 1 | import os 2 | from flask import g, request, jsonify, make_response 3 | from flask_sqlalchemy import SQLAlchemy 4 | 5 | from .app import app 6 | from .conf import CONFIG 7 | from .utils import get_allowed_service 8 | from DeviceManager.Logger import Log 9 | 10 | app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False 11 | app.config['SQLALCHEMY_DATABASE_URI'] = CONFIG.get_db_url() 12 | app.config['SQLALCHEMY_BINDS'] = {} 13 | 14 | LOGGER = Log().color_log() 15 | 16 | # adapted from https://gist.github.com/miikka/28a7bd77574a00fcec8d 17 | class MultiTenantSQLAlchemy(SQLAlchemy): 18 | def check_binds(self, bind_key): 19 | binds = app.config.get('SQLALCHEMY_BINDS') 20 | if binds.get(bind_key, None) is None: 21 | binds[bind_key] = CONFIG.get_db_url() 22 | app.config['SQLALCHEMY_BINDS'] = binds 23 | 24 | def choose_tenant(self, bind_key): 25 | if hasattr(g, 'tenant'): 26 | raise RuntimeError('Switching tenant in the middle of the request.') 27 | g.tenant = bind_key 28 | 29 | def get_engine(self, app=None, bind=None): 30 | if bind is None: 31 | if not hasattr(g, 'tenant'): 32 | raise RuntimeError('No tenant chosen.') 33 | bind = g.tenant 34 | self.check_binds(bind) 35 | return 
super().get_engine(app=app, bind=bind) 36 | 37 | SINGLE_TENANT = os.environ.get('SINGLE_TENANT', False) 38 | if SINGLE_TENANT: 39 | db = SQLAlchemy(app) 40 | else: 41 | db = MultiTenantSQLAlchemy(app) 42 | 43 | @app.before_request 44 | def before_request(): 45 | try: 46 | tenant = get_allowed_service(request.headers['authorization']) 47 | db.choose_tenant(tenant) 48 | except KeyError: 49 | error = {"message": "No authorization token has been supplied", "status": 401} 50 | LOGGER.error(f' {error["message"]} - {error["status"]}.') 51 | return make_response(jsonify(error), 401) 52 | except ValueError: 53 | error = {"message": "Invalid authentication token", "status": 401} 54 | LOGGER.error(f' {error["message"]} - {error["status"]}.') 55 | return make_response(jsonify(error), 401) 56 | 57 | 58 | -------------------------------------------------------------------------------- /DeviceManager/DatabaseModels.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | import re 3 | import sqlalchemy 4 | from sqlalchemy import event 5 | 6 | from .app import app 7 | from .utils import HTTPRequestError 8 | from .conf import CONFIG 9 | from .DatabaseHandler import db 10 | 11 | class DeviceOverride(db.Model): 12 | __tablename__ = 'overrides' 13 | 14 | id = db.Column(db.Integer, db.Sequence('override_id'), primary_key=True) 15 | 16 | did = db.Column(db.String(8), db.ForeignKey('devices.id')) 17 | aid = db.Column(db.Integer, db.ForeignKey('attrs.id')) 18 | 19 | device = db.relationship('Device', back_populates='overrides') 20 | attr = db.relationship('DeviceAttr', back_populates='overrides') 21 | 22 | static_value = db.Column(db.String(128)) 23 | 24 | class DeviceAttr(db.Model): 25 | __tablename__ = 'attrs' 26 | 27 | id = db.Column(db.Integer, db.Sequence('attr_id'), primary_key=True) 28 | label = db.Column(db.String(128), nullable=False) 29 | created = db.Column(db.DateTime, default=datetime.now) 30 | updated = 
db.Column(db.DateTime, onupdate=datetime.now) 31 | 32 | type = db.Column(db.String(32), nullable=False) 33 | value_type = db.Column(db.String(32), nullable=False) 34 | static_value = db.Column(db.String(128)) 35 | 36 | template_id = db.Column(db.Integer, db.ForeignKey('templates.id')) 37 | template = db.relationship("DeviceTemplate", back_populates="attrs") 38 | 39 | parent_id = db.Column(db.Integer, db.ForeignKey('attrs.id')) 40 | parent = db.relationship("DeviceAttr", remote_side=[id], back_populates="children") 41 | children = db.relationship("DeviceAttr", back_populates="parent", cascade="delete") 42 | 43 | # remove known overrides if this attribute is removed 44 | overrides = db.relationship('DeviceOverride', cascade="delete") 45 | 46 | # remove known pre shared keys if this attribute is removed 47 | pre_shared_keys = db.relationship('DeviceAttrsPsk', 48 | cascade="delete", 49 | back_populates="attrs") 50 | 51 | # Any given template must not possess two attributes with the same type, label 52 | __table_args__ = ( 53 | sqlalchemy.UniqueConstraint('template_id', 'type', 'label'), 54 | sqlalchemy.CheckConstraint("((template_id IS NULL) AND NOT (parent_id IS NULL)) OR \ 55 | (NOT (template_id IS NULL) AND (parent_id IS NULL))") 56 | ) 57 | 58 | def __repr__(self): 59 | children_str="" 60 | for child in self.children: 61 | children_str += "«{}:{}»".format(child.label, child.static_value) 62 | 63 | return "".format( 64 | self.label, self.type, self.value_type, children_str, self.parent) 65 | 66 | 67 | class DeviceTemplate(db.Model): 68 | __tablename__ = 'templates' 69 | 70 | id = db.Column(db.Integer, db.Sequence('template_id'), primary_key=True) 71 | label = db.Column(db.String(128), nullable=False) 72 | created = db.Column(db.DateTime, default=datetime.now) 73 | updated = db.Column(db.DateTime, onupdate=datetime.now) 74 | 75 | attrs = db.relationship("DeviceAttr", 76 | back_populates="template", 77 | lazy='joined', 78 | cascade="delete") 79 | 80 | devices = 
db.relationship("Device", 81 | secondary='device_template', 82 | back_populates="templates", 83 | passive_deletes='all') 84 | 85 | config_attrs = db.relationship('DeviceAttr', 86 | primaryjoin=db.and_(DeviceAttr.template_id == id, 87 | DeviceAttr.type != 'static', 88 | DeviceAttr.type != 'dynamic', 89 | DeviceAttr.type != 'actuator')) 90 | data_attrs = db.relationship('DeviceAttr', 91 | primaryjoin=db.and_(DeviceAttr.template_id == id, 92 | DeviceAttr.type.in_(('static', 'dynamic', 'actuator')))) 93 | 94 | def __repr__(self): 95 | return "".format(self.label, self.attrs) 96 | 97 | 98 | class Device(db.Model): 99 | __tablename__ = 'devices' 100 | 101 | id = db.Column(db.String(8), unique=True, nullable=False, primary_key=True) 102 | label = db.Column(db.String(128), nullable=False) 103 | created = db.Column(db.DateTime, default=datetime.now) 104 | updated = db.Column(db.DateTime, onupdate=datetime.now) 105 | 106 | templates = db.relationship("DeviceTemplate", 107 | secondary='device_template', 108 | back_populates="devices", 109 | lazy='joined') 110 | 111 | overrides = db.relationship("DeviceOverride", 112 | back_populates="device", 113 | cascade="delete", 114 | lazy='joined') 115 | 116 | pre_shared_keys = db.relationship('DeviceAttrsPsk', 117 | cascade='delete', 118 | back_populates="devices", 119 | lazy='joined') 120 | 121 | persistence = db.Column(db.String(128)) 122 | 123 | def __repr__(self): 124 | return "" % self.label 125 | 126 | 127 | class DeviceTemplateMap(db.Model): 128 | __tablename__ = 'device_template' 129 | device_id = db.Column(db.String(8), db.ForeignKey('devices.id'), 130 | primary_key=True, index=True) 131 | template_id = db.Column(db.Integer, db.ForeignKey('templates.id'), 132 | primary_key=True, index=True, nullable=False) 133 | 134 | 135 | class DeviceAttrsPsk(db.Model): 136 | __tablename__ = 'pre_shared_keys' 137 | 138 | attr_id = db.Column(db.Integer, db.ForeignKey('attrs.id'), primary_key=True) 139 | device_id = db.Column(db.String(8), 
db.ForeignKey('devices.id'), primary_key=True) 140 | psk = db.Column(db.Binary(1024), nullable=False) 141 | 142 | devices = db.relationship("Device", back_populates="pre_shared_keys") 143 | attrs = db.relationship("DeviceAttr", back_populates="pre_shared_keys") 144 | 145 | def __repr__(self): 146 | return "" % ( 147 | self.device_id, self.attr_id, self.psk) 148 | 149 | 150 | def assert_device_exists(device_id, session=None): 151 | """ 152 | Assert that a device exists, returning the object retrieved from the 153 | database. 154 | """ 155 | try: 156 | if session: 157 | with session.no_autoflush: 158 | return session.query(Device).filter_by(id=device_id).one() 159 | else: 160 | return Device.query.filter_by(id=device_id).one() 161 | except sqlalchemy.orm.exc.NoResultFound: 162 | raise HTTPRequestError(404, "No such device: %s" % device_id) 163 | 164 | 165 | def assert_template_exists(template_id, session=None): 166 | try: 167 | if session: 168 | with session.no_autoflush: 169 | return session.query(DeviceTemplate).filter_by(id=template_id).one() 170 | else: 171 | return DeviceTemplate.query.filter_by(id=template_id).one() 172 | except sqlalchemy.orm.exc.NoResultFound: 173 | raise HTTPRequestError(404, "No such template: %s" % template_id) 174 | 175 | 176 | def assert_device_relation_exists(device_id, template_id): 177 | try: 178 | return DeviceTemplateMap.query.filter_by(device_id=device_id, template_id=template_id).one() 179 | except sqlalchemy.orm.exc.NoResultFound: 180 | raise HTTPRequestError(404, "Device %s is not associated with template %s" % (device_id, template_id)) 181 | 182 | 183 | def handle_consistency_exception(error): 184 | raise HTTPRequestError(400, error.orig.diag.message_primary) 185 | 186 | 187 | @event.listens_for(DeviceAttr.value_type, 'set') 188 | def receive_set(target, value, old_value, initiator): 189 | # we need to watch DeviceAttr's value_type to clean the pre_shared_key if 190 | # the value changes from psk to something else 191 | if 
old_value == 'psk' and value != 'psk': 192 | for key in target.pre_shared_keys: 193 | db.session.delete(key) 194 | -------------------------------------------------------------------------------- /DeviceManager/ErrorManager.py: -------------------------------------------------------------------------------- 1 | """ Error pages definitions """ 2 | 3 | import json 4 | from flask import make_response, jsonify 5 | from DeviceManager.app import app 6 | 7 | 8 | @app.errorhandler(404) 9 | def not_found(e): 10 | return make_response(jsonify({"msg": "Invalid endpoint requested"}), 404) 11 | 12 | 13 | @app.errorhandler(500) 14 | def internal_error(e): 15 | return make_response(jsonify({"msg": "Internal Error"}), 500) 16 | -------------------------------------------------------------------------------- /DeviceManager/ImportHandler.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | import copy 4 | import json 5 | from flask import Blueprint, request, jsonify, make_response 6 | from sqlalchemy.exc import IntegrityError 7 | from sqlalchemy.sql import func 8 | 9 | from DeviceManager.app import app 10 | from DeviceManager.Logger import Log 11 | from DeviceManager.utils import format_response, HTTPRequestError, retrieve_auth_token 12 | from DeviceManager.conf import CONFIG 13 | from DeviceManager.BackendHandler import KafkaHandler, KafkaInstanceHandler 14 | 15 | from DeviceManager.DatabaseHandler import db 16 | from DeviceManager.DatabaseModels import DeviceTemplate, Device, DeviceAttr, DeviceOverride 17 | from DeviceManager.SerializationModels import import_schema 18 | from DeviceManager.SerializationModels import parse_payload, load_attrs 19 | from DeviceManager.SerializationModels import ValidationError 20 | from DeviceManager.TenancyManager import init_tenant_context 21 | from DeviceManager.DeviceHandler import auto_create_template, serialize_full_device 22 | 23 | importing = Blueprint('import', __name__) 24 
| 25 | LOGGER = Log().color_log() 26 | 27 | 28 | class ImportHandler: 29 | 30 | kafka = KafkaInstanceHandler() 31 | 32 | def __init__(self): 33 | pass 34 | 35 | def drop_sequences(): 36 | db.session.execute("DROP SEQUENCE template_id") 37 | db.session.execute("DROP SEQUENCE attr_id") 38 | LOGGER.info(f" Removed sequences") 39 | 40 | def replace_ids_by_import_ids(my_json): 41 | new_json = json.loads(my_json) 42 | return json.dumps(new_json).replace('\"id\":', '\"import_id\":') 43 | 44 | def restore_template_sequence(): 45 | max_template_id = 1 46 | current_max_template_id = db.session.query( 47 | func.max(DeviceTemplate.id)).scalar() 48 | if current_max_template_id is not None: 49 | max_template_id = current_max_template_id + 1 50 | db.session.execute( 51 | "CREATE SEQUENCE template_id START {}".format(str(max_template_id))) 52 | 53 | def restore_attr_sequence(): 54 | max_attr_id = 1 55 | current_max_attr_id = db.session.query( 56 | func.max(DeviceAttr.id)).scalar() 57 | if current_max_attr_id is not None: 58 | max_attr_id = current_max_attr_id + 1 59 | db.session.execute( 60 | "CREATE SEQUENCE attr_id START {}".format(str(max_attr_id))) 61 | 62 | def restore_sequences(): 63 | ImportHandler.restore_template_sequence() 64 | ImportHandler.restore_attr_sequence() 65 | LOGGER.info(f" Restored sequences") 66 | 67 | @classmethod 68 | def notifies_deletion_to_kafka(cls, device, tenant): 69 | data = serialize_full_device(device, tenant) 70 | kafka_handler_instance = cls.kafka.getInstance(cls.kafka.kafkaNotifier) 71 | kafka_handler_instance.remove(data, meta={"service": tenant}) 72 | 73 | def delete_records(tenant): 74 | overrides = db.session.query(DeviceOverride) 75 | for override in overrides: 76 | db.session.delete(override) 77 | LOGGER.info(f" Deleted overrides") 78 | 79 | devices = db.session.query(Device) 80 | for device in devices: 81 | db.session.delete(device) 82 | ImportHandler.notifies_deletion_to_kafka(device, tenant) 83 | LOGGER.info(f" Deleted devices") 84 | 
85 | templates = db.session.query(DeviceTemplate) 86 | for template in templates: 87 | db.session.delete(template) 88 | LOGGER.info(f" Deleted templates") 89 | 90 | def clear_db_config(tenant): 91 | ImportHandler.drop_sequences() 92 | ImportHandler.delete_records(tenant) 93 | db.session.flush() 94 | 95 | def restore_db_config(): 96 | ImportHandler.restore_sequences() 97 | 98 | def save_templates(json_data, json_payload): 99 | saved_templates = [] 100 | for template in json_data['templates']: 101 | loaded_template = DeviceTemplate(**template) 102 | for json in json_payload['templates']: 103 | if(json['import_id'] == template["id"]): 104 | load_attrs(json['attrs'], loaded_template, DeviceAttr, db) 105 | db.session.add(loaded_template) 106 | saved_templates.append(loaded_template) 107 | 108 | LOGGER.info(f" Saved templates") 109 | return saved_templates 110 | 111 | def set_templates_on_device(loaded_device, json, saved_templates): 112 | loaded_device.templates = [] 113 | for template_id in json.get('templates', []): 114 | for saved_template in saved_templates: 115 | if(template_id == saved_template.id): 116 | loaded_device.templates.append(saved_template) 117 | 118 | auto_create_template(json, loaded_device) 119 | 120 | def save_devices(json_data, json_payload, saved_templates): 121 | saved_devices = [] 122 | for device in json_data['devices']: 123 | device.pop('templates', None) 124 | loaded_device = Device(**device) 125 | for json in json_payload['devices']: 126 | if(json['id'] == device["id"]): 127 | ImportHandler.set_templates_on_device( 128 | loaded_device, json, saved_templates) 129 | 130 | db.session.add(loaded_device) 131 | saved_devices.append(loaded_device) 132 | 133 | LOGGER.info(f" Saved devices") 134 | return saved_devices 135 | 136 | def notifies_creation_to_kafka(cls, saved_devices, tenant): 137 | kafka_handler_instance = cls.kafka.getInstance(cls.kafka.kafkaNotifier) 138 | for orm_device in saved_devices: 139 | full_device = 
serialize_full_device(orm_device, tenant) 140 | kafka_handler_instance.create( 141 | full_device, meta={"service": tenant}) 142 | 143 | @staticmethod 144 | def import_data(data, token, content_type): 145 | """ 146 | Import data. 147 | 148 | :param data: The received data HTTP request, as created by Flask. 149 | :param token: The authorization token (JWT). 150 | :param content_type: The content_type of request (application/json) 151 | :return The status message. 152 | :raises HTTPRequestError: If no authorization token was provided (no 153 | tenant was informed) 154 | :raises HTTPRequestError: If import attribute constraints were 155 | violated. This might happen if two attributes have the same name, for 156 | instance. 157 | """ 158 | 159 | saved_templates = [] 160 | saved_devices = [] 161 | 162 | try: 163 | tenant = init_tenant_context(token, db) 164 | 165 | ImportHandler.clear_db_config(tenant) 166 | 167 | original_req_data = copy.copy(data) 168 | 169 | original_payload = json.loads(original_req_data) 170 | 171 | data = ImportHandler.replace_ids_by_import_ids(data) 172 | 173 | json_data, json_payload = parse_payload( 174 | content_type, data, import_schema) 175 | 176 | saved_templates = ImportHandler.save_templates( 177 | json_data, json_payload) 178 | 179 | saved_devices = ImportHandler.save_devices( 180 | json_data, original_payload, saved_templates) 181 | 182 | ImportHandler.restore_db_config() 183 | 184 | ImportHandler().notifies_creation_to_kafka(saved_devices, tenant) 185 | 186 | db.session.commit() 187 | 188 | except IntegrityError as e: 189 | LOGGER.error(f' {e}') 190 | db.session.rollback() 191 | raise HTTPRequestError( 192 | 400, 'Template attribute constraints are violated by the request') 193 | except Exception as e: 194 | LOGGER.error(f' {e}') 195 | db.session.rollback() 196 | raise HTTPRequestError(400, 'Failed to import data') 197 | finally: 198 | db.session.close() 199 | 200 | results = { 201 | 'message': 'data imported!' 
202 | } 203 | return results 204 | 205 | 206 | @importing.route('/import', methods=['POST']) 207 | def flask_import_data(): 208 | try: 209 | LOGGER.info(f" Starting importing data...") 210 | 211 | # retrieve the authorization token 212 | token = retrieve_auth_token(request) 213 | 214 | # retrieve header and body of request 215 | content_type = request.headers.get('Content-Type') 216 | data = request.data 217 | 218 | result = ImportHandler.import_data(data, token, content_type) 219 | 220 | LOGGER.info(f" Imported data!") 221 | 222 | return make_response(jsonify(result), 201) 223 | 224 | except ValidationError as e: 225 | results = {'message': 'failed to parse attr', 'errors': e} 226 | LOGGER.error(f" {e}") 227 | return make_response(jsonify(results), 400) 228 | except HTTPRequestError as error: 229 | LOGGER.error(f" {error.message}") 230 | if isinstance(error.message, dict): 231 | return make_response(jsonify(error.message), error.error_code) 232 | return format_response(error.error_code, error.message) 233 | 234 | 235 | app.register_blueprint(importing) 236 | -------------------------------------------------------------------------------- /DeviceManager/KafkaNotifier.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import logging 3 | import json 4 | 5 | import requests 6 | from kafka import KafkaProducer 7 | from kafka.errors import KafkaTimeoutError 8 | 9 | from DeviceManager.conf import CONFIG 10 | from DeviceManager.Logger import Log 11 | from datetime import datetime 12 | import time 13 | 14 | 15 | LOGGER = Log().color_log() 16 | 17 | 18 | class DeviceEvent: 19 | CREATE = "create" 20 | UPDATE = "update" 21 | REMOVE = "remove" 22 | CONFIGURE = "configure" 23 | TEMPLATE = "template.update" 24 | 25 | 26 | class NotificationMessage: 27 | event = "" 28 | data = None 29 | meta = None 30 | 31 | def __init__(self, ev, d, m): 32 | self.event = ev 33 | self.data = d 34 | self.meta = m 35 | 36 | def to_json(self): 
37 | return {"event": self.event, "data": self.data, "meta": self.meta} 38 | 39 | 40 | class KafkaNotifier: 41 | 42 | def __init__(self): 43 | self.kafka_address = CONFIG.kafka_host + ':' + CONFIG.kafka_port 44 | self.kf_prod = None 45 | 46 | self.kf_prod = KafkaProducer(value_serializer=lambda v: json.dumps(v).encode('utf-8'), 47 | bootstrap_servers=self.kafka_address) 48 | 49 | # Maps services to their managed topics 50 | self.topic_map = {} 51 | 52 | def get_topic(self, service, subject): 53 | if service in self.topic_map.keys(): 54 | if subject in self.topic_map[service].keys(): 55 | return self.topic_map[service][subject] 56 | 57 | target = "{}/topic/{}".format(CONFIG.data_broker, subject) 58 | userinfo = { 59 | "username": "device-manager", 60 | "service": service 61 | } 62 | 63 | jwt = "{}.{}.{}".format(base64.b64encode("model".encode()).decode(), 64 | base64.b64encode(json.dumps( 65 | userinfo).encode()).decode(), 66 | base64.b64encode("signature".encode()).decode()) 67 | 68 | response = requests.get(target, headers={"authorization": jwt}) 69 | if 200 <= response.status_code < 300: 70 | payload = response.json() 71 | if self.topic_map.get(service, None) is None: 72 | self.topic_map[service] = {} 73 | self.topic_map[service][subject] = payload['topic'] 74 | return payload['topic'] 75 | return None 76 | 77 | def send_notification(self, event, device, meta): 78 | # TODO What if Kafka is not yet up? 
79 | 80 | full_msg = NotificationMessage(event, device, meta) 81 | try: 82 | topic = self.get_topic(meta['service'], CONFIG.subject) 83 | LOGGER.debug(f" topic for {CONFIG.subject} is {topic}") 84 | if topic is None: 85 | LOGGER.error(f" Failed to retrieve named topic to publish to") 86 | 87 | self.kf_prod.send(topic, full_msg.to_json()) 88 | self.kf_prod.flush() 89 | except KafkaTimeoutError: 90 | LOGGER.error(f" Kafka timed out.") 91 | 92 | def send_raw(self, raw_data, tenant): 93 | try: 94 | topic = self.get_topic(tenant, CONFIG.subject) 95 | if topic is None: 96 | LOGGER.error(f" Failed to retrieve named topic to publish to") 97 | self.kf_prod.send(topic, raw_data) 98 | self.kf_prod.flush() 99 | except KafkaTimeoutError: 100 | LOGGER.error(f" Kafka timed out.") 101 | -------------------------------------------------------------------------------- /DeviceManager/Logger.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from logging import config as config_log 3 | from colorlog import ColoredFormatter 4 | 5 | from DeviceManager.conf import CONFIG 6 | from DeviceManager.utils import HTTPRequestError 7 | 8 | class Log: 9 | 10 | def __init__(self, LOG_LEVEL = CONFIG.log_level, 11 | LOG_FORMAT = "[%(log_color)s%(asctime)-8s%(reset)s] |%(log_color)s%(module)-8s%(reset)s| %(log_color)s%(levelname)s%(reset)s: %(log_color)s%(message)s%(reset)s", DISABLED = False): 12 | 13 | #Disable all others modules logs 14 | LOGGING = { 15 | 'version': 1, 16 | 'disable_existing_loggers': True, 17 | } 18 | 19 | dateFormat = '%d/%m/%y - %H:%M:%S' 20 | config_log.dictConfig(LOGGING) 21 | self.formatter = ColoredFormatter(LOG_FORMAT, dateFormat) 22 | self.log = logging.getLogger('device-manager.' 
+ __name__) 23 | self.log.setLevel(LOG_LEVEL) 24 | self.log.disabled = DISABLED 25 | self.level = LOG_LEVEL 26 | 27 | if not getattr(self.log, 'handler_set', None): 28 | self.stream = logging.StreamHandler() 29 | self.stream.setLevel(LOG_LEVEL) 30 | self.stream.setFormatter(self.formatter) 31 | self.log.setLevel(LOG_LEVEL) 32 | self.log.addHandler(self.stream) 33 | self.log.handler_set = True 34 | 35 | def update_log_level(self, LEVEL): 36 | levelToName = ['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG'] 37 | 38 | try: 39 | self.log = logging.getLogger('device-manager.' + __name__) 40 | for hdlr in self.log.handlers[:]: 41 | self.log.removeHandler(hdlr) 42 | 43 | self.stream = logging.StreamHandler() 44 | self.stream.setLevel(LEVEL) 45 | self.stream.setFormatter(self.formatter) 46 | 47 | self.log.setLevel(LEVEL) 48 | self.log.addHandler(self.stream) 49 | self.log.handler_set = True 50 | 51 | self.level = LEVEL 52 | 53 | except ValueError: 54 | raise HTTPRequestError(400, "Unknown level: {} valid are {}".format(LEVEL, levelToName)) 55 | 56 | def get_log_level(self): 57 | return self.level 58 | 59 | def color_log(self): 60 | return self.log 61 | -------------------------------------------------------------------------------- /DeviceManager/LoggerHandler.py: -------------------------------------------------------------------------------- 1 | from flask import Blueprint, request, jsonify, make_response 2 | 3 | from DeviceManager.conf import CONFIG 4 | from DeviceManager.Logger import Log 5 | 6 | from DeviceManager.app import app 7 | 8 | from DeviceManager.SerializationModels import parse_payload, load_attrs 9 | from DeviceManager.SerializationModels import log_schema 10 | from DeviceManager.utils import format_response, HTTPRequestError, get_pagination 11 | 12 | logger = Blueprint('log', __name__) 13 | LOG = Log() 14 | 15 | class LoggerHandler: 16 | 17 | def _init_(self): 18 | pass 19 | 20 | @staticmethod 21 | def update_log_level(level): 22 | """ 23 | Update the log 
level of device manager. 24 | 25 | :param level: Receive a string containing the new log level. 26 | :raises HTTPRequestError: If no authorization token was provided (no 27 | tenant was informed) or the request body contains some error. 28 | """ 29 | 30 | LOG.update_log_level(level.upper()) 31 | 32 | return True 33 | 34 | @staticmethod 35 | def get_log_level(): 36 | """ 37 | Fetches the log level configured. 38 | 39 | :return A JSON containing the log level. 40 | :rtype JSON 41 | """ 42 | 43 | result = { 44 | 'level': LOG.get_log_level() 45 | } 46 | 47 | return result 48 | 49 | 50 | @logger.route('/log', methods=['PUT']) 51 | def flask_update_log_level(): 52 | try: 53 | content_type = request.headers.get('Content-Type') 54 | data_request = request.data 55 | 56 | _, json_payload = parse_payload(content_type, data_request, log_schema) 57 | LoggerHandler.update_log_level(json_payload['level']) 58 | 59 | return make_response('', 200) 60 | 61 | except HTTPRequestError as error: 62 | if isinstance(error.message, dict): 63 | return make_response(jsonify(error.message), error.error_code) 64 | return format_response(error.error_code, error.message) 65 | 66 | @logger.route('/log', methods=['GET']) 67 | def flask_get_log_level(): 68 | result = LoggerHandler.get_log_level() 69 | 70 | return make_response(jsonify(result), 200) 71 | 72 | app.register_blueprint(logger) 73 | -------------------------------------------------------------------------------- /DeviceManager/SerializationModels.py: -------------------------------------------------------------------------------- 1 | # object to json sweetness 2 | import json 3 | import re 4 | from marshmallow import Schema, fields, post_dump, post_load, ValidationError 5 | 6 | from DeviceManager.utils import HTTPRequestError 7 | from DeviceManager.DatabaseModels import DeviceAttr 8 | from DeviceManager.Logger import Log 9 | 10 | LOGGER = Log().color_log() 11 | 12 | def validate_attr_label(input): 13 | if re.match(r'^[a-zA-Z0-9_-]+$', 
input) is None: 14 | raise ValidationError("Labels must contain letters, numbers or dashes(-_)") 15 | 16 | def validate_children_attr_label(attr_label): 17 | unique = { each['label'] : each for each in attr_label }.values() 18 | if len(attr_label) > len(unique): 19 | raise ValidationError('a template can not have repeated attributes') 20 | 21 | def set_id_with_import_id(data): 22 | if 'import_id' in data and data['import_id'] is not None: 23 | data['id'] = data['import_id'] 24 | del(data['import_id']) 25 | return data 26 | 27 | def validate_repeated_attrs(data): 28 | if ('attrs' in data): 29 | try: 30 | uniques = { each['label'] : each for each in data['attrs'] }.values() 31 | if len(data['attrs']) > len(uniques): 32 | raise ValidationError('a device can not have repeated attributes') 33 | except KeyError: 34 | raise ValidationError('missing label attribute') 35 | 36 | class MetaSchema(Schema): 37 | id = fields.Int(dump_only=True) 38 | import_id = fields.Int(load_only=True) 39 | label = fields.Str(required=True) 40 | created = fields.DateTime(dump_only=True) 41 | updated = fields.DateTime(dump_only=True) 42 | type = fields.Str(required=True) 43 | value_type = fields.Str(required=True) 44 | static_value = fields.Field() 45 | is_static_overridden = fields.Bool(allow_none=True) 46 | 47 | @post_load 48 | def set_import_id(self, data): 49 | return set_id_with_import_id(data) 50 | 51 | metaattr_schema = MetaSchema() 52 | 53 | class AttrSchema(Schema): 54 | id = fields.Int() 55 | import_id = fields.Int(load_only=True) 56 | label = fields.Str(required=True, validate=validate_attr_label, allow_none=False, missing=None) 57 | created = fields.DateTime(dump_only=True) 58 | updated = fields.DateTime(dump_only=True) 59 | type = fields.Str(required=True) 60 | value_type = fields.Str(required=True) 61 | static_value = fields.Field(allow_none=True) 62 | is_static_overridden = fields.Bool(allow_none=True) 63 | template_id = fields.Str(dump_only=True) 64 | 65 | metadata = 
fields.Nested(MetaSchema, many=True, attribute='children', validate=validate_children_attr_label) 66 | 67 | @post_load 68 | def set_import_id(self, data): 69 | return set_id_with_import_id(data) 70 | 71 | @post_dump 72 | def remove_null_values(self, data): 73 | return { 74 | key: value for key, value in data.items() \ 75 | if (value is not None) and ((isinstance(value, list) and len(value)) or not isinstance(value, list)) 76 | } 77 | 78 | attr_schema = AttrSchema() 79 | attr_list_schema = AttrSchema(many=True) 80 | 81 | class TemplateSchema(Schema): 82 | id = fields.Int(dump_only=True) 83 | import_id = fields.Int(load_only=True) 84 | label = fields.Str(required=True) 85 | created = fields.DateTime(dump_only=True) 86 | updated = fields.DateTime(dump_only=True) 87 | attrs = fields.Nested(AttrSchema, many=True, dump_only=True) 88 | data_attrs = fields.Nested(AttrSchema, many=True, dump_only=True) 89 | config_attrs = fields.Nested(AttrSchema, many=True, dump_only=True) 90 | 91 | @post_load 92 | def set_import_id(self, data): 93 | return set_id_with_import_id(data) 94 | 95 | @post_dump 96 | def remove_null_values(self, data): 97 | return {key: value for key, value in data.items() if value is not None} 98 | 99 | template_schema = TemplateSchema() 100 | template_list_schema = TemplateSchema(many=True) 101 | 102 | class DeviceSchema(Schema): 103 | id = fields.String(dump_only=True) 104 | import_id = fields.String(load_only=True) 105 | label = fields.Str(required=True) 106 | created = fields.DateTime(dump_only=True) 107 | updated = fields.DateTime(dump_only=True) 108 | templates = fields.Nested(TemplateSchema, only=('id'), many=True) 109 | 110 | @post_load 111 | def set_import_id(self, data): 112 | return set_id_with_import_id(data) 113 | 114 | @post_dump 115 | def remove_null_values(self, data): 116 | return {key: value for key, value in data.items() if value is not None} 117 | 118 | device_schema = DeviceSchema() 119 | device_list_schema = DeviceSchema(many=True) 120 | 
class ImportSchema(Schema):
    """Top-level schema for bulk import payloads."""
    templates = fields.Nested(TemplateSchema, many=True)
    devices = fields.Nested(DeviceSchema, many=True)

    @post_dump
    def remove_null_values(self, data):
        return {key: value for key, value in data.items() if value is not None}


import_schema = ImportSchema()
import_list_schema = ImportSchema(many=True)


class LogSchema(Schema):
    """Schema for the PUT /log payload."""
    level = fields.Str(required=True)


log_schema = LogSchema()


def parse_payload(content_type, data_request, schema):
    """Check the content type, parse the JSON body and validate it.

    :return tuple (deserialized data, raw json payload)
    :raises HTTPRequestError: 400 on a wrong content type, malformed JSON,
        or a schema validation failure.
    """
    try:
        if (content_type is None) or (content_type != "application/json"):
            raise HTTPRequestError(400, "Payload must be valid JSON, and Content-Type set accordingly")
        json_payload = json.loads(data_request)
        data = schema.load(json_payload)
    except ValueError:
        # json.JSONDecodeError subclasses ValueError, so bad JSON ends here.
        raise HTTPRequestError(400, "Payload must be valid JSON, and Content-Type set accordingly")
    except ValidationError as errors:
        results = {'message': 'failed to parse input', 'errors': errors.messages}
        raise HTTPRequestError(400, results)
    return data, json_payload


def load_attrs(attr_list, parent_template, base_type, db):
    """Deserialize each attr dict and stage it (plus any metadata children)
    on the session under `parent_template`, using the `base_type` ORM class.

    :raises HTTPRequestError: 400 when an attr fails schema validation.
    """
    for attr in attr_list:
        try:
            entity = attr_schema.load(attr)
            try:
                children = entity.pop('children')
            except KeyError:
                children = []

            orm_entity = base_type(template=parent_template, **entity)
            db.session.add(orm_entity)

            for child in children:
                orm_child = DeviceAttr(parent=orm_entity, **child)
                db.session.add(orm_child)
        except ValidationError as errors:
            results = {'message': 'failed to parse attr', 'errors': errors.messages}
            raise HTTPRequestError(400, results)


# ==== DeviceManager/TemplateHandler.py ====
import logging
import re
from flask import Blueprint, request, jsonify, make_response
from flask_sqlalchemy import BaseQuery, Pagination
from sqlalchemy.exc import IntegrityError
from sqlalchemy.sql import text, collate, func

from DeviceManager.DatabaseHandler import db
from DeviceManager.DatabaseModels import handle_consistency_exception, assert_template_exists, assert_device_exists
from DeviceManager.DatabaseModels import DeviceTemplate, DeviceAttr, DeviceTemplateMap
from DeviceManager.SerializationModels import template_list_schema, template_schema
from DeviceManager.SerializationModels import attr_list_schema, attr_schema, metaattr_schema
from DeviceManager.SerializationModels import parse_payload, load_attrs
from DeviceManager.SerializationModels import ValidationError
from DeviceManager.TenancyManager import init_tenant_context
from DeviceManager.KafkaNotifier import KafkaNotifier, DeviceEvent

from DeviceManager.app import app
from DeviceManager.utils import format_response, HTTPRequestError, get_pagination

from DeviceManager.Logger import Log
from datetime import datetime

from DeviceManager.BackendHandler import KafkaHandler, KafkaInstanceHandler
from DeviceManager.DeviceHandler import serialize_full_device

import time
import json

template = Blueprint('template', __name__)

LOGGER = Log().color_log()


def attr_format(attrs_format, result):
    """ formats output attr list acording to user input """

    def drop_key(d, k):
        # Best-effort removal: a missing key is simply ignored.
        try:
            LOGGER.info(f' will remove {k}')
            d.pop(k)
        except KeyError:
            pass

    # 'split' keeps only the categorized lists; 'single' keeps only 'attrs'.
    if attrs_format == 'split':
        drop_key(result, 'attrs')
    elif attrs_format == 'single':
        drop_key(result, 'config_attrs')
        drop_key(result, 'data_attrs')

return result 51 | 52 | def paginate(query, page, per_page=20, error_out=False): 53 | if error_out and page < 1: 54 | return None 55 | items = query.limit(per_page).offset((page - 1) * per_page).all() 56 | if not items and page != 1 and error_out: 57 | return None 58 | 59 | if page == 1 and len(items) < per_page: 60 | total = len(items) 61 | else: 62 | total = query.count() 63 | 64 | return Pagination(query, page, per_page, total, items) 65 | 66 | def refresh_template_update_column(db, template): 67 | if db.session.new or db.session.deleted: 68 | LOGGER.debug('The template structure has changed, refreshing "updated" column.') 69 | template.updated = datetime.now() 70 | 71 | class TemplateHandler(): 72 | 73 | kafka = KafkaInstanceHandler() 74 | 75 | def __init__(self): 76 | pass 77 | 78 | @staticmethod 79 | def get_templates(params, token): 80 | """ 81 | Fetches known templates, potentially limited by a given value. Ordering 82 | might be user-configurable too. 83 | 84 | :param params: Parameters received from request (page_number, per_page, 85 | sort_by, attr, attr_type, label, attrs_format) 86 | as created by Flask 87 | :param token: The authorization token (JWT). 88 | :return A JSON containing pagination information and the template list 89 | :rtype JSON 90 | :raises HTTPRequestError: If no authorization token was provided (no 91 | tenant was informed) 92 | """ 93 | LOGGER.debug(f"Retrieving templates.") 94 | LOGGER.debug(f"Initializing tenant context...") 95 | init_tenant_context(token, db) 96 | LOGGER.debug(f"... 
tenant context initialized.") 97 | 98 | pagination = {'page': params.get('page_number'), 'per_page': params.get('per_page'), 'error_out': False} 99 | 100 | LOGGER.debug(f"Pagination configuration is {pagination}") 101 | 102 | parsed_query = [] 103 | query = params.get('attr') 104 | 105 | for attr in query: 106 | LOGGER.debug(f"Analyzing query parameter: {attr}...") 107 | parsed = re.search('^(.+){1}=(.+){1}$', attr) 108 | parsed_query.append(DeviceAttr.label == parsed.group(1)) 109 | parsed_query.append(DeviceAttr.static_value == parsed.group(2)) 110 | LOGGER.debug("... query parameter was added to filter list.") 111 | 112 | query = params.get('attr_type') 113 | 114 | for attr_type_item in query: 115 | parsed_query.append(DeviceAttr.value_type == attr_type_item) 116 | 117 | target_label = params.get('label') 118 | 119 | if target_label: 120 | LOGGER.debug(f"Adding label filter to query...") 121 | parsed_query.append(DeviceTemplate.label.like("%{}%".format(target_label))) 122 | LOGGER.debug(f"... 
filter was added to query.") 123 | 124 | SORT_CRITERION = { 125 | 'label': DeviceTemplate.label, 126 | None: None 127 | } 128 | sortBy = SORT_CRITERION.get(params.get('sortBy'), None) 129 | 130 | LOGGER.debug(f"Sortby filter is {sortBy}") 131 | if parsed_query: 132 | LOGGER.debug(f" Filtering template by {parsed_query}") 133 | 134 | # Always sort by DeviceTemplate.id 135 | page = db.session.query(DeviceTemplate) \ 136 | .join(DeviceAttr, isouter=True) \ 137 | .filter(*parsed_query) \ 138 | .order_by(DeviceTemplate.id) 139 | if sortBy: 140 | page = page.order_by(sortBy) 141 | 142 | page = page.distinct(DeviceTemplate.id) 143 | 144 | LOGGER.debug(f"Current query: {type(page)}") 145 | page = paginate(page, **pagination) 146 | else: 147 | LOGGER.debug(f" Querying templates sorted by {sortBy}") 148 | page = db.session.query(DeviceTemplate).order_by(sortBy).paginate(**pagination) 149 | 150 | templates = [] 151 | for template in page.items: 152 | formatted_template = attr_format(params.get('attrs_format'), template_schema.dump(template)) 153 | LOGGER.debug(f"Adding resulting template to response...") 154 | LOGGER.debug(f"Template is: {formatted_template['label']}") 155 | templates.append(formatted_template) 156 | LOGGER.debug(f"... template was added to response.") 157 | 158 | result = { 159 | 'pagination': { 160 | 'page': page.page, 161 | 'total': page.pages, 162 | 'has_next': page.has_next, 163 | 'next_page': page.next_num 164 | }, 165 | 'templates': templates 166 | } 167 | 168 | LOGGER.debug(f"Full response is {result}") 169 | 170 | return result 171 | 172 | @staticmethod 173 | def create_template(params, token): 174 | """ 175 | Creates a new template. 176 | 177 | :param params: Parameters received from request (content_type, data) 178 | as created by Flask 179 | :param token: The authorization token (JWT). 180 | :return The created template. 
181 | :raises HTTPRequestError: If no authorization token was provided (no 182 | tenant was informed) 183 | :raises HTTPRequestError: If template attribute constraints were 184 | violated. This might happen if two attributes have the same name, for 185 | instance. 186 | """ 187 | init_tenant_context(token, db) 188 | 189 | content_type = params.get('content_type') 190 | data_request = params.get('data') 191 | tpl, json_payload = parse_payload(content_type, data_request, template_schema) 192 | 193 | loaded_template = DeviceTemplate(**tpl) 194 | load_attrs(json_payload['attrs'], loaded_template, DeviceAttr, db) 195 | db.session.add(loaded_template) 196 | 197 | try: 198 | db.session.commit() 199 | LOGGER.debug(f" Created template in database") 200 | except IntegrityError as e: 201 | LOGGER.error(f' {e}') 202 | raise HTTPRequestError(400, 'Template attribute constraints are violated by the request') 203 | 204 | results = { 205 | 'template': template_schema.dump(loaded_template), 206 | 'result': 'ok' 207 | } 208 | return results 209 | 210 | @staticmethod 211 | def get_template(params, template_id, token): 212 | """ 213 | Fetches a single template. 214 | 215 | :param req: The received HTTP request, as created by Flask. 216 | :param template_id: The requested template ID. 217 | :return A Template 218 | :rtype Template, as described in DatabaseModels package 219 | :raises HTTPRequestError: If no authorization token was provided (no 220 | tenant was informed) 221 | :raises HTTPRequestError: If this template could not be found in 222 | database. 223 | """ 224 | init_tenant_context(token, db) 225 | tpl = assert_template_exists(template_id) 226 | json_template = template_schema.dump(tpl) 227 | attr_format(params.get('attr_format'), json_template) 228 | return json_template 229 | 230 | @staticmethod 231 | def delete_all_templates(token): 232 | """ 233 | Deletes all templates. 234 | 235 | :param token: The authorization token (JWT). 
236 | :raises HTTPRequestError: If this template could not be found in 237 | database. 238 | """ 239 | init_tenant_context(token, db) 240 | json_templates = [] 241 | 242 | try: 243 | templates = db.session.query(DeviceTemplate) 244 | for template in templates: 245 | db.session.delete(template) 246 | json_templates.append(template_schema.dump(template)) 247 | 248 | db.session.commit() 249 | except IntegrityError: 250 | raise HTTPRequestError(400, "Templates cannot be removed as they are being used by devices") 251 | 252 | results = { 253 | 'result': 'ok', 254 | 'removed': json_templates 255 | } 256 | 257 | return results 258 | 259 | @staticmethod 260 | def remove_template(template_id, token): 261 | """ 262 | Deletes a single template. 263 | 264 | :param template_id: The template to be removed. 265 | :param token: The authorization token (JWT). 266 | :return The removed template. 267 | :rtype JSON 268 | :raises HTTPRequestError: If no authorization token was provided (no 269 | tenant was informed) 270 | :raises HTTPRequestError: If this template could not be found in 271 | database. 272 | :raises HTTPRequestError: If the template is being currently used by 273 | a device. 274 | """ 275 | init_tenant_context(token, db) 276 | tpl = assert_template_exists(template_id) 277 | 278 | json_template = template_schema.dump(tpl) 279 | try: 280 | db.session.delete(tpl) 281 | db.session.commit() 282 | except IntegrityError: 283 | raise HTTPRequestError(400, "Templates cannot be removed as they are being used by devices") 284 | 285 | results = { 286 | 'result': 'ok', 287 | 'removed': json_template 288 | } 289 | 290 | return results 291 | 292 | @classmethod 293 | def update_template(cls, params, template_id, token): 294 | """ 295 | Updates a single template. 296 | 297 | :param params: Parameters received from request (content_type, data) 298 | as created by Flask 299 | :param template_id: The template to be updated. 300 | :param token: The authorization token (JWT). 
301 | :return The old version of this template (previous to the update). 302 | :rtype JSON 303 | :raises HTTPRequestError: If no authorization token was provided (no 304 | tenant was informed) 305 | :raises HTTPRequestError: If this template could not be found in 306 | database. 307 | """ 308 | service = init_tenant_context(token, db) 309 | 310 | content_type = params.get('content_type') 311 | data_request = params.get('data') 312 | 313 | # find old version of the template, if any 314 | old = assert_template_exists(template_id) 315 | # parse updated version from payload 316 | updated, json_payload = parse_payload(content_type, data_request, template_schema) 317 | 318 | LOGGER.debug(f" Current json payload: {json_payload}") 319 | 320 | old.label = updated['label'] 321 | 322 | new = json_payload['attrs'] 323 | LOGGER.debug(f" Checking old template attributes") 324 | def attrs_match(attr_from_db, attr_from_request): 325 | return ((attr_from_db.label == attr_from_request["label"]) and 326 | (attr_from_db.type == attr_from_request["type"])) 327 | 328 | def update_attr(attrs_from_db, attrs_from_request): 329 | attrs_from_db.value_type = attrs_from_request.get('value_type', None) 330 | attrs_from_db.static_value = attrs_from_request.get('static_value', None) 331 | 332 | def validate_attr(attr_from_request, is_meta): 333 | if is_meta is False: 334 | attr_schema.load(attr_from_request) 335 | else: 336 | metaattr_schema.load(attr_from_request) 337 | 338 | def analyze_attrs(attrs_from_db, attrs_from_request, parentAttr=None): 339 | for attr_from_db in attrs_from_db: 340 | found = False 341 | for idx, attr_from_request in enumerate(attrs_from_request): 342 | validate_attr(attr_from_request, parentAttr is not None) 343 | if attrs_match(attr_from_db, attr_from_request): 344 | update_attr(attr_from_db, attr_from_request) 345 | if "metadata" in attr_from_request: 346 | analyze_attrs(attr_from_db.children, attr_from_request["metadata"], attr_from_db) 347 | 
attrs_from_request.pop(idx) 348 | found = True 349 | break 350 | if not found: 351 | LOGGER.debug(f" Removing attribute {attr_from_db.label}") 352 | db.session.delete(attr_from_db) 353 | if parentAttr and attrs_from_request is not None: 354 | for attr_from_request in attrs_from_request: 355 | orm_child = DeviceAttr(parent=parentAttr, **attr_from_request) 356 | db.session.add(orm_child) 357 | return attrs_from_request 358 | 359 | to_be_added = analyze_attrs(old.attrs, new) 360 | for attr in to_be_added: 361 | LOGGER.debug(f" Adding new attribute {attr}") 362 | if "id" in attr: 363 | del attr["id"] 364 | child = DeviceAttr(template=old, **attr) 365 | db.session.add(child) 366 | if "metadata" in attr and attr["metadata"] is not None: 367 | for metadata in attr["metadata"]: 368 | LOGGER.debug(f" Adding new metadata {metadata}") 369 | orm_child = DeviceAttr(parent=child, **metadata) 370 | db.session.add(orm_child) 371 | try: 372 | LOGGER.debug(f" Commiting new data...") 373 | refresh_template_update_column(db, old) 374 | db.session.commit() 375 | LOGGER.debug("... 
data committed.") 376 | except IntegrityError as error: 377 | LOGGER.debug(f" ConsistencyException was thrown.") 378 | handle_consistency_exception(error) 379 | 380 | # notify interested parties that a set of devices might have been implicitly updated 381 | affected = db.session.query(DeviceTemplateMap) \ 382 | .filter(DeviceTemplateMap.template_id==template_id) \ 383 | .all() 384 | 385 | affected_devices = [] 386 | 387 | kafka_handler_instance = cls.kafka.getInstance(cls.kafka.kafkaNotifier) 388 | for device in affected: 389 | orm_device = assert_device_exists(device.device_id) 390 | kafka_handler_instance.update(serialize_full_device(orm_device, service), meta={"service": service}) 391 | affected_devices.append(device.device_id) 392 | 393 | event = { 394 | "event": DeviceEvent.TEMPLATE, 395 | "data": { 396 | "affected": affected_devices, 397 | "template": template_schema.dump(old) 398 | }, 399 | "meta": {"service": service} 400 | } 401 | kafka_handler_instance.kafkaNotifier.send_raw(event, service) 402 | 403 | results = { 404 | 'updated': template_schema.dump(old), 405 | 'result': 'ok' 406 | } 407 | return results 408 | 409 | 410 | @template.route('/template', methods=['GET']) 411 | def flask_get_templates(): 412 | try: 413 | # retrieve the authorization token 414 | token = retrieve_auth_token(request) 415 | 416 | # retrieve pagination 417 | page_number, per_page = get_pagination(request) 418 | 419 | params = { 420 | 'page_number': page_number, 421 | 'per_page': per_page, 422 | 'sortBy': request.args.get('sortBy', None), 423 | 'attr': request.args.getlist('attr'), 424 | 'attr_type': request.args.getlist('attr_type'), 425 | 'label': request.args.get('label', None), 426 | 'attrs_format': request.args.get('attr_format', 'both') 427 | } 428 | 429 | result = TemplateHandler.get_templates(params, token) 430 | 431 | for templates in result.get('templates'): 432 | LOGGER.info(f" Getting template with id {templates.get('id')}") 433 | 434 | return 
make_response(jsonify(result), 200) 435 | 436 | except ValidationError as e: 437 | results = {'message': 'failed to parse attr', 'errors': e} 438 | LOGGER.error(f" {e}") 439 | return make_response(jsonify(results), 500) 440 | 441 | except HTTPRequestError as e: 442 | LOGGER.error(f" {e}") 443 | if isinstance(e.message, dict): 444 | return make_response(jsonify(e.message), e.error_code) 445 | return format_response(e.error_code, e.message) 446 | 447 | 448 | @template.route('/template', methods=['POST']) 449 | def flask_create_template(): 450 | try: 451 | # retrieve the authorization token 452 | token = retrieve_auth_token(request) 453 | 454 | params = { 455 | 'content_type': request.headers.get('Content-Type'), 456 | 'data': request.data 457 | } 458 | 459 | result = TemplateHandler.create_template(params, token) 460 | 461 | LOGGER.info(f"Creating a new template") 462 | 463 | return make_response(jsonify(result), 200) 464 | 465 | except ValidationError as e: 466 | results = {'message': 'failed to parse attr', 'errors': e} 467 | LOGGER.error(f" {e}") 468 | return make_response(jsonify(results), 400) 469 | except HTTPRequestError as error: 470 | LOGGER.error(f" {error}") 471 | if isinstance(error.message, dict): 472 | return make_response(jsonify(error.message), error.error_code) 473 | return format_response(error.error_code, error.message) 474 | 475 | 476 | @template.route('/template', methods=['DELETE']) 477 | def flask_delete_all_templates(): 478 | 479 | try: 480 | # retrieve the authorization token 481 | token = retrieve_auth_token(request) 482 | 483 | result = TemplateHandler.delete_all_templates(token) 484 | 485 | LOGGER.info(f"deleting all templates") 486 | 487 | return make_response(jsonify(result), 200) 488 | 489 | except HTTPRequestError as error: 490 | LOGGER.error(f" {error}") 491 | if isinstance(error.message, dict): 492 | return make_response(jsonify(error.message), error.error_code) 493 | return format_response(error.error_code, error.message) 494 | 495 
| 496 | @template.route('/template/', methods=['GET']) 497 | def flask_get_template(template_id): 498 | try: 499 | # retrieve the authorization token 500 | token = retrieve_auth_token(request) 501 | 502 | params = {'attrs_format': request.args.get('attr_format', 'both')} 503 | 504 | result = TemplateHandler.get_template(params, template_id, token) 505 | LOGGER.info(f"Getting template with id: {template_id}") 506 | return make_response(jsonify(result), 200) 507 | except ValidationError as e: 508 | results = {'message': 'failed to parse attr', 'errors': e} 509 | LOGGER.error(f" {e}") 510 | return make_response(jsonify(results), 500) 511 | except HTTPRequestError as e: 512 | LOGGER.error(f" {e}") 513 | if isinstance(e.message, dict): 514 | return make_response(jsonify(e.message), e.error_code) 515 | return format_response(e.error_code, e.message) 516 | 517 | 518 | @template.route('/template/', methods=['DELETE']) 519 | def flask_remove_template(template_id): 520 | try: 521 | # retrieve the authorization token 522 | token = retrieve_auth_token(request) 523 | 524 | result = TemplateHandler.remove_template(template_id, token) 525 | LOGGER.info(f"Removing template with id: {template_id}") 526 | return make_response(jsonify(result), 200) 527 | except ValidationError as e: 528 | results = {'message': 'failed to parse attr', 'errors': e} 529 | LOGGER.error(f" {e.message}") 530 | return make_response(jsonify(results), 500) 531 | except HTTPRequestError as e: 532 | LOGGER.error(f" {e.message}") 533 | if isinstance(e.message, dict): 534 | return make_response(jsonify(e.message), e.error_code) 535 | return format_response(e.error_code, e.message) 536 | 537 | 538 | @template.route('/template/', methods=['PUT']) 539 | def flask_update_template(template_id): 540 | try: 541 | # retrieve the authorization token 542 | token = retrieve_auth_token(request) 543 | 544 | params = { 545 | 'content_type': request.headers.get('Content-Type'), 546 | 'data': request.data 547 | } 548 | 549 | 
result = TemplateHandler.update_template(params, template_id, token) 550 | LOGGER.info(f"Updating template with id: {template_id}") 551 | return make_response(jsonify(result), 200) 552 | except ValidationError as errors: 553 | results = {'message': 'failed to parse attr', 'errors': errors.messages} 554 | LOGGER.error(f' Error in load attrs {errors.messages}') 555 | return make_response(jsonify(results), 400) 556 | except HTTPRequestError as error: 557 | LOGGER.error(f" {error.message}") 558 | if isinstance(error.message, dict): 559 | return make_response(jsonify(error.message), error.error_code) 560 | return format_response(error.error_code, error.message) 561 | 562 | 563 | app.register_blueprint(template) 564 | -------------------------------------------------------------------------------- /DeviceManager/TenancyManager.py: -------------------------------------------------------------------------------- 1 | import json 2 | from flask import g 3 | from flask_alembic import Alembic 4 | from sqlalchemy.sql import exists, select, text, column 5 | 6 | from DeviceManager.utils import HTTPRequestError, decode_base64, get_allowed_service 7 | from .app import app 8 | 9 | def install_triggers(db, tenant, session=None): 10 | query = """ 11 | SET search_path to {tenant}; 12 | -- template update/creation checks 13 | 14 | CREATE FUNCTION validate_device_attrs() returns trigger as $$ 15 | DECLARE 16 | conflict_count int; 17 | BEGIN 18 | conflict_count := ( 19 | select count(*) from attrs as a 20 | left join device_template as dt on a.template_id = dt.template_id 21 | left join devices as d on d.id = dt.device_id 22 | where dt.template_id != NEW.template_id and 23 | dt.device_id in (select device_id from device_template where template_id = NEW.template_id) and 24 | a.label = NEW.label and a.type = NEW.type 25 | ); 26 | IF (conflict_count != 0) THEN 27 | RAISE 'Attribute % (%) has standing conflicts', NEW.label, NEW.id using ERRCODE = 'unique_violation'; 28 | END IF; 29 | RETURN 
def switch_tenant(tenant, db, session=None):
    """Point the active database session at the given tenant's schema.

    :param tenant: schema (tenant) name to switch to.
    :param db: Flask-SQLAlchemy handle; its session is used when *session* is None.
    :param session: optional explicit session to operate on instead.
    """
    if session is None:
        session = db.session
    # NOTE(review): the schema name is interpolated straight into the SQL
    # statement (identifiers cannot be bound parameters). Tenant originates
    # from the JWT payload upstream — confirm it is validated before reaching
    # here, otherwise this is an injection vector.
    session.execute("SET search_path TO %s" % tenant)
    session.commit()
def list_tenants(session):
    """Return the names of all tenant schemas present in the database.

    Postgres-internal schemas (anything starting with ``pg``) plus
    ``public`` and ``information_schema`` are filtered out; every other
    schema is assumed to be a tenant managed by this service.

    :param session: database session used to run the catalog query.
    :returns: list of tenant (schema) names.
    """
    rows = session.execute('select schema_name from information_schema.schemata;')
    internal = ('public', 'information_schema')
    return [row.schema_name for row in rows
            if not row.schema_name.startswith('pg')
            and row.schema_name not in internal]
subject="dojot.device-manager.device", 18 | device_subject="device-data", 19 | status_timeout="5", 20 | create_db=True, 21 | log_level="INFO"): 22 | # Postgres configuration data 23 | self.dbname = os.environ.get('DBNAME', db) 24 | self.dbhost = os.environ.get('DBHOST', dbhost) 25 | self.dbuser = os.environ.get('DBUSER', dbuser) 26 | self.dbpass = os.environ.get('DBPASS', dbpass) 27 | self.dbdriver = os.environ.get('DBDRIVER', dbdriver) 28 | self.create_db = os.environ.get('CREATE_DB', create_db) 29 | # Kafka configuration 30 | self.kafka_host = os.environ.get('KAFKA_HOST', kafka_host) 31 | self.kafka_port = os.environ.get('KAFKA_PORT', kafka_port) 32 | 33 | # Log configuration 34 | self.log_level = os.environ.get('LOG_LEVEL', log_level) 35 | 36 | # Data broker configuration 37 | # Full baseurl of data-broker 38 | self.data_broker = os.environ.get('BROKER', broker) 39 | 40 | # Which subject to publish new device information to 41 | self.subject = os.environ.get('SUBJECT', subject) 42 | self.device_subject = os.environ.get('DEVICE_SUBJECT', device_subject) 43 | self.status_timeout = int(os.environ.get('STATUS_TIMEOUT', status_timeout)) 44 | 45 | # crypto configuration 46 | if not os.environ.get('DEV_MNGR_CRYPTO_PASS'): 47 | raise Exception("environment variable 'DEV_MNGR_CRYPTO_PASS' not configured") 48 | if not os.environ.get('DEV_MNGR_CRYPTO_IV'): 49 | raise Exception("environment variable 'DEV_MNGR_CRYPTO_IV' not configured") 50 | if not os.environ.get('DEV_MNGR_CRYPTO_SALT'): 51 | raise Exception("environment variable 'DEV_MNGR_CRYPTO_SALT' not configured") 52 | 53 | salt = os.environ.get('DEV_MNGR_CRYPTO_SALT') 54 | salt = salt.encode('ASCII') 55 | password = os.environ.get('DEV_MNGR_CRYPTO_PASS') 56 | key = KDF.PBKDF2(password, salt, dkLen=16, count=1000, prf=None) 57 | self.crypto = { 'key': key, 58 | 'iv': os.environ.get('DEV_MNGR_CRYPTO_IV')} 59 | 60 | def get_db_url(self): 61 | """ From the config, return a valid postgresql url """ 62 | if self.dbpass is 
not None: 63 | return "{}://{}:{}@{}/{}".format(self.dbdriver, self.dbuser, self.dbpass, 64 | self.dbhost, self.dbname) 65 | else: 66 | return "{}://{}@{}/{}".format(self.dbdriver, self.dbuser, self.dbhost, self.dbname) 67 | 68 | def get_kafka_url(self): 69 | return "{}:{}".format(self.kafka_host, self.kafka_port) 70 | 71 | 72 | CONFIG = Config() 73 | -------------------------------------------------------------------------------- /DeviceManager/main.py: -------------------------------------------------------------------------------- 1 | from flask import g 2 | from flask_migrate import Migrate 3 | 4 | from DeviceManager.app import app 5 | 6 | # initialize modules 7 | import DeviceManager.DeviceHandler 8 | import DeviceManager.TemplateHandler 9 | import DeviceManager.LoggerHandler 10 | import DeviceManager.ImportHandler 11 | import DeviceManager.ErrorManager 12 | 13 | from .DatabaseHandler import db 14 | from .TenancyManager import list_tenants 15 | 16 | with app.app_context(): 17 | g.tenant = '__status_monitor__' 18 | 19 | migrate = Migrate(app, db) 20 | 21 | if __name__ == '__main__': 22 | app.run(host='0.0.0.0', threaded=True) 23 | -------------------------------------------------------------------------------- /DeviceManager/migrations: -------------------------------------------------------------------------------- 1 | ../migrations/versions/ -------------------------------------------------------------------------------- /DeviceManager/utils.py: -------------------------------------------------------------------------------- 1 | """ Assorted utils used throughout the service """ 2 | import base64 3 | import json 4 | import random 5 | from flask import make_response, jsonify 6 | from Crypto.Cipher import AES 7 | 8 | from DeviceManager.conf import CONFIG 9 | 10 | BS = AES.block_size 11 | pad = lambda s: s + (BS - len(s) % BS) * chr(BS - len(s) % BS) 12 | unpad = lambda s : s[:-ord(s[len(s)-1:])] 13 | 14 | def format_response(status, message=None): 15 | """ 
def get_pagination(request):
    """Extract and validate pagination parameters from a request.

    :param request: HTTP request whose query string may contain
        ``page_num`` (default ``1``) and ``page_size`` (default ``20``).
    :returns: tuple ``(page, per_page)`` of positive integers.
    :raises HTTPRequestError: 400 when either parameter is not a valid
        integer or is smaller than 1.
    """
    try:
        page = int(request.args.get('page_num', '1'))
        per_page = int(request.args.get('page_size', '20'))
    except (TypeError, ValueError):
        # int() raises ValueError for non-numeric strings (e.g. "?page_num=abc")
        # and TypeError for non-string/number values. Catching only TypeError
        # (the original behavior) let malformed query strings escape as 500s.
        raise HTTPRequestError(400, "page_size and page_num must be integers")

    # sanity checks (page numbering starts at 1)
    if page < 1:
        raise HTTPRequestError(400, "Page numbers must be greater than 1")
    if per_page < 1:
        raise HTTPRequestError(400, "At least one entry per page is mandatory")
    return page, per_page
def retrieve_auth_token(request):
    """Fetch the bearer token from the request's authorization header.

    :param request: incoming HTTP request carrying the headers.
    :returns: the raw value of the ``authorization`` header.
    :raises HTTPRequestError: 401 when the header is absent.
    """
    try:
        return request.headers['authorization']
    except KeyError:
        raise HTTPRequestError(401, "No authorization token has been supplied")
python3 -m venv /usr/src/venv 11 | ENV VIRTUAL_ENV="/usr/src/venv" 12 | ENV PATH="$VIRTUAL_ENV/bin:$PATH" 13 | 14 | ADD . /usr/src/app 15 | RUN pip install -r requirements/requirements.txt 16 | 17 | FROM python:3.6-alpine 18 | 19 | COPY --from=basis /usr/src/venv /usr/src/venv 20 | COPY --from=basis /usr/src/app /usr/src/app 21 | 22 | RUN apk update && apk --no-cache add libpq 23 | 24 | ENV VIRTUAL_ENV="/usr/src/venv" 25 | ENV PATH="$VIRTUAL_ENV/bin:$PATH" 26 | ENV PYTHONPATH="/usr/src/app" 27 | WORKDIR /usr/src/app 28 | 29 | EXPOSE 5000 30 | 31 | CMD ["sh", "./docker/entrypoint.sh"] 32 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 
26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. 
For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. 
If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. 
You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. 
(Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | device-manager 2 | This software is copyright (c) 2020 by CPQD . 3 | 4 | It is licensed under Apache License, Version 2.0. 
5 | 6 | It uses libraries, which are available under the following licenses: 7 | 8 | * Apache License, Version 2.0 9 | - coverage (source code available at https://github.com/nedbat/coveragepy) 10 | - importlib-metadata (source code available at https://github.com/python/importlib_metadata) 11 | - kafka-python (source code available at https://github.com/dpkp/kafka-python) 12 | - python-dateutil (source code available at https://github.com/dateutil/dateutil) 13 | - python-editor (source code available at )https://github.com/fmoo/python-editor) 14 | - requests (source code available at https://github.com/psf/requests) 15 | 16 | * MIT 17 | - Flask-Migrate (source code available at https://github.com/miguelgrinberg/flask-migrate) 18 | - Mako (source code available at https://github.com/sqlalchemy/mako) 19 | - SQLAlchemy (source code available at https://github.com/sqlalchemy) 20 | - alchemy-mock (source code available at https://github.com/miki725/alchemy-mock) 21 | - alembic (source code available at https://github.com/sqlalchemy/alembic) 22 | - atomicwrites (source code available at https://github.com/untitaker/python-atomicwrites) 23 | - attrs (source code available at https://github.com/python-attrs/attrs) 24 | - colorlog (source code available at https://github.com/borntyping/python-colorlog) 25 | - gevent (source code available at https://github.com/gevent/gevent) 26 | - greenlet (source code available at https://github.com/python-greenlet/greenlet) 27 | - gunicorn (source code available at https://github.com/benoitc/gunicorn) 28 | - marshmallow (source code available at https://github.com/marshmallow-code/marshmallow) 29 | - more-itertools (source code availabe at https://github.com/more-itertools/more-itertools) 30 | - pluggy (source code availabe at https://github.com/pytest-dev/pluggy) 31 | - py (source code available at https://github.com/pytest-dev/py) 32 | - pytest (source code available at https://github.com/pytest-dev/pytest) 33 | - pytest-cov (source 
code available at https://github.com/pytest-dev/pytest-cov) 34 | - python-dateutil (source code available at https://github.com/dateutil/dateutil) 35 | - six (source code available at https://github.com/benjaminp/six) 36 | - urllib3 (source code available at https://github.com/urllib3/urllib3) 37 | - zipp (source code available at https://github.com/jaraco/zipp) 38 | 39 | * BSD (3-Clause) 40 | - Flask (source code available at https://github.com/pallets/flask) 41 | - Flask-SQLAlchemy (source code available at https://github.com/pallets/flask-sqlalchemy) 42 | - Jinja2 (source code available at https://github.com/pallets/jinja) 43 | - MarkupSafe (source code available at https://github.com/pallets/markupsafe) 44 | - Werkzeug (source code available at https://github.com/pallets/werkzeug) 45 | - click (source code available at https://github.com/pallets/click) 46 | - itsdangerous (source code available at https://github.com/pallets/itsdangerous) 47 | 48 | * BSD-Like 49 | - idna (source code available at https://github.com/kjd/idna) 50 | 51 | * MPL 2.0 52 | - certifi (source code available at https://github.com/certifi/python-certifi) 53 | 54 | * Python Software Foundation License 55 | - typing-extensions (source code available at https://github.com/python/typing) 56 | 57 | * LGPL 58 | - chardet (source code available at https://github.com/chardet/chardet) 59 | - psycopg2 (source code available at https://github.com/psycopg/psycopg2) 60 | 61 | * Public Domain 62 | - pycrypto (source code available at https://github.com/dlitz/pycrypto) 63 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Device Manager 2 | 3 | [![License badge](https://img.shields.io/badge/License-Apache%202.0-blue.svg)] 4 | [![Docker badge](https://img.shields.io/docker/pulls/dojot/iotagent-json.svg)](https://hub.docker.com/r/dojot/device-manager/) 5 | 6 | The Device 
Manager handles all CRUD operations related to devices in dojot. 7 | 8 | ## How does it work 9 | 10 | The Device Manager stores and retrieves information models for devices and templates and a few 11 | static information about them as well. Whenever a device is created, removed or just edited, it will 12 | publish a message through Kafka. 13 | 14 | ## Dependencies 15 | 16 | ### Dojot services 17 | 18 | The minimal set of dojot services needed to run Device Manager is: 19 | 20 | - Kafka 21 | - Data Broker 22 | - PostgreSQL 23 | 24 | ### Python libraries 25 | 26 | Check the [requirements file](./requirements/requirements.txt) for more details. 27 | 28 | ## Configuration 29 | 30 | Key | Purpose | Default Value | Accepted values 31 | -------------------- | ------------------------------- | ------------------- | ------------------------------------- 32 | BROKER | Kafka topic subject manager | http://data-broker | Hostname 33 | CREATE_DB | Option to create the database | True | Boolean 34 | DBDRIVER | PostgreSQL database driver | postgresql+psycopg2 | String 35 | DBHOST | PostgreSQL database host | postgres | String 36 | DBNAME | PostgreSQL database name | dojot_devm | String 37 | DBPASS | PostgreSQL database password | none | String 38 | DBUSER | PostgreSQL database user | postgres | String 39 | DEV_MNGR_CRYPTO_IV | Initialization vector of crypto | none | String 40 | DEV_MNGR_CRYPTO_PASS | Password of crypto | none | String 41 | DEV_MNGR_CRYPTO_SALT | Salt of crypto | none | String 42 | KAFKA_HOST | Kafka host | kafka | Hostname 43 | KAFKA_PORT | Kafka port | 9092 | Number 44 | LOG_LEVEL | Logger level | INFO | DEBUG, ERROR, WARNING, CRITICAL, INFO 45 | STATUS_TIMEOUT | Kafka timeout | 5 | Number 46 | 47 | ## How to run 48 | 49 | For a simple and fast setup, an official Docker image for this service is available on 50 | [DockerHub](https://hub.docker.com/r/dojot/device-manager). 
51 | 52 | ### **Standalone - with Docker** 53 | 54 | If you really need to run Device Manager as a standalone process (without dojot's wonderful 55 | [Docker Compose](https://github.com/dojot/docker-compose), we suggest using the minimal 56 | [Docker Compose file](local/compose.yml). It contains only the minimum set of external services. To 57 | run them, follow these instructions: 58 | 59 | ```shell 60 | # Spin up local copies of remote dependencies 61 | docker-compose -f local/compose.yml -p devm up -d 62 | # Builds devm container (this may take a while) 63 | docker build -f Dockerfile -t local/devicemanager . 64 | # Runs devm manually, using the infra that's been just created 65 | # Must pass the environment variables of crypto to run 66 | docker run --rm -it --network devm_default -e DEV_MNGR_CRYPTO_PASS=${CRYPTO_PASS} -e DEV_MNGR_CRYPTO_IV=${CRYPTO_IV} -e DEV_MNGR_CRYPTO_SALT=${CRYPTO_SALT} local/devicemanager 67 | # 68 | # Example: docker run --rm -it --network devm_default -e DEV_MNGR_CRYPTO_PASS='kamehameHA' -e DEV_MNGR_CRYPTO_IV=1234567890123456 -e DEV_MNGR_CRYPTO_SALT='shuriken' local/devicemanager 69 | # 70 | # Hitting ^C will actually kill device-manager's process and the container 71 | # 72 | ``` 73 | 74 | ### **Standalone - without Docker** 75 | 76 | "Ok, but I ***really*** want to run device manager on my machine - no Docker no nothing." 77 | 78 | You can execute the following commands (it's just what runs in the container, actually - check 79 | `docker/entrypoint.sh` and `Dockerfile`). 80 | 81 | ```shell 82 | # install dependencies locally (may take a while) 83 | python setup.py develop 84 | 85 | export DBHOST="postgres ip/hostname goes here" 86 | export KAFKA_HOST="kafka ip/hostname goes here" 87 | 88 | docker/waitForDb.py 89 | gunicorn DeviceManager.main:app -k gevent --logfile - --access-logfile - 90 | ``` 91 | 92 | Do notice that all those external infra (Kafka and PostgreSQL) will have to be up and running still. 
93 | At a minimum, please remember to configure the two environment variables above (specially if they 94 | are both `localhost`). 95 | 96 | Keep in mind that running a standalone instance of Device Manager misses a lot of security checks 97 | (such as user identity checks, proper multi-tenancy validations, and so on). In particular, every 98 | request sent to Device Manager needs an access token, which should be retrieved from the 99 | [Auth](https://github.com/dojot/auth) component. In the examples listed in this README, you can 100 | generate one by yourself (for now, Device Manager doesn't check if the token is actually valid for 101 | that user - they are verified by Auth and the API gateway), but this method might not work in the 102 | future as more strict token checks are implemented in this service. 103 | 104 | ## How to use 105 | 106 | The usage is via the REST API. Check the 107 | [API documentation](https://dojot.github.io/device-manager/apiary_latest.html) for more details. 108 | 109 | ## Concepts 110 | 111 | This service holds two of the most basic and essential concepts in the dojot platform: the `device` 112 | and the `template`. Before reading about the events, it's important to understand what each one is 113 | and know their parameters. 114 | 115 | ### **Device** 116 | 117 | In dojot, a device is a digital representation of an actual device or gateway with one or more 118 | sensors or of a virtual one with sensors/attributes inferred from other devices. 119 | 120 | Consider, for instance, an actual device with temperature and humidity sensors; it can be 121 | represented into dojot as a device with two attributes (one for each sensor). We call this kind of 122 | device as regular device or by its communication protocol, for instance, MQTT device or CoAP device. 
123 | 124 | We can also create devices which don’t directly correspond to their associated physical ones, for 125 | instance, we can create one with higher level of information of temperature (is becoming hotter or 126 | is becoming colder) whose values are inferred from temperature sensors of other devices. This kind 127 | of device is called virtual device. 128 | 129 | The information model used for both “real” and virtual devices is as following: 130 | 131 | | Attribute | Type | Mode | Required | Description 132 | | ------------- | ------------------------------------------------------- | ---------- | -------- | ------------------------------------------------------------------- 133 | | **attrs** | Map of attributes | read-only | No | Map of device's attributes (check the attributes in the next table) 134 | | **created** | DateTime (with timezone and µs precision) in ISO format | read-only | No | Device creation time. 135 | | **id** | String (length of 8 bytes) | read-only | No | Unique identifier for the device. 136 | | **label** | String (length of 128 bytes) | read-write | Yes | An user-defined label to facilitate the device's identification. 137 | | **templates** | Strings list | read-only | No | List of template IDs used by the device. 138 | | **updated** | DateTime (with timezone and µs precision) in ISO format | read-only | No | Device last update time. 
139 | 140 | Example device: 141 | 142 | ```json 143 | { 144 | "attrs": { 145 | "1": [ 146 | { 147 | "created": "2020-09-16T14:50:09.297163+00:00", 148 | "id": 1, 149 | "is_static_overridden": false, 150 | "label": "rain", 151 | "static_value": "", 152 | "template_id": "1", 153 | "type": "dynamic", 154 | "value_type": "float" 155 | } 156 | ] 157 | }, 158 | "created": "2020-09-16T14:50:34.749230+00:00", 159 | "updated": "2020-09-16T14:55:41.897400+00:00", 160 | "id": "e06357", 161 | "label": "teste", 162 | "templates": [ 163 | 1 164 | ] 165 | } 166 | ``` 167 | 168 | The accepted parameters in the `attrs` map are: 169 | 170 | | Attribute | Type | Mode | Required | Description 171 | | ------------------------ | ------------------------------------------------------- | ---------- | -------- | ------------------------------------------------------------------- 172 | | **created** | DateTime (with timezone and µs precision) in ISO format | read-only | No | Device creation time. 173 | | **id** | Integer | read-write | No | Unique identifier for the attribute (automatically generated). 174 | | **is_static_overridden** | Bool | read-write | No | Whether the static value were overridden. 175 | | **label** | String (length of 128 bytes) | read-write | Yes | An user-defined label to facilitate the attribute's identification. 176 | | **static_value** | String (length of 128 bytes) | read-write | No | The attribute's static value (if it is a static attribute). 177 | | **template_id** | Integer | read-write | No | From which template did this attribute come from. 178 | | **type** | String (length of 32 bytes) | read-write | Yes | Attribute type (`static`, `dynamic`, `actuator`). 179 | | **updated** | DateTime (with timezone and µs precision) in ISO format | read-only | No | Attribute last update time. 180 | | **value_type** | String (length of 32 bytes) | read-write | Yes | Attribute value type (`string`, `float`, `integer`, `geo`). 
181 | 182 | All attributes that are read/write can be used when creating or updating the device. All of them are 183 | returned when retrieving device data. 184 | 185 | An example of such structure would be: 186 | 187 | ```json 188 | "attrs": { 189 | "1": [ 190 | { 191 | "label": "rain", 192 | "value_type": "float", 193 | "template_id": "1", 194 | "id": 1, 195 | "static_value": "", 196 | "type": "dynamic", 197 | "created": "2020-09-16T14:50:09.297163+00:00", 198 | "is_static_overridden": false 199 | }, 200 | { 201 | "label": "mark", 202 | "value_type": "string", 203 | "template_id": "1", 204 | "id": 2, 205 | "static_value": "efac", 206 | "type": "static", 207 | "created": "2020-09-16T14:58:25.905376+00:00", 208 | "is_static_overridden": false 209 | } 210 | ] 211 | } 212 | ``` 213 | 214 | ### **Template** 215 | 216 | All devices are based on a **template**, which can be thought as a blueprint: all devices built 217 | using the same template will have the same characteristics. Templates in dojot have one label (any 218 | alphanumeric sequence), a list of attributes which will hold all the device emitted information, and 219 | optionally a few special attributes which will indicate how the device communicates, including 220 | transmission methods (protocol, ports, etc.) and message formats. 221 | 222 | In fact, templates can represent not only *device models*, but it can also abstract a *class of 223 | devices*. For instance, we could have one template to represent all thermometers that will be used 224 | in dojot. This template would have also only one attribute called `temperature`. While creating the 225 | device, the user would select its *physical template*, let's say *TexasInstr882*, and the 226 | `thermometer` template. The user would have also to add the translation instructions in order to map 227 | the temperature reading that will be sent from the device to the `temperature` attribute. 
228 | 229 | In order to create a device, a user selects which templates are going to compose this new device. 230 | All their attributes are merged together and associated to it - they are tightly linked to the 231 | original template so that any template update will reflect all associated devices. 232 | 233 | The information model used for templates is: 234 | 235 | | Attribute | Type | Mode | Required | Description 236 | | ---------------- | ------------------------------------------------------- | ---------- | -------- | -------------------------------------------------------------------------------- 237 | | **attrs** | Map of attributes | read-write | No | Merges the `config_attrs` and the `data_attrs` parameters. 238 | | **config_attrs** | Map of attributes | read-write | No | Stores attributes with the type `meta`. 239 | | **created** | DateTime (with timezone and µs precision) in ISO format | read-only | No | Device creation time. 240 | | **data_attrs** | Map of attributes | read-write | No | Stores attributes with the types `dynamic`, `static` and `actuator`. 241 | | **id** | String (length of 8 bytes) | read-write | No | Unique identifier for the template. 242 | | **label** | String (length of 128 bytes) | read-write | Yes | An user-defined label to facilitate the template's identification. 243 | | **updated** | DateTime (with timezone and µs precision) in ISO format | read-only | No | Device last update time. 
244 | 245 | An example template structure: 246 | 247 | ```json 248 | { 249 | "label": "teste", 250 | "attrs": [ 251 | { 252 | "label": "rain", 253 | "value_type": "float", 254 | "template_id": "1", 255 | "id": 1, 256 | "static_value": "", 257 | "type": "dynamic", 258 | "created": "2020-09-16T14:50:09.297163+00:00" 259 | }, 260 | { 261 | "label": "mark", 262 | "value_type": "string", 263 | "template_id": "1", 264 | "id": 2, 265 | "static_value": "efac", 266 | "type": "static", 267 | "created": "2020-09-16T14:58:25.905376+00:00" 268 | } 269 | ], 270 | "data_attrs": [ 271 | { 272 | "label": "rain", 273 | "value_type": "float", 274 | "template_id": "1", 275 | "id": 1, 276 | "static_value": "", 277 | "type": "dynamic", 278 | "created": "2020-09-16T14:50:09.297163+00:00" 279 | }, 280 | { 281 | "label": "mark", 282 | "value_type": "string", 283 | "template_id": "1", 284 | "id": 2, 285 | "static_value": "efac", 286 | "type": "static", 287 | "created": "2020-09-16T14:58:25.905376+00:00" 288 | } 289 | ], 290 | "id": 1, 291 | "config_attrs": [ ], 292 | "created": "2020-09-16T14:50:09.292714+00:00" 293 | } 294 | ``` 295 | 296 | All attributes that are read-write can be used when creating or updating the template. All of them 297 | are returned when retrieving device data. You might also notice some new attributes: 298 | - `data_attrs`: stores attributes with the types `dynamic`, `static` and `actuator`. 299 | - `config_attrs`: stores attributes with the type `meta`. You can only create this type of attribute 300 | via API, check its [documentation](https://dojot.github.io/device-manager/apiary_latest.html) for 301 | more details. 302 | 303 | These two parameters are merged in the `attrs`. 304 | 305 | ## Events 306 | 307 | There are some messages that are published by Device Manager to Kafka. These messages are 308 | notifications of device management operations, and they can be consumed by any component interested 309 | in them, such as IoT agents. 
310 | 311 | For more information on the parameters of the messages, please refer to the [Device Manager concepts 312 | topic](#concepts). 313 | 314 | __NOTE THAT__ all messages reside in Kafka's `dojot.device-manager.device` topic. 315 | 316 | The events that are emitted by the Device Manager are: 317 | 318 | - `configure` 319 | - `create` 320 | - `remove` 321 | - `update` 322 | - `template.update` **deprecated** 323 | 324 | ### **Event: `configure`** 325 | 326 | This message is published whenever a device must be configured. Its payload is: 327 | 328 | ```json 329 | { 330 | "event": "configure", 331 | "meta": { 332 | "service": "admin", 333 | "timestamp": 1557493697 334 | }, 335 | "data" : { 336 | "id" : "efac", 337 | "attrs": { 338 | "target_temperature" : 23.5 339 | } 340 | } 341 | } 342 | ``` 343 | 344 | The attribute actually used by the device would be `target_temperature` so that it can, for example, 345 | correctly adjust the temperature. It’s up to the receiver of this message (an IoT agent, for 346 | instance) to properly send the configuration to the device. 347 | 348 | ### **Event: `create`** 349 | 350 | This message is published whenever a new device is created. Its payload is: 351 | 352 | ```json 353 | { 354 | "event": "create", 355 | "data": { 356 | "label": "teste", 357 | "templates": [ 358 | 1 359 | ], 360 | "id": "e06357", 361 | "created": "2020-09-16T14:50:34.749230+00:00", 362 | "attrs": { 363 | "1": [ 364 | { 365 | "label": "rain", 366 | "value_type": "float", 367 | "template_id": "1", 368 | "id": 1, 369 | "static_value": "", 370 | "type": "dynamic", 371 | "created": "2020-09-16T14:50:09.297163+00:00", 372 | "is_static_overridden": false 373 | } 374 | ] 375 | } 376 | }, 377 | "meta": { 378 | "service": "admin" 379 | } 380 | } 381 | ``` 382 | 383 | 384 | ### **Event: `remove`** 385 | 386 | This message is published whenever a device is removed. 
Its payload is: 387 | 388 | ```json 389 | { 390 | "event": "remove", 391 | "meta": { 392 | "service": "admin" 393 | }, 394 | "data": { 395 | "id": "efac" 396 | } 397 | } 398 | ``` 399 | 400 | ### **Event: `update`** 401 | 402 | This message is published whenever a new device is directly or indirectly updated. The `indirectly 403 | updated` case happens when a template associated with the device is updated. Its payload looks very 404 | similar to device creation: 405 | 406 | ```json 407 | { 408 | "event": "update", 409 | "data": { 410 | "label": "teste", 411 | "templates": [ 412 | 1 413 | ], 414 | "id": "e06357", 415 | "created": "2020-09-16T14:50:34.749230+00:00", 416 | "attrs": { 417 | "1": [ 418 | { 419 | "label": "rain", 420 | "value_type": "float", 421 | "template_id": "1", 422 | "id": 1, 423 | "static_value": "", 424 | "type": "dynamic", 425 | "created": "2020-09-16T14:50:09.297163+00:00", 426 | "is_static_overridden": false 427 | }, 428 | { 429 | "label": "mark", 430 | "value_type": "string", 431 | "template_id": "1", 432 | "id": 2, 433 | "static_value": "efac", 434 | "type": "static", 435 | "created": "2020-09-16T14:58:25.905376+00:00", 436 | "is_static_overridden": false 437 | } 438 | ] 439 | } 440 | }, 441 | "meta": { 442 | "service": "admin" 443 | } 444 | } 445 | ``` 446 | 447 | ### **Event: `template.update` (deprecated)** 448 | 449 | __IMPORTANT__: this event is deprecated and can be removed from the platform soon. 450 | 451 | This event is emitted every time a template is updated. It contains all the affected devices and the 452 | new model for that template. 
Its payload is: 453 | 454 | ```json 455 | { 456 | "event": "template.update", 457 | "data": { 458 | "affected": [ 459 | "e06357" 460 | ], 461 | "template": { 462 | "label": "teste", 463 | "attrs": [ 464 | { 465 | "label": "rain", 466 | "value_type": "float", 467 | "template_id": "1", 468 | "id": 1, 469 | "static_value": "", 470 | "type": "dynamic", 471 | "created": "2020-09-16T14:50:09.297163+00:00" 472 | }, 473 | { 474 | "label": "mark", 475 | "value_type": "string", 476 | "template_id": "1", 477 | "id": 2, 478 | "static_value": "efac", 479 | "type": "static", 480 | "created": "2020-09-16T14:58:25.905376+00:00" 481 | } 482 | ], 483 | "data_attrs": [ 484 | { 485 | "label": "rain", 486 | "value_type": "float", 487 | "template_id": "1", 488 | "id": 1, 489 | "static_value": "", 490 | "type": "dynamic", 491 | "created": "2020-09-16T14:50:09.297163+00:00" 492 | }, 493 | { 494 | "label": "mark", 495 | "value_type": "string", 496 | "template_id": "1", 497 | "id": 2, 498 | "static_value": "efac", 499 | "type": "static", 500 | "created": "2020-09-16T14:58:25.905376+00:00" 501 | } 502 | ], 503 | "id": 1, 504 | "config_attrs": [ ], 505 | "created": "2020-09-16T14:50:09.292714+00:00" 506 | } 507 | }, 508 | "meta": { 509 | "service": "admin" 510 | } 511 | } 512 | ``` 513 | -------------------------------------------------------------------------------- /createMigration.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # This helps devs create easier to maintain migrations for device-manager 4 | # When migrations are run, pg schemas (namespaces) are set up in runtime, but it is quite 5 | # cumbersome to generate said "shema-less" migrations in the first place. 6 | # 7 | # This creates a clean slate alembic migrations environment to generate new migrations 8 | # for the project. 
9 | 10 | get_dir() { 11 | newdir=$(tr -cd '[:alnum:]' < /dev/urandom | fold -w10 | head -n1) 12 | while [ -d ${newdir} ] ; do 13 | newdir=$(tr -cd '[:alnum:]' < /dev/urandom | fold -w10 | head -n1) 14 | done 15 | echo ${newdir} 16 | } 17 | 18 | script_home=$( dirname $(readlink -f "$0") ) 19 | target=$(get_dir) 20 | home="${script_home}/migrations/versions" 21 | flask db init --directory ${target} 22 | for i in ${home}/*.py ; do 23 | ln -s ${i} ${target}/versions/$(basename ${i}) ; 24 | done 25 | 26 | flask db migrate --directory ${target} 27 | result=$? 28 | 29 | if [ ${result} != 0 ] ; then 30 | flask db upgrade --directory ${target} 31 | flask db migrate --directory ${target} 32 | fi 33 | 34 | 35 | for i in ${target}/versions/*.py ; do 36 | [ ! -h ${i} ] && cp ${i} ${home} 37 | done 38 | 39 | # remove unneeded file 40 | rm -rf ${target} 41 | -------------------------------------------------------------------------------- /docker/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | command=${1:-start} 4 | 5 | TIMEOUT=${GUNICORN_TIMEOUT:-30} 6 | 7 | migrate () { 8 | export FLASK_APP=DeviceManager/main.py 9 | flask db upgrade 10 | unset FLASK_APP 11 | } 12 | 13 | stamp () { 14 | export FLASK_APP=DeviceManager/main.py 15 | flask db stamp 6beff7876a3a 16 | unset FLASK_APP 17 | } 18 | 19 | if [ ${command} = 'start' ]; then 20 | flag=0 21 | retries=0 22 | max_retries=5 23 | sleep_time=5 24 | 25 | echo "Waiting for DB to come up" 26 | python docker/waitForDb.py -w ${sleep_time} -r ${max_retries} 27 | if [ $? -ne 0 ]; then 28 | echo "Could not connect to DB, shutting down!" 
29 | exit 1 30 | fi 31 | echo "Finished waiting for DB" 32 | 33 | while [ ${flag} -eq 0 ]; do 34 | if [ ${retries} -eq ${max_retries} ]; then 35 | echo Executed ${retries} retries, aborting 36 | exit 1 37 | fi 38 | echo gunicorn timeout is ${TIMEOUT} 39 | exec gunicorn DeviceManager.main:app \ 40 | --bind 0.0.0.0:5000 \ 41 | --reload -R \ 42 | --timeout ${TIMEOUT} \ 43 | --access-logfile - \ 44 | --log-file - \ 45 | --env PYTHONUNBUFFERED=1 -k gevent 2>&1 46 | 47 | if [ $? -eq 0 ]; then 48 | flag=1 49 | else 50 | echo "Cannot start application, retying in ${sleep_time} seconds..." 51 | sleep ${sleep_time} 52 | retries=$((retries + 1)) 53 | fi 54 | done 55 | elif [ ${command} = 'migrate' ] ; then 56 | migrate 57 | elif [ ${command} = '020_stamp' ] ; then 58 | stamp 59 | fi 60 | -------------------------------------------------------------------------------- /docker/waitForDb.py: -------------------------------------------------------------------------------- 1 | import psycopg2 2 | from time import sleep 3 | import argparse 4 | 5 | from DeviceManager.conf import CONFIG 6 | 7 | 8 | def wait_for_db(db_args): 9 | """ blocks execution until database is ready """ 10 | 11 | print('Waiting for database to become available...') 12 | retries = db_args.retries 13 | while retries > 0: 14 | try: 15 | connection = psycopg2.connect(user=CONFIG.dbuser, password=CONFIG.dbpass, 16 | host=CONFIG.dbhost) 17 | if CONFIG.create_db: 18 | connection.autocommit = True 19 | cursor = connection.cursor() 20 | cursor.execute("select true from pg_database where datname = '%s';" % CONFIG.dbname) 21 | if len(cursor.fetchall()) == 0: 22 | print("will attempt to create database") 23 | cursor.execute("CREATE database %s;" % CONFIG.dbname) 24 | print("Ready to go") 25 | exit(0) 26 | except psycopg2.Error as e: 27 | print("Database connection error | {}".format(e.pgerror)) 28 | 29 | retries -= 1 30 | print('Will try again in ' + str(db_args.wait)) 31 | sleep(db_args.wait) 32 | 33 | print('Max retries 
reached, failed to connect to Postgres') 34 | exit(1) 35 | 36 | 37 | if __name__ == '__main__': 38 | desc = """Waits for database""" 39 | parser = argparse.ArgumentParser(description=desc) 40 | parser.add_argument('-w', '--wait', help="", default=5, type=int) 41 | parser.add_argument('-r', '--retries', help="", default=20, type=int) 42 | args = parser.parse_args() 43 | wait_for_db(args) 44 | -------------------------------------------------------------------------------- /dredd.yml: -------------------------------------------------------------------------------- 1 | dry-run: null 2 | hookfiles: "./tests/dredd-hooks/*hook.py" 3 | language: python 4 | sandbox: false 5 | init: false 6 | custom: {} 7 | names: false 8 | only: [] 9 | reporter: none 10 | output: [] 11 | header: [] 12 | sorted: false 13 | user: null 14 | inline-errors: false 15 | details: false 16 | method: [] 17 | color: true 18 | level: info 19 | timestamp: false 20 | silent: false 21 | path: [] 22 | hooks-worker-timeout: 5000 23 | hooks-worker-connect-timeout: 4500 24 | hooks-worker-connect-retry: 500 25 | hooks-worker-after-connect-wait: 100 26 | hooks-worker-term-timeout: 5000 27 | hooks-worker-term-retry: 500 28 | hooks-worker-handler-host: 127.0.0.1 29 | hooks-worker-handler-port: 61321 30 | config: ./dredd.yml 31 | blueprint: ./docs/apiary.apib 32 | endpoint: 'http://device-manager:5000' 33 | -------------------------------------------------------------------------------- /local/compose.yml: -------------------------------------------------------------------------------- 1 | version: '2.1' 2 | services: 3 | 4 | postgres: 5 | image: "postgres:9.4" 6 | 7 | zookeeper: 8 | image: "zookeeper:3.4" 9 | 10 | kafka: 11 | image: "ches/kafka:0.10.1.1" 12 | depends_on: 13 | - zookeeper 14 | environment: 15 | ZOOKEEPER_IP: zookeeper 16 | -------------------------------------------------------------------------------- /migrations/README: 
-------------------------------------------------------------------------------- 1 | Generic single-database configuration. -------------------------------------------------------------------------------- /migrations/alembic.ini: -------------------------------------------------------------------------------- 1 | # A generic, single database configuration. 2 | 3 | [alembic] 4 | # template used to generate migration files 5 | # file_template = %%(rev)s_%%(slug)s 6 | 7 | # set to 'true' to run the environment during 8 | # the 'revision' command, regardless of autogenerate 9 | # revision_environment = false 10 | 11 | 12 | # Logging configuration 13 | [loggers] 14 | keys = root,sqlalchemy,alembic 15 | 16 | [handlers] 17 | keys = console 18 | 19 | [formatters] 20 | keys = generic 21 | 22 | [logger_root] 23 | level = WARN 24 | handlers = console 25 | qualname = 26 | 27 | [logger_sqlalchemy] 28 | level = WARN 29 | handlers = 30 | qualname = sqlalchemy.engine 31 | 32 | [logger_alembic] 33 | level = INFO 34 | handlers = 35 | qualname = alembic 36 | 37 | [handler_console] 38 | class = StreamHandler 39 | args = (sys.stderr,) 40 | level = NOTSET 41 | formatter = generic 42 | 43 | [formatter_generic] 44 | format = %(levelname)-5.5s [%(name)s] %(message)s 45 | datefmt = %H:%M:%S 46 | -------------------------------------------------------------------------------- /migrations/env.py: -------------------------------------------------------------------------------- 1 | from __future__ import with_statement 2 | import logging 3 | from logging.config import fileConfig 4 | from alembic import context 5 | from sqlalchemy import engine_from_config, pool 6 | from flask import current_app 7 | from DeviceManager.TenancyManager import list_tenants 8 | 9 | # this is the Alembic Config object, which provides 10 | # access to the values within the .ini file in use. 11 | config = context.config 12 | 13 | # Interpret the config file for Python logging. 14 | # This line sets up loggers basically. 
15 | fileConfig(config.config_file_name) 16 | logger = logging.getLogger('alembic.env') 17 | 18 | # add your model's MetaData object here 19 | # for 'autogenerate' support 20 | # from myapp import mymodel 21 | # target_metadata = mymodel.Base.metadata 22 | config.set_main_option('sqlalchemy.url', current_app.config.get('SQLALCHEMY_DATABASE_URI')) 23 | target_metadata = current_app.extensions['migrate'].db.metadata 24 | 25 | # other values from the config, defined by the needs of env.py, 26 | # can be acquired: 27 | # my_important_option = config.get_main_option("my_important_option") 28 | # ... etc. 29 | 30 | 31 | def run_migrations_offline(): 32 | """Run migrations in 'offline' mode. 33 | 34 | This configures the context with just a URL 35 | and not an Engine, though an Engine is acceptable 36 | here as well. By skipping the Engine creation 37 | we don't even need a DBAPI to be available. 38 | 39 | Calls to context.execute() here emit the given string to the 40 | script output. 41 | 42 | """ 43 | url = config.get_main_option("sqlalchemy.url") 44 | context.configure(url=url) 45 | 46 | with context.begin_transaction(): 47 | context.run_migrations() 48 | 49 | def get_context(): 50 | 51 | # this callback is used to prevent an auto-migration from being generated 52 | # when there are no changes to the schema 53 | # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html 54 | def process_revision_directives(context, revision, directives): 55 | #pylint: disable=unused-argument 56 | if getattr(config.cmd_opts, 'autogenerate', False): 57 | script = directives[0] 58 | if script.upgrade_ops.is_empty(): 59 | directives[:] = [] 60 | logger.info('No changes in schema detected.') 61 | 62 | engine = engine_from_config(config.get_section(config.config_ini_section), 63 | prefix='sqlalchemy.', 64 | poolclass=pool.NullPool) 65 | connection = engine.connect() 66 | context.configure(connection=connection, 67 | target_metadata=target_metadata, 68 | 
process_revision_directives=process_revision_directives, 69 | **current_app.extensions['migrate'].configure_args) 70 | return context, connection 71 | 72 | def run_migrations_online(): 73 | """Run migrations in 'online' mode. 74 | 75 | In this scenario we need to create an Engine 76 | and associate a connection with the context. 77 | 78 | """ 79 | 80 | tenants = [] 81 | conn = get_context()[1] 82 | try: 83 | tenants = list_tenants(conn) 84 | finally: 85 | conn.close() 86 | 87 | for tenant in tenants: 88 | ctx, connection = get_context() 89 | try: 90 | logger.info('About to migrate tenant %s', tenant) 91 | connection.execute('set search_path to "{}"'.format(tenant)) 92 | with ctx.begin_transaction(): 93 | ctx.run_migrations() 94 | finally: 95 | connection.close() 96 | 97 | if context.is_offline_mode(): 98 | run_migrations_offline() 99 | else: 100 | run_migrations_online() 101 | -------------------------------------------------------------------------------- /migrations/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | ${imports if imports else ""} 11 | 12 | # revision identifiers, used by Alembic. 
13 | revision = ${repr(up_revision)} 14 | down_revision = ${repr(down_revision)} 15 | branch_labels = ${repr(branch_labels)} 16 | depends_on = ${repr(depends_on)} 17 | 18 | 19 | def upgrade(): 20 | ${upgrades if upgrades else "pass"} 21 | 22 | 23 | def downgrade(): 24 | ${downgrades if downgrades else "pass"} 25 | -------------------------------------------------------------------------------- /migrations/versions/6beff7876a3a_.py: -------------------------------------------------------------------------------- 1 | """empty message 2 | 3 | Revision ID: 6beff7876a3a 4 | Revises: 5 | Create Date: 2018-04-16 15:34:37.785153 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '6beff7876a3a' 14 | down_revision = None 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.create_table('devices', 22 | sa.Column('id', sa.String(length=8), nullable=False), 23 | sa.Column('label', sa.String(length=128), nullable=False), 24 | sa.Column('created', sa.DateTime(), nullable=True), 25 | sa.Column('updated', sa.DateTime(), nullable=True), 26 | sa.Column('persistence', sa.String(length=128), nullable=True), 27 | sa.PrimaryKeyConstraint('id'), 28 | sa.UniqueConstraint('id') 29 | ) 30 | op.create_table('templates', 31 | sa.Column('id', sa.Integer(), sa.Sequence('template_id'), nullable=False), 32 | sa.Column('label', sa.String(length=128), nullable=False), 33 | sa.Column('created', sa.DateTime(), nullable=True), 34 | sa.Column('updated', sa.DateTime(), nullable=True), 35 | sa.PrimaryKeyConstraint('id') 36 | ) 37 | op.execute(sa.schema.CreateSequence(sa.Sequence('template_id'))) 38 | op.create_table('attrs', 39 | sa.Column('id', sa.Integer(), sa.Sequence('attr_id'), nullable=False), 40 | sa.Column('label', sa.String(length=128), nullable=False), 41 | sa.Column('created', sa.DateTime(), nullable=True), 
42 | sa.Column('updated', sa.DateTime(), nullable=True), 43 | sa.Column('type', sa.String(length=32), nullable=False), 44 | sa.Column('value_type', sa.String(length=32), nullable=False), 45 | sa.Column('static_value', sa.String(length=128), nullable=True), 46 | sa.Column('template_id', sa.Integer(), nullable=True), 47 | sa.Column('parent_id', sa.Integer(), nullable=True), 48 | sa.CheckConstraint('((template_id IS NULL) AND NOT (parent_id IS NULL)) OR (NOT (template_id IS NULL) AND (parent_id IS NULL))'), 49 | sa.ForeignKeyConstraint(['parent_id'], ['attrs.id'], ), 50 | sa.ForeignKeyConstraint(['template_id'], ['templates.id'], ), 51 | sa.PrimaryKeyConstraint('id'), 52 | sa.UniqueConstraint('template_id', 'type', 'label') 53 | ) 54 | op.execute(sa.schema.CreateSequence(sa.Sequence('attr_id'))) 55 | op.create_table('device_template', 56 | sa.Column('device_id', sa.String(length=8), nullable=False), 57 | sa.Column('template_id', sa.Integer(), nullable=False), 58 | sa.ForeignKeyConstraint(['device_id'], ['devices.id'], ), 59 | sa.ForeignKeyConstraint(['template_id'], ['templates.id'], ), 60 | sa.PrimaryKeyConstraint('device_id', 'template_id') 61 | ) 62 | op.create_index(op.f('ix_device_template_device_id'), 'device_template', ['device_id'], unique=False) 63 | op.create_index(op.f('ix_device_template_template_id'), 'device_template', ['template_id'], unique=False) 64 | op.create_table('overrides', 65 | sa.Column('id', sa.Integer(), sa.Sequence('override_id'), nullable=False), 66 | sa.Column('did', sa.String(length=8), nullable=True), 67 | sa.Column('aid', sa.Integer(), nullable=True), 68 | sa.Column('static_value', sa.String(length=128), nullable=True), 69 | sa.ForeignKeyConstraint(['aid'], ['attrs.id'], ), 70 | sa.ForeignKeyConstraint(['did'], ['devices.id'], ), 71 | sa.PrimaryKeyConstraint('id') 72 | ) 73 | op.execute(sa.schema.CreateSequence(sa.Sequence('override_id'))) 74 | # ### end Alembic commands ### 75 | 76 | 77 | def downgrade(): 78 | # ### commands auto 
generated by Alembic - please adjust! ### 79 | op.drop_table('overrides') 80 | op.drop_index(op.f('ix_device_template_template_id'), table_name='device_template') 81 | op.drop_index(op.f('ix_device_template_device_id'), table_name='device_template') 82 | op.drop_table('device_template') 83 | op.drop_table('attrs') 84 | op.drop_table('templates') 85 | op.drop_table('devices') 86 | # ### end Alembic commands ### 87 | -------------------------------------------------------------------------------- /migrations/versions/fabf2ca39860_.py: -------------------------------------------------------------------------------- 1 | """empty message 2 | 3 | Revision ID: fabf2ca39860 4 | Revises: 6beff7876a3a 5 | Create Date: 2018-04-16 15:43:09.997566 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = 'fabf2ca39860' 14 | down_revision = '6beff7876a3a' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.create_table('pre_shared_keys', 22 | sa.Column('attr_id', sa.Integer(), nullable=False), 23 | sa.Column('device_id', sa.String(length=8), nullable=False), 24 | sa.Column('psk', sa.Binary(), nullable=False), 25 | sa.ForeignKeyConstraint(['attr_id'], ['attrs.id'], ), 26 | sa.ForeignKeyConstraint(['device_id'], ['devices.id'], ), 27 | sa.PrimaryKeyConstraint('attr_id', 'device_id') 28 | ) 29 | op.create_unique_constraint(None, 'devices', ['id']) 30 | # ### end Alembic commands ### 31 | 32 | 33 | def downgrade(): 34 | # ### commands auto generated by Alembic - please adjust! 
def get_requirements(reqfile):
    """Parse a pip requirements file into setuptools requirement strings.

    Args:
        reqfile: Path of the requirements file, relative to this script.

    Returns:
        list of strings such as ``Flask==0.12.2`` or ``pkg[extra]>=1.0``.
    """
    path = os.path.join(HERE, reqfile)
    deps = []
    for dep in req.parse_requirements(path, session=download.PipSession()):
        try:
            # Pip 8.1.2 compatible: `specifier` yields Specifier objects.
            # (The original wrapped str(spec) in a redundant ''.join().)
            specs = ','.join(str(spec) for spec in dep.req.specifier)
        except AttributeError:
            # Pip 1.5.4 compatible: `specs` yields (operator, version) tuples.
            specs = ','.join(''.join(spec) for spec in dep.req.specs)
        extras = (
            '[{extras}]'.format(extras=','.join(dep.extras))
            if dep.extras else ''
        )
        deps.append('{name}{extras}{specs}'.format(
            name=dep.name, extras=extras, specs=specs))
    return deps
/usr/src/app/requirements/requirements.txt 9 | RUN pip install -r /usr/src/app/tests/requirements.txt 10 | 11 | ADD . . 12 | ENV PYTHONPATH=${PYTHONPATH}:/usr/src/app 13 | ENV SINGLE_TENANT=true 14 | 15 | CMD ["./tests/start-test.sh"] 16 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dojot/device-manager/28176396984e36ba3484e5bb32bee23491420fb7/tests/__init__.py -------------------------------------------------------------------------------- /tests/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: '2.1' 2 | services: 3 | 4 | data-broker: 5 | image: dojot/data-broker 6 | restart: always 7 | depends_on: 8 | - kafka 9 | - data-broker-redis 10 | 11 | zookeeper: 12 | image: "zookeeper:3.4" 13 | restart: always 14 | 15 | postgres-users: 16 | image: dojot/postgres:9.4.24-alpine 17 | restart: on-failure 18 | command: > 19 | bash -c "createuser kong -d -h postgres -U postgres && createdb kong -U kong -h postgres" 20 | depends_on: 21 | postgres: 22 | condition: service_healthy 23 | logging: 24 | driver: json-file 25 | options: 26 | max-size: 100m 27 | 28 | postgres: 29 | image: dojot/postgres:9.4.24-alpine 30 | restart: always 31 | healthcheck: 32 | test: ["CMD", "pg_isready", "-U", "postgres"] 33 | interval: 10s 34 | timeout: 5s 35 | retries: 5 36 | 37 | data-broker-redis: 38 | image: dojot/redis:5.0.5-alpine3.10 39 | restart: always 40 | networks: 41 | default: 42 | aliases: 43 | - dbmredis 44 | 45 | kafka: 46 | image: "ches/kafka:0.10.1.1" 47 | depends_on: 48 | - zookeeper 49 | restart: always 50 | environment: 51 | ZOOKEEPER_IP: zookeeper 52 | KAFKA_NUM_PARTITIONS: 10 53 | 54 | device-manager: 55 | image: dojot/device-manager 56 | depends_on: 57 | - postgres 58 | - kafka 59 | environment: 60 | - DEV_MNGR_CRYPTO_PASS="kamehameHA" 61 | - 
def sort_attributes(device, attribute):
    """Sort *device*'s ``attribute`` list in place, ordered by each entry's 'label'."""
    def by_label(entry):
        return entry['label']

    device[attribute] = sorted(device[attribute], key=by_label)
def create_actuator_template():
    """Register a template containing an actuator attribute.

    Returns:
        The id of the newly created template.
    """
    template = {
        "label": "SensorModel",
        "attrs": [
            {
                "label": "temperature",
                "type": "dynamic",
                "value_type": "float"
            },
            {
                "label": "battery",
                "type": "actuator",
                "value_type": "float"
            },
            {
                "label": "position",
                "type": "dynamic",
                "value_type": "geopoint"
            },
            {
                "label": "model-id",
                "type": "static",
                "value_type": "string",
                "static_value": "model-001"
            },
            {
                "label": "shared_key",
                "type": "static",
                "value_type": "psk"
            }
        ]
    }
    # NOTE(review): a request-shaped `req` dict was previously built here but
    # never used; removed as dead code.
    params = {
        'content_type': 'application/json',
        'data': json.dumps(template)
    }

    result = TemplateHandler.create_template(params, generate_token())
    template_id = result['template']['id']
    return template_id
@hooks.before('Templates > Templates > Get the current list of templates')
@hooks.before('Templates > Template info > Get template info')
@hooks.before('Templates > Template info > Update template info')
@hooks.before('Templates > Template info > Delete template')
def register_new_template(transaction):
    """Create a sample template and stash its id on the transaction for later hooks."""
    new_id = create_sample_template()
    transaction.setdefault('proprietary', {})
    transaction['proprietary']['template_id'] = new_id
@hooks.before('Devices > Device info > Get the current list of devices > Example 1')
@hooks.before('Devices > Device info > Get the current list of devices associated with given template')
def create_single_device(transaction):
    """Create a sample template plus one device using it; record both ids.

    Stores template_id and device_id under transaction['proprietary'] and
    returns the new device id.
    """
    template_id = create_sample_template()
    if 'proprietary' not in transaction:
        transaction['proprietary'] = {}
    transaction['proprietary']['template_id'] = template_id
    device = {
        "label": "test_device",
        "templates": [template_id]
    }
    # NOTE(review): a request-shaped `req` dict was previously built here but
    # never used; removed as dead code.
    params = {
        'count': '1',
        'verbose': 'False',
        'content_type': 'application/json',
        'data': json.dumps(device)
    }

    result = DeviceHandler.create_device(params, generate_token())
    device_id = result['devices'][0]['id']
    transaction['proprietary']['device_id'] = device_id
    return device_id
@hooks.before('Devices > Device info > Get device info')
@hooks.before('Devices > Device info > Update device info')
@hooks.before('Devices > Device info > Delete device')
@hooks.before('Devices > PSK Manipulation > Generate PSK')
@hooks.before('Devices > Device info > Delete all devices')
def create_device_and_update_device_id(transaction):
    """Create a device and substitute its real id for the 'efac' placeholder in the path."""
    new_id = create_single_device(transaction)
    path = transaction['fullPath']
    transaction['fullPath'] = path.replace('efac', new_id)
    return new_id
@hooks.before_validation('Devices > Device info > Get device info')
@hooks.before_validation('Internal > Device > Get device info')
def update_expected_ids_single_device(transaction):
    """Rewrite the documented example ids in the expected body to the real ones."""
    proprietary = transaction['proprietary']
    tid = str(proprietary['template_id'])

    body = json.loads(transaction['expected']['body'])
    # Re-key the attrs from the doc's hard-coded template id "4865".
    body["attrs"][tid] = body["attrs"].pop("4865")
    for attr in body["attrs"][tid]:
        attr['template_id'] = tid
    body["templates"] = [tid]
    body["id"] = proprietary['device_id']
    transaction['expected']['body'] = json.dumps(body)
@hooks.before('Devices > Device info > Delete all devices')
def update_expected_ids_single_device_actuator_delete(transaction):
    """Point the expected 'removed_devices' payload at the real template/device ids."""
    tid = str(transaction['proprietary']['template_id'])
    device_id = transaction['proprietary']['device_id']

    body = json.loads(transaction['expected']['body'])
    removed = body["removed_devices"][0]
    # Re-key the attrs from the doc's hard-coded template id "4865".
    removed["attrs"][tid] = removed["attrs"].pop("4865")
    for attr in removed["attrs"][tid]:
        attr['template_id'] = tid
    removed["templates"] = [tid]
    removed["id"] = device_id
    transaction['expected']['body'] = json.dumps(body)
@hooks.after_each
def clean_scenario(transaction):
    """Delete every device and template so each scenario starts from a clean slate."""
    # NOTE(review): the original also built a MultiDict-based `args`/`req`
    # pair that was never read (its own TODO flagged it); removed as dead code.
    params = {
        'page_size': 10,
        'page_num': 1,
        'attr_format': 'both',
        'attr': [],
        'attr_type': [],
        'idsOnly': 'false'
    }

    token = generate_token()

    result = DeviceHandler.get_devices(token, params)
    for device in result['devices']:
        DeviceHandler.delete_device(device['id'], token)

    result = TemplateHandler.get_templates(params, token)
    for template in result['templates']:
        TemplateHandler.remove_template(template['id'], token)
def generate_token(service='admin'):
    """Return a Bearer JWT for the given dojot tenant (default: ``'admin'``).

    The token is signed with the static test secret ``'secret'`` and carries
    fixed claims; ``iat``/``exp`` are hard-coded 2018 timestamps, so the
    service under test presumably does not enforce expiration — TODO confirm.

    Args:
        service: Tenant name placed in the token's ``service`` claim.

    Returns:
        str: 'Bearer <jwt>' value ready for an Authorization header.
    """
    encode_data = {
        'userid': 1, 'name': 'Admin (superuser)', 'groups': [1], 'iat':
        1517339633, 'exp': 1517340053, 'email': 'admin@noemail.com', 'profile':
        'admin', 'iss': 'eGfIBvOLxz5aQxA92lFk5OExZmBMZDDh', 'service': service,
        'jti': '7e3086317df2c299cef280932da856e5', 'username': 'admin'
    }

    # PyJWT 1.x returns bytes; decode to str before building the header value.
    jwt_token = jwt.encode(encode_data, 'secret', algorithm='HS256').decode()

    # Substitute Authorization with actual token
    auth = 'Bearer ' + jwt_token
    return auth
class TestApp(unittest.TestCase):
    """Sanity checks for the Flask application's configuration."""

    def test_should_use_JSONIFY_PRETTYPRINT_REGULAR_property_off(self):
        # Pretty-printing of JSON responses must be disabled (compact payloads).
        self.assertFalse(app.config['JSONIFY_PRETTYPRINT_REGULAR'])
    @patch("DeviceManager.BackendHandler.KafkaNotifier")
    @patch("DeviceManager.KafkaNotifier.KafkaProducer.flush")
    def test_configure_event(self, kafka_instance_mock, kafka_flush):
        # A 'configure' notification must flush the underlying Kafka producer.
        device = {'templates': [369], 'label': 'test_device',
                  'id': 1, 'created': '2019-08-29T18:18:07.801602+00:00'}

        KafkaHandler().configure(device, meta={"service": 'admin'})
        self.assertTrue(kafka_flush.called)
    def test_before_request(self):
        """A request carrying no Authorization token must be rejected with 401."""
        with self.app.test_request_context():
            result = before_request()
            self.assertEqual(result.status, '401 UNAUTHORIZED')
            # The JSON error payload explains the missing token.
            self.assertEqual(json.loads(result.response[0])[
                'message'], 'No authorization token has been supplied')
None 27 | self.assertIsNotNone(DeviceHandler.generate_device_id()) 28 | 29 | query_property_getter_mock.return_value.filter_by.return_value.first.return_value = 'existed_device_id' 30 | with pytest.raises(HTTPRequestError): 31 | DeviceHandler.generate_device_id() 32 | 33 | @patch('DeviceManager.DeviceHandler.db') 34 | def test_get_devices(self, db_mock): 35 | db_mock.session = UnifiedAlchemyMagicMock() 36 | 37 | db_mock.session.paginate().items = [Device(id=1, label='test_device1')] 38 | 39 | params_query = {'page_number': 5, 'per_page': 1, 'sortBy': None, 'attr': [ 40 | ], 'idsOnly': 'false', 'attr_type': [], 'label': 'test_device1'} 41 | token = generate_token() 42 | 43 | result = DeviceHandler.get_devices(token, params_query) 44 | 45 | self.assertIsNotNone(result) 46 | self.assertTrue(json.dumps(result['devices'])) 47 | 48 | params_query = {'page_number': 1, 'per_page': 1, 'sortBy': None, 49 | 'attr': [], 'idsOnly': 'false', 'attr_type': []} 50 | result = DeviceHandler.get_devices(token, params_query) 51 | self.assertTrue(json.dumps(result['devices'])) 52 | self.assertIsNotNone(result) 53 | 54 | params_query = {'page_number': 1, 'per_page': 1, 'sortBy': None, 'attr': [ 55 | 'foo=bar'], 'idsOnly': 'false', 'attr_type': []} 56 | result = DeviceHandler.get_devices(token, params_query) 57 | self.assertTrue(json.dumps(result['devices'])) 58 | self.assertIsNotNone(result) 59 | 60 | params_query = {'sortBy': None, 'attr': [], 61 | 'idsOnly': 'true', 'attr_type': []} 62 | 63 | with patch.object(DeviceHandler, "get_only_ids", return_value=['4f2b', '1e4a']) as DeviceOnlyIds: 64 | result = DeviceHandler.get_devices(token, params_query) 65 | self.assertTrue(json.dumps(result)) 66 | DeviceOnlyIds.assert_called_once() 67 | 68 | params_query = {'page_number': 5, 'per_page': 1, 'sortBy': None, 'attr': [ 69 | ], 'idsOnly': 'false', 'attr_type': [], 'label': 'test_device1'} 70 | result = DeviceHandler.get_devices(token, params_query, True) 71 | 
    @patch('DeviceManager.DeviceHandler.db')
    def test_list_devicesId(self, db_mock):
        # list_ids should return the flat list of device ids for the tenant,
        # and that list must be JSON-serializable.
        db_mock.session = AlchemyMagicMock()
        token = generate_token()

        db_mock.session.query(Device.id).all.return_value = ['4f2b', '1e4a']

        result = DeviceHandler.list_ids(token)
        self.assertTrue(json.dumps(result))
        self.assertIsNotNone(result)
@patch('DeviceManager.DeviceHandler.db') 122 | @patch('flask_sqlalchemy._QueryProperty.__get__') 123 | def test_add_template_to_device(self, db_mock, query_property_getter_mock): 124 | db_mock.session = AlchemyMagicMock() 125 | token = generate_token() 126 | 127 | query_property_getter_mock.filter_by.one.return_value = Device( 128 | id=1, label='device_label') 129 | result = DeviceHandler.add_template_to_device( 130 | token, 'device_id', 'template_id') 131 | self.assertIsNotNone(result) 132 | self.assertIsNotNone(result['device']) 133 | self.assertEqual(result['message'], 'device updated') 134 | 135 | @patch('DeviceManager.DeviceHandler.db') 136 | @patch('flask_sqlalchemy._QueryProperty.__get__') 137 | def test_remove_template_to_device(self, db_mock, query_property_getter_mock): 138 | db_mock.session = AlchemyMagicMock() 139 | token = generate_token() 140 | 141 | query_property_getter_mock.filter_by.one.return_value = Device( 142 | id=1, label='device_label') 143 | result = DeviceHandler.remove_template_from_device( 144 | token, 'device_id', 'template_id') 145 | self.assertIsNotNone(result) 146 | self.assertIsNotNone(result['device']) 147 | self.assertEqual(result['message'], 'device updated') 148 | 149 | @patch('DeviceManager.DeviceHandler.db') 150 | def test_get_devices_by_templateId(self, db_mock): 151 | db_mock.session = AlchemyMagicMock() 152 | token = generate_token() 153 | 154 | params_query = {'page_number': 1, 'per_page': 1} 155 | 156 | db_mock.session.paginate().items = [Device(id=1, label='test_device1')] 157 | result = DeviceHandler.get_by_template( 158 | token, params_query, 'template_id') 159 | self.assertIsNotNone(result) 160 | 161 | @patch('DeviceManager.DeviceHandler.db') 162 | @patch('flask_sqlalchemy._QueryProperty.__get__') 163 | def test_generate_shared_key(self, db_mock_session, query_property_getter_mock): 164 | 165 | device = Device(id=1, label='test_device') 166 | token = generate_token() 167 | result = None 168 | 169 | with 
patch('DeviceManager.DeviceHandler.assert_device_exists') as mock_device_exist_wrapper: 170 | mock_device_exist_wrapper.return_value = None 171 | 172 | with self.assertRaises(HTTPRequestError): 173 | DeviceHandler.gen_psk(token, 'device_id', 1024) 174 | 175 | mock_device_exist_wrapper.return_value = device 176 | 177 | with patch('DeviceManager.DeviceHandler.serialize_full_device') as mock_serialize_device_wrapper: 178 | mock_serialize_device_wrapper.return_value = {'templates': [369], 'label': 'test_device', 'id': 1, 179 | 'created': '2019-08-29T18:18:07.801602+00:00', 'attrs': {369: [ 180 | {'label': 'shared_key', 'template_id': '369', 'id': 1504, 'type': 'static', 'created': '2019-08-29T18:18:07.778178+00:00', 181 | 'value_type': 'psk'}]}} 182 | 183 | with patch.object(KafkaInstanceHandler, "getInstance", return_value=MagicMock()): 184 | query_property_getter_mock.return_value.session.return_value.query.return_value.filter_by.first.return_value = None 185 | result = DeviceHandler.gen_psk(token, 'device_id', 1024) 186 | self.assertIsNotNone(result) 187 | 188 | query_property_getter_mock.return_value.session.return_value.query.return_value.filter_by.first.return_value = MagicMock() 189 | result = DeviceHandler.gen_psk(token, 'device_id', 1024) 190 | self.assertIsNotNone(result) 191 | 192 | result = DeviceHandler.gen_psk( 193 | token, 'device_id', 1024, ['shared_key']) 194 | self.assertIsNotNone(result) 195 | 196 | with self.assertRaises(HTTPRequestError): 197 | DeviceHandler.gen_psk(token, 'device_id', 1024, [ 198 | 'shared_key_not_contains']) 199 | 200 | with self.assertRaises(HTTPRequestError): 201 | DeviceHandler.gen_psk(token, 'device_id', 1025) 202 | 203 | with self.assertRaises(HTTPRequestError): 204 | DeviceHandler.gen_psk(token, 'device_id', 0) 205 | 206 | with self.assertRaises(HTTPRequestError): 207 | DeviceHandler.gen_psk(token, 'device_id', -1) 208 | 209 | @patch('DeviceManager.DeviceHandler.db') 210 | @patch('flask_sqlalchemy._QueryProperty.__get__') 
211 | def test_copy_shared_key(self, db_mock_session, query_property_getter_mock): 212 | 213 | deviceSrc = Device(id=1, label='test_device') 214 | token = generate_token() 215 | result = None 216 | 217 | with patch('DeviceManager.DeviceHandler.assert_device_exists') as mock_device_exist_wrapper: 218 | mock_device_exist_wrapper.return_value = None 219 | 220 | with self.assertRaises(HTTPRequestError): 221 | DeviceHandler.copy_psk( 222 | token, 'device_id_src', 'label', 'device_id_dest', 'label') 223 | 224 | mock_device_exist_wrapper.return_value = deviceSrc 225 | 226 | with patch('DeviceManager.DeviceHandler.serialize_full_device') as mock_serialize_device_wrapper: 227 | mock_serialize_device_wrapper.return_value = {'templates': [369], 'label': 'test_device', 'id': 1, 228 | 'created': '2019-08-29T18:18:07.801602+00:00', 'attrs': {369: [ 229 | {'static_value': 'model-001', 'label': 'shared_key', 'value_type': 'psk', 230 | 'type': 'static', 'template_id': '391', 'id': 1591, 231 | 'created': '2019-08-29T18:24:43.490221+00:00', 'is_static_overridden': False}]}} 232 | 233 | with patch.object(KafkaInstanceHandler, "getInstance", return_value=MagicMock()): 234 | query_property_getter_mock.return_value.session.return_value.query.return_value.filter_by.first.return_value = None 235 | 236 | with self.assertRaises(HTTPRequestError): 237 | DeviceHandler.copy_psk( 238 | token, 'device_id_src', 'label_not_exist_dest_src', 'device_id_dest', 'label_not_exist_dest') 239 | 240 | result = DeviceHandler.copy_psk( 241 | token, 'device_id_src', 'shared_key', 'device_id_dest', 'shared_key') 242 | self.assertIsNone(result) 243 | 244 | def test_validate_device_id__should_pass_validation(self): 245 | DeviceHandler.validate_device_id('8d73f1') 246 | 247 | self.assertTrue(True) 248 | 249 | def test_validate_device_id__should_raise_an_error__when_id_is_invalid(self): 250 | with pytest.raises(ValidationError) as validationError: 251 | DeviceHandler.validate_device_id('8dp3p1') 252 | 253 | 
self.assertEqual(validationError.value.messages[0], 'Device ID must be 2-6 characters and must be hexadecimal (0-9,a-f,A-F).') 254 | 255 | def test_validate_device_id__should_raise_an_error__when_id_longer_than_6(self): 256 | with pytest.raises(ValidationError) as validationError: 257 | DeviceHandler.validate_device_id('8d33f222') 258 | 259 | self.assertEqual(validationError.value.messages[0], 'Device ID must be 2-6 characters and must be hexadecimal (0-9,a-f,A-F).') 260 | 261 | @patch('DeviceManager.DeviceHandler.db') 262 | @patch('flask_sqlalchemy._QueryProperty.__get__') 263 | def test_create_device(self, db_mock_session, query_property_getter_mock): 264 | db_mock_session.session = AlchemyMagicMock() 265 | token = generate_token() 266 | 267 | data = '{"label":"test_device","templates":[1]}' 268 | 269 | with patch('DeviceManager.DeviceHandler.DeviceHandler.generate_device_id') as mock_device_id: 270 | mock_device_id.return_value = 'test_device_id' 271 | 272 | with patch('DeviceManager.DeviceHandler.DeviceHandler.validate_device_id') as mock_validate_device_id: 273 | mock_validate_device_id.return_value = True 274 | 275 | with patch.object(KafkaInstanceHandler, "getInstance", return_value=MagicMock()): 276 | 277 | params = {'count': '1', 'verbose': 'false', 278 | 'content_type': 'application/json', 'data': data} 279 | result = DeviceHandler.create_device(params, token) 280 | 281 | self.assertIsNotNone(result) 282 | self.assertTrue(result['devices']) 283 | self.assertEqual(result['message'], 'devices created') 284 | self.assertEqual(result['devices'][0]['id'], 'test_device_id') 285 | self.assertEqual(result['devices'][0]['label'], 'test_device') 286 | 287 | params = {'count': '1', 'verbose': 'true', 288 | 'content_type': 'application/json', 'data': data} 289 | result = DeviceHandler.create_device(params, token) 290 | self.assertIsNotNone(result) 291 | self.assertTrue(result['devices']) 292 | self.assertEqual(result['message'], 'device created') 293 | 294 | # Here 
contains the validation when the count is not a number 295 | params = {'count': 'is_not_a_number', 'verbose': 'false', 296 | 'content_type': 'application/json', 'data': data} 297 | 298 | with self.assertRaises(HTTPRequestError): 299 | result = DeviceHandler.create_device(params, token) 300 | 301 | # Here contains the HttpRequestError validating de count with verbose 302 | params = {'count': '2', 'verbose': 'true', 303 | 'content_type': 'application/json', 'data': data} 304 | 305 | with self.assertRaises(HTTPRequestError): 306 | result = DeviceHandler.create_device(params, token) 307 | 308 | @patch('DeviceManager.DeviceHandler.db') 309 | @patch('flask_sqlalchemy._QueryProperty.__get__') 310 | def test_update_device(self, db_mock_session, query_property_getter_mock): 311 | db_mock_session.session = AlchemyMagicMock() 312 | token = generate_token() 313 | 314 | data = '{"label": "test_updated_device", "templates": [4865]}' 315 | 316 | with patch.object(KafkaInstanceHandler, "getInstance", return_value=MagicMock()): 317 | params = {'content_type': 'application/json', 'data': data} 318 | result = DeviceHandler.update_device( 319 | params, 'test_device_id', token) 320 | self.assertIsNotNone(result) 321 | self.assertEqual(result['message'], 'device updated') 322 | self.assertIsNotNone(result['device']) 323 | 324 | @patch('DeviceManager.DeviceHandler.db') 325 | @patch('flask_sqlalchemy._QueryProperty.__get__') 326 | def test_configure_device(self, db_mock_session, query_property_getter_mock): 327 | db_mock_session.session = AlchemyMagicMock() 328 | token = generate_token() 329 | 330 | device = Device(id=1, label='test_device') 331 | 332 | with patch('DeviceManager.DeviceHandler.assert_device_exists') as mock_device_exist_wrapper: 333 | mock_device_exist_wrapper.return_value = device 334 | 335 | with patch('DeviceManager.DeviceHandler.serialize_full_device') as mock_serialize_device_wrapper: 336 | mock_serialize_device_wrapper.return_value = {'templates': [369], 'label': 
'test_device', 'id': 1, 337 | 'created': '2019-08-29T18:18:07.801602+00:00', 'attrs': {369: [ 338 | {'label': 'temperature', 'template_id': '369', 'id': 1504, 'type': 'actuator', 'created': '2019-08-29T18:18:07.778178+00:00', 339 | 'value_type': 'psk'}]}} 340 | 341 | data = '{"topic": "/admin/efac/config", "attrs": {"temperature": 10.6}}' 342 | params = {'data': data} 343 | 344 | with patch.object(KafkaInstanceHandler, "getInstance", return_value=MagicMock()): 345 | result = DeviceHandler.configure_device(params, 'test_device_id', token) 346 | self.assertIsNotNone(result) 347 | self.assertEqual(result[' status'], 'configuration sent to device') 348 | 349 | data = '{"topic": "/admin/efac/config", "attrs": {"test_attr": "xpto"}}' 350 | params = {'data': data} 351 | 352 | with self.assertRaises(HTTPRequestError): 353 | DeviceHandler.configure_device(params, 'test_device_id', token) 354 | 355 | @patch('DeviceManager.DeviceHandler.db') 356 | @patch('flask_sqlalchemy._QueryProperty.__get__') 357 | def test_endpoint_delete_all_devices(self, db_mock, query_property_getter_mock): 358 | db_mock.session = AlchemyMagicMock() 359 | with self.app.test_request_context(): 360 | with patch("DeviceManager.DeviceHandler.retrieve_auth_token") as auth_mock: 361 | auth_mock.return_value = generate_token() 362 | result = flask_delete_all_device() 363 | self.assertFalse(json.loads(result.response[0])['removed_devices']) 364 | self.assertEqual(json.loads(result.response[0])['result'], 'ok') 365 | 366 | @patch('DeviceManager.DeviceHandler.db') 367 | @patch('flask_sqlalchemy._QueryProperty.__get__') 368 | def test_endpoint_get_device(self, db_mock, query_property_getter_mock): 369 | db_mock.session = AlchemyMagicMock() 370 | with self.app.test_request_context(): 371 | with patch("DeviceManager.DeviceHandler.retrieve_auth_token") as auth_mock: 372 | auth_mock.return_value = generate_token() 373 | with patch.object(DeviceHandler, "get_device") as mock_device: 374 | mock_device.return_value = 
{'label': 'test_device', 'id':1, 'created': '2019-08-29T18:18:07.801602+00:00', 'attrs': {}} 375 | result = flask_get_device('test_device_id') 376 | self.assertIsNotNone(result.response) 377 | 378 | @patch('DeviceManager.DeviceHandler.db') 379 | @patch('flask_sqlalchemy._QueryProperty.__get__') 380 | def test_endpoint_remove_device(self, db_mock, query_property_getter_mock): 381 | db_mock.session = AlchemyMagicMock() 382 | with self.app.test_request_context(): 383 | with patch("DeviceManager.DeviceHandler.retrieve_auth_token") as auth_mock: 384 | with patch.object(KafkaInstanceHandler, "getInstance", return_value=MagicMock()): 385 | auth_mock.return_value = generate_token() 386 | 387 | with patch.object(DeviceHandler, "delete_device") as mock_remove_device: 388 | mock_remove_device.return_value = {'result': 'ok', 'removed_device': {'id': 1, 'label': 'test_device', 'created': '2019-08-29T18:18:07.801602+00:00'}} 389 | result = flask_remove_device('test_device_id') 390 | self.assertIsNotNone(result.response) 391 | self.assertEqual(json.loads(result.response[0])['result'], 'ok') 392 | 393 | @patch('DeviceManager.DeviceHandler.db') 394 | @patch('flask_sqlalchemy._QueryProperty.__get__') 395 | def test_endpoint_flask_add_template_to_device(self, db_mock, query_property_getter_mock): 396 | db_mock.session = AlchemyMagicMock() 397 | with self.app.test_request_context(): 398 | with patch("DeviceManager.DeviceHandler.retrieve_auth_token") as auth_mock: 399 | auth_mock.return_value = generate_token() 400 | 401 | with patch.object(DeviceHandler, "add_template_to_device") as mock_template_to_device: 402 | mock_template_to_device.return_value = {'message': 'device updated', 'id':1, 'created': '2019-08-29T18:18:07.801602+00:00', 'attrs': {}} 403 | result = flask_add_template_to_device('test_device_id', 'test_template_id') 404 | self.assertIsNotNone(result.response) 405 | self.assertEqual(json.loads(result.response[0])['message'], 'device updated') 406 | 407 | 
@patch('DeviceManager.DeviceHandler.db') 408 | @patch('flask_sqlalchemy._QueryProperty.__get__') 409 | def test_endpoint_flask_remove_template_from_device(self, db_mock, query_property_getter_mock): 410 | db_mock.session = AlchemyMagicMock() 411 | with self.app.test_request_context(): 412 | with patch("DeviceManager.DeviceHandler.retrieve_auth_token") as auth_mock: 413 | auth_mock.return_value = generate_token() 414 | with patch.object(DeviceHandler, "remove_template_from_device") as mock_template_to_device: 415 | mock_template_to_device.return_value = {'message': 'device updated', 'id':140088130054016, 'created': '2019-08-29T18:18:07.801602+00:00', 'attrs': {}} 416 | result = flask_remove_template_from_device('test_device_id', 'test_template_id') 417 | self.assertIsNotNone(result.response) 418 | self.assertEqual(json.loads(result.response[0])['message'], 'device updated') 419 | 420 | @patch('DeviceManager.DeviceHandler.db') 421 | @patch('flask_sqlalchemy._QueryProperty.__get__') 422 | def test_endpoint_generate_psk(self, db_mock, query_property_getter_mock): 423 | db_mock.session = AlchemyMagicMock() 424 | with self.app.test_request_context(): 425 | with patch("DeviceManager.DeviceHandler.retrieve_auth_token") as auth_mock: 426 | result = flask_gen_psk('test_device_id') 427 | self.assertEqual(json.loads(result.response[0])['message'], 'Missing key_length parameter') 428 | self.assertEqual(json.loads(result.response[0])['status'], 400) 429 | 430 | with patch("DeviceManager.DeviceHandler.request") as req: 431 | req.args = { 432 | "key_length": "200" 433 | } 434 | 435 | auth_mock.return_value = generate_token() 436 | result = flask_gen_psk('test_device_id') 437 | self.assertEqual(json.loads(result.response[0])['message'], 'ok') 438 | self.assertEqual(json.loads(result.response[0])['status'], 204) 439 | 440 | @patch('DeviceManager.DeviceHandler.db') 441 | @patch('flask_sqlalchemy._QueryProperty.__get__') 442 | def test_endpoint_internal_get_device(self, db_mock, 
query_property_getter_mock): 443 | db_mock.session = AlchemyMagicMock() 444 | with self.app.test_request_context(): 445 | with patch("DeviceManager.DeviceHandler.retrieve_auth_token") as auth_mock: 446 | auth_mock.return_value = generate_token() 447 | with patch.object(DeviceHandler, "get_device") as mock_getDevice: 448 | mock_getDevice.return_value = {'id': 140110840862312, 'created': '2019-08-29T18:18:07.801602+00:00', 'attrs': {}} 449 | result = flask_internal_get_device('test_device_id') 450 | self.assertIsNotNone(result.response) 451 | -------------------------------------------------------------------------------- /tests/test_error_manager.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import json 3 | import unittest 4 | from unittest.mock import Mock, MagicMock, patch, call 5 | from flask import Flask 6 | 7 | from DeviceManager.ErrorManager import not_found, internal_error 8 | 9 | class TestErrorManager(unittest.TestCase): 10 | 11 | app = Flask(__name__) 12 | 13 | def test_not_found_endpoint(self): 14 | with self.app.test_request_context(): 15 | result = not_found(Mock()) 16 | self.assertEqual(result.status, '404 NOT FOUND') 17 | self.assertEqual(json.loads(result.response[0])[ 18 | 'msg'], 'Invalid endpoint requested') 19 | 20 | def test_internal_error(self): 21 | with self.app.test_request_context(): 22 | result = internal_error(Mock()) 23 | self.assertEqual(result.status, '500 INTERNAL SERVER ERROR') 24 | self.assertEqual(json.loads(result.response[0])[ 25 | 'msg'], 'Internal Error') 26 | -------------------------------------------------------------------------------- /tests/test_import_handler.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import json 3 | import unittest 4 | from unittest.mock import Mock, MagicMock, patch, call 5 | 6 | from alchemy_mock.mocking import AlchemyMagicMock 7 | 8 | from DeviceManager.ImportHandler import 
from DeviceManager.DatabaseModels import Device, DeviceTemplate
from DeviceManager.utils import HTTPRequestError
from DeviceManager.BackendHandler import KafkaInstanceHandler, KafkaHandler

from .token_test_generator import generate_token


class TestImportHandler(unittest.TestCase):
    """Exercises ImportHandler's sequence, template, device and import flows."""

    @patch('DeviceManager.ImportHandler.db')
    def test_drop_sequence(self, db_mock):
        db_mock.session = AlchemyMagicMock()
        self.assertIsNone(ImportHandler.drop_sequences())

    # Renamed from test_replaceIds_to_imoprtIds: fixes the "imoprt" typo and
    # follows snake_case; discovery by the test_ prefix is unaffected.
    def test_replace_ids_to_import_ids(self):
        """replace_ids_by_import_ids rewrites "id" keys to "import_id"."""
        json_import = '{"id": "test_value"}'

        result = ImportHandler.replace_ids_by_import_ids(json_import)
        self.assertIsNotNone(result)
        self.assertIn('import_id', result)

    @patch('DeviceManager.ImportHandler.db')
    def test_restore_template_sequence(self, db_mock):
        db_mock.session = AlchemyMagicMock()
        self.assertIsNone(ImportHandler.restore_template_sequence())

    @patch('DeviceManager.ImportHandler.db')
    def test_restore_attr_sequence(self, db_mock):
        db_mock.session = AlchemyMagicMock()
        self.assertIsNone(ImportHandler.restore_attr_sequence())

    @patch('DeviceManager.ImportHandler.db')
    def test_restore_sequences(self, db_mock):
        db_mock.session = AlchemyMagicMock()
        self.assertIsNone(ImportHandler.restore_sequences())

    @patch("DeviceManager.BackendHandler.KafkaNotifier")
    @patch("DeviceManager.KafkaNotifier.KafkaProducer.flush")
    def test_notifies_deletion_to_kafka(self, kafka_instance_mock, kafka_flush):
        """A device removal notification reaches the Kafka producer."""
        with patch('DeviceManager.ImportHandler.serialize_full_device') as mock_serialize_device_wrapper:
            mock_serialize_device_wrapper.return_value = {
                'templates': [369], 'label': 'test_device', 'id': 1,
                'created': '2019-08-29T18:18:07.801602+00:00'}

            with patch.object(KafkaInstanceHandler, "getInstance", return_value=KafkaHandler()):
                ImportHandler().notifies_deletion_to_kafka('test_device', 'admin')
                self.assertTrue(kafka_flush.called)

    @patch('DeviceManager.ImportHandler.db')
    def test_delete_records(self, db_mock):
        db_mock.session = AlchemyMagicMock()
        self.assertIsNone(ImportHandler.delete_records('admin'))

    @patch('DeviceManager.ImportHandler.db')
    def test_clear_db_config(self, db_mock):
        db_mock.session = AlchemyMagicMock()
        self.assertIsNone(ImportHandler.clear_db_config('admin'))

    @patch('DeviceManager.ImportHandler.db')
    def test_restore_db_config(self, db_mock):
        db_mock.session = AlchemyMagicMock()
        self.assertIsNone(ImportHandler.restore_db_config())

    @patch('DeviceManager.ImportHandler.db')
    def test_save_templates(self, db_mock):
        """Templates are saved; unmatched import ids yield attr-less templates."""
        db_mock.session = AlchemyMagicMock()

        json_payload = {'templates': [{'import_id': 1, 'attrs': [
            {'label': 'temperature', 'type': 'dynamic', 'value_type': 'float'}]}],
            'label': 'test_device', 'id': 1,
            'created': '2019-08-29T18:18:07.801602+00:00'}

        json_data = {"label": "test_device", "id": 1,
                     "templates": [{'id': 1, 'label': 'test_template'}]}

        result = ImportHandler.save_templates(json_data, json_payload)
        self.assertIsNotNone(result)
        self.assertTrue(result)

        # Template id 2 has no matching import_id, so no attrs are attached.
        json_data = {"label": "test_device", "id": 1,
                     "templates": [{'id': 2, 'label': 'test_template'}]}
        result = ImportHandler.save_templates(json_data, json_payload)
        self.assertIsNotNone(result)
        self.assertFalse(result[0].attrs)

    @patch('DeviceManager.ImportHandler.db')
    def test_set_templates_on_device(self, db_mock):
        db_mock.session = AlchemyMagicMock()

        json_payload = {'templates': [{'import_id': 1, 'attrs': [
            {'label': 'temperature', 'type': 'dynamic', 'value_type': 'float'}]}],
            'label': 'test_device', 'id': 1,
            'created': '2019-08-29T18:18:07.801602+00:00'}

        saved_templates = [DeviceTemplate(label='test_template', attrs=[])]

        self.assertIsNone(ImportHandler.set_templates_on_device(
            Mock(), json_payload, saved_templates))

    @patch('DeviceManager.ImportHandler.db')
    def test_save_devices(self, db_mock):
        db_mock.session = AlchemyMagicMock()

        json_payload = {"devices": [{"id": "68fc", "label": "test_device_0"}, {
            "id": "94dc", "label": "test_device_1"}]}
        json_data = {"devices": [{"id": "68fc", "label": "test_device_0"}, {
            "id": "94dc", "label": "test_device_1"}]}

        saved_templates = [DeviceTemplate(label='test_template', attrs=[])]

        result = ImportHandler.save_devices(
            json_data, json_payload, saved_templates)
        self.assertIsNotNone(result)
        self.assertTrue(result)

    @patch("DeviceManager.BackendHandler.KafkaNotifier")
    @patch("DeviceManager.KafkaNotifier.KafkaProducer.flush")
    def test_notifies_creation_to_kafka(self, kafka_instance_mock, kafka_flush):
        """A device creation notification reaches the Kafka producer."""
        with patch('DeviceManager.ImportHandler.serialize_full_device') as mock_serialize_device_wrapper:
            mock_serialize_device_wrapper.return_value = {
                'templates': [369], 'label': 'test_device', 'id': 1,
                'created': '2019-08-29T18:18:07.801602+00:00'}

            with patch.object(KafkaInstanceHandler, "getInstance", return_value=KafkaHandler()):
                ImportHandler().notifies_creation_to_kafka(
                    [Device(id=1, label='test_device')], 'admin')
                self.assertTrue(kafka_flush.called)

    @patch('DeviceManager.ImportHandler.db')
    def test_import_data(self, db_mock):
        """import_data accepts a well-formed payload and rejects a bad one."""
        db_mock.session = AlchemyMagicMock()
        token = generate_token()

        data = """{"templates": [{"id": 1, "label": "template1", "attrs": [{"label": "temperature", "type": "dynamic", "value_type": "float"}]}],
        "devices": [{"id": "68fc", "label": "test_device_0"},{"id": "94dc","label": "test_device_1"}]}"""

        with patch.object(KafkaInstanceHandler, "getInstance", return_value=MagicMock()):
            result = ImportHandler.import_data(data, token, 'application/json')
            self.assertIsNotNone(result)
            self.assertEqual(result['message'], 'data imported!')

        # "templates" must be a list; an object here is invalid.
        data = """{"templates": {"id": 1, "label": "template1", "attrs": [{"label": "temperature", "type": "dynamic", "value_type": "float"}]},
        "devices": [{"id": "68fc", "label": "test_device_0"},{"id": "94dc","label": "test_device_1"}]}"""

        with self.assertRaises(HTTPRequestError):
            ImportHandler.import_data(data, token, 'application/json')
"getInstance", return_value=MagicMock()): 142 | result = ImportHandler.import_data(data, token, 'application/json') 143 | self.assertIsNotNone(result) 144 | self.assertEqual(result['message'], 'data imported!') 145 | 146 | data = """{"templates": {"id": 1, "label": "template1", "attrs": [{"label": "temperature", "type": "dynamic", "value_type": "float"}]}, 147 | "devices": [{"id": "68fc", "label": "test_device_0"},{"id": "94dc","label": "test_device_1"}]}""" 148 | 149 | with self.assertRaises(HTTPRequestError): 150 | ImportHandler.import_data(data, token, 'application/json') 151 | -------------------------------------------------------------------------------- /tests/test_kafka_notifier.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import json 3 | import unittest 4 | from unittest.mock import Mock, MagicMock, patch, call 5 | 6 | import requests 7 | from DeviceManager.Logger import Log 8 | from DeviceManager.KafkaNotifier import DeviceEvent, NotificationMessage, KafkaNotifier 9 | 10 | class TestKafkaNotifier(unittest.TestCase): 11 | 12 | def test_notification_message_to_json(self): 13 | data = {'label': 'test_device', 'id': 'test_device_id', 'templates': [1], 'attrs': {}} 14 | kafkaNotification = NotificationMessage(DeviceEvent.CREATE, data, m={"service": 'admin'}) 15 | 16 | result = kafkaNotification.to_json() 17 | self.assertIsNotNone(result) 18 | self.assertIn('event', result) 19 | self.assertIn('data', result) 20 | self.assertIn('meta', result) 21 | 22 | def test_get_topic(self): 23 | with patch.object(KafkaNotifier, "__init__", lambda x: None): 24 | KafkaNotifier.topic_map = {} 25 | 26 | with patch("requests.get") as request_mock: 27 | request_mock.return_value = Mock(status_code=200, json=Mock(return_value={'topic': '83a257de-c421-4529-b42d-5976def7b526'})) 28 | result = KafkaNotifier().get_topic('admin', 'dojot.device-manager.device') 29 | self.assertIsNotNone(result) 30 | self.assertEqual(result, 
'83a257de-c421-4529-b42d-5976def7b526') 31 | 32 | def test_send_notification(self): 33 | data = {'label': 'test_device', 'id': 'test_device_id', 'templates': [1], 'attrs': {}} 34 | 35 | with patch.object(KafkaNotifier, "__init__", lambda x: None): 36 | KafkaNotifier.kf_prod = Mock() 37 | self.assertIsNone(KafkaNotifier().send_notification(DeviceEvent.CREATE, data, meta={"service": 'admin'})) 38 | 39 | with patch.object(KafkaNotifier, "get_topic", return_value=None): 40 | self.assertIsNone(KafkaNotifier().send_notification(DeviceEvent.CREATE, data, meta={"service": 'admin'})) 41 | 42 | def test_send_raw(self): 43 | event = { 44 | "event": DeviceEvent.TEMPLATE, 45 | "data": { 46 | "affected": [], 47 | "template": {'label': 'SensorModelUpdated', 'config_attrs': [], 'id': 1, 'data_attrs': [], 'attrs': []} 48 | }, 49 | "meta": {"service": 'admin'} 50 | } 51 | 52 | with patch.object(KafkaNotifier, "__init__", lambda x: None): 53 | self.assertIsNone(KafkaNotifier().send_raw(event, 'admin')) 54 | -------------------------------------------------------------------------------- /tests/test_logger_handler.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import json 3 | import unittest 4 | 5 | from DeviceManager.LoggerHandler import LoggerHandler, flask_update_log_level, flask_get_log_level 6 | from DeviceManager.utils import HTTPRequestError 7 | from DeviceManager.SerializationModels import log_schema 8 | 9 | from flask import Flask 10 | from unittest.mock import Mock, MagicMock, patch 11 | 12 | class TestLoggerHandler(unittest.TestCase): 13 | 14 | app = Flask(__name__) 15 | 16 | def test_update_valid_level_log(self): 17 | self.assertTrue(LoggerHandler.update_log_level("info"), "") 18 | with self.assertRaises(HTTPRequestError): 19 | LoggerHandler.update_log_level("teste") 20 | 21 | def test_get_actual_level_log(self): 22 | level = LoggerHandler.get_log_level() 23 | self.assertIsNotNone(level) 24 | 25 | def 
test_endpoint_get_log(self): 26 | with self.app.test_request_context(): 27 | result = flask_get_log_level() 28 | self.assertEqual(result.status, '200 OK') 29 | self.assertEqual(json.loads(result.response[0])[ 30 | 'level'], 'INFO') 31 | 32 | def test_endpoint_update_log(self): 33 | with self.app.test_request_context(): 34 | result = flask_update_log_level() 35 | self.assertEqual(result.status, '400 BAD REQUEST') 36 | self.assertEqual(json.loads(result.response[0])[ 37 | 'message'], 'Payload must be valid JSON, and Content-Type set accordingly') 38 | -------------------------------------------------------------------------------- /tests/test_template_handler.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import json 3 | import unittest 4 | from unittest.mock import Mock, MagicMock, patch, call 5 | from flask import Flask 6 | 7 | from DeviceManager.DatabaseModels import DeviceTemplate 8 | from DeviceManager.TemplateHandler import TemplateHandler, flask_get_templates, flask_delete_all_templates, \ 9 | flask_get_template, flask_remove_template, paginate, attr_format, refresh_template_update_column 10 | from DeviceManager.utils import HTTPRequestError 11 | from DeviceManager.BackendHandler import KafkaInstanceHandler 12 | from datetime import datetime 13 | 14 | 15 | from .token_test_generator import generate_token 16 | 17 | from alchemy_mock.mocking import AlchemyMagicMock, UnifiedAlchemyMagicMock 18 | 19 | 20 | class TestTemplateHandler(unittest.TestCase): 21 | 22 | app = Flask(__name__) 23 | 24 | @patch('DeviceManager.TemplateHandler.db') 25 | def test_get_templates(self, db_mock): 26 | db_mock.session = AlchemyMagicMock() 27 | token = generate_token() 28 | 29 | params_query = {'page_number': 5, 'per_page': 1, 30 | 'sortBy': None, 'attr': [], 'attr_type': [], 'label': 'dummy'} 31 | result = TemplateHandler.get_templates(params_query, token) 32 | self.assertIsNotNone(result) 33 | 34 | # test using attrs 35 | 
params_query = {'page_number': 1, 'per_page': 1, 36 | 'sortBy': None, 'attr': ['foo=bar'], 'attr_type': []} 37 | result = TemplateHandler.get_templates(params_query, token) 38 | self.assertIsNotNone(result) 39 | 40 | # test using sortBy 41 | params_query = {'page_number': 1, 'per_page': 1, 42 | 'sortBy': 'label', 'attr': ['foo=bar'], 'attr_type': []} 43 | result = TemplateHandler.get_templates(params_query, token) 44 | self.assertIsNotNone(result) 45 | 46 | # test without querys 47 | params_query = {'page_number': 5, 'per_page': 1, 48 | 'sortBy': None, 'attr': [], 'attr_type': []} 49 | result = TemplateHandler.get_templates(params_query, token) 50 | self.assertIsNotNone(result) 51 | 52 | @patch('DeviceManager.TemplateHandler.db') 53 | def test_create_tempĺate(self, db_mock): 54 | db_mock.session = AlchemyMagicMock() 55 | token = generate_token() 56 | 57 | data = """{ 58 | "label": "SensorModel", 59 | "attrs": [ 60 | { 61 | "label": "temperature", 62 | "type": "dynamic", 63 | "value_type": "float" 64 | }, 65 | { 66 | "label": "model-id", 67 | "type": "static", 68 | "value_type": "string", 69 | "static_value": "model-001" 70 | } 71 | ] 72 | }""" 73 | 74 | params_query = {'content_type': 'application/json', 'data': data} 75 | result = TemplateHandler.create_template(params_query, token) 76 | self.assertIsNotNone(result) 77 | self.assertEqual(result['result'], 'ok') 78 | self.assertIsNotNone(result['template']) 79 | 80 | @patch('DeviceManager.TemplateHandler.db') 81 | def test_get_template(self, db_mock): 82 | db_mock.session = AlchemyMagicMock() 83 | token = generate_token() 84 | 85 | template = DeviceTemplate(id=1, label='template1') 86 | params_query = {'attr_format': 'both'} 87 | 88 | with patch('DeviceManager.TemplateHandler.assert_template_exists') as mock_template_exist_wrapper: 89 | mock_template_exist_wrapper.return_value = template 90 | result = TemplateHandler.get_template( 91 | params_query, 'template_id_test', token) 92 | self.assertIsNotNone(result) 93 | 
94 | mock_template_exist_wrapper.return_value = None 95 | result = TemplateHandler.get_template( 96 | params_query, 'template_id_test', token) 97 | self.assertFalse(result) 98 | 99 | @patch('DeviceManager.TemplateHandler.db') 100 | def test_delete_all_templates(self, db_mock): 101 | db_mock.session = AlchemyMagicMock() 102 | token = generate_token() 103 | 104 | result = TemplateHandler.delete_all_templates(token) 105 | self.assertIsNotNone(result) 106 | self.assertTrue(result) 107 | self.assertEqual(result['result'], 'ok') 108 | 109 | @patch('DeviceManager.TemplateHandler.db') 110 | def test_remove_template(self, db_mock): 111 | db_mock.session = AlchemyMagicMock() 112 | token = generate_token() 113 | 114 | template = DeviceTemplate(id=1, label='template1') 115 | 116 | with patch('DeviceManager.TemplateHandler.assert_template_exists') as mock_template_exist_wrapper: 117 | mock_template_exist_wrapper.return_value = template 118 | 119 | result = TemplateHandler.remove_template(1, token) 120 | self.assertIsNotNone(result) 121 | self.assertTrue(result) 122 | self.assertTrue(result['removed']) 123 | self.assertEqual(result['result'], 'ok') 124 | 125 | @patch('DeviceManager.TemplateHandler.db') 126 | def test_update_template(self, db_mock): 127 | db_mock.session = AlchemyMagicMock() 128 | token = generate_token() 129 | 130 | template = DeviceTemplate(id=1, label='SensorModel') 131 | 132 | data = """{ 133 | "label": "SensorModelUpdated", 134 | "attrs": [ 135 | { 136 | "label": "temperature", 137 | "type": "dynamic", 138 | "value_type": "float" 139 | }, 140 | { 141 | "label": "model-id", 142 | "type": "static", 143 | "value_type": "string", 144 | "static_value": "model-001" 145 | } 146 | ] 147 | }""" 148 | 149 | params_query = {'content_type': 'application/json', 'data': data} 150 | 151 | with patch('DeviceManager.TemplateHandler.assert_template_exists') as mock_template_exist_wrapper: 152 | mock_template_exist_wrapper.return_value = template 153 | 154 | with 
patch.object(KafkaInstanceHandler, "getInstance", return_value=MagicMock()): 155 | result = TemplateHandler.update_template( 156 | params_query, 1, token) 157 | self.assertIsNotNone(result) 158 | self.assertTrue(result) 159 | self.assertTrue(result['updated']) 160 | self.assertEqual(result['result'], 'ok') 161 | 162 | def test_attr_format(self): 163 | params = {'data_attrs': [], 'config_attrs': [], 164 | 'id': 1, 'attrs': [], 'label': 'template1'} 165 | 166 | result = attr_format('split', params) 167 | self.assertNotIn('attrs', result) 168 | 169 | result = attr_format('single', params) 170 | self.assertNotIn('config_attrs', result) 171 | self.assertNotIn('data_attrs', result) 172 | 173 | @patch('DeviceManager.TemplateHandler.db') 174 | def test_paginate(self, db_mock): 175 | db_mock.session = AlchemyMagicMock() 176 | 177 | result = paginate(db_mock.session.query, 0, 10, True) 178 | self.assertIsNone(result) 179 | 180 | @patch('DeviceManager.TemplateHandler.db') 181 | def test_endpoint_get_templates(self, db_mock): 182 | db_mock.session = AlchemyMagicMock() 183 | with self.app.test_request_context(): 184 | with patch("DeviceManager.TemplateHandler.retrieve_auth_token") as auth_mock: 185 | auth_mock.return_value = generate_token() 186 | 187 | with patch.object(TemplateHandler, "get_templates") as mock_templates: 188 | mock_templates.return_value = {'pagination': {}, 'templates': []} 189 | result = flask_get_templates() 190 | self.assertEqual(result.status, '200 OK') 191 | self.assertIsNotNone(result) 192 | 193 | @patch('DeviceManager.TemplateHandler.db') 194 | def test_endpoint_delete_all_templates(self, db_mock): 195 | db_mock.session = AlchemyMagicMock() 196 | with self.app.test_request_context(): 197 | with patch("DeviceManager.TemplateHandler.retrieve_auth_token") as auth_mock: 198 | auth_mock.return_value = generate_token() 199 | result = flask_delete_all_templates() 200 | self.assertIsNotNone(result) 201 | self.assertEqual(result.status, '200 OK') 202 | 
self.assertEqual(json.loads(result.response[0])['result'], 'ok') 203 | 204 | 205 | @patch('DeviceManager.TemplateHandler.db') 206 | @patch('flask_sqlalchemy._QueryProperty.__get__') 207 | def test_endpoint_get_template(self, db_mock, query_property_getter_mock): 208 | db_mock.session = AlchemyMagicMock() 209 | with self.app.test_request_context(): 210 | with patch("DeviceManager.TemplateHandler.retrieve_auth_token") as auth_mock: 211 | auth_mock.return_value = generate_token() 212 | 213 | with patch.object(TemplateHandler, "get_template") as mock_template: 214 | mock_template.return_value = {'label': 'test_template', 'id':1, 'created': '2019-08-29T18:18:07.801602+00:00', 'attrs': []} 215 | result = flask_get_template('test_template_id') 216 | self.assertEqual(result.status, '200 OK') 217 | self.assertIsNotNone(result.response) 218 | 219 | @patch('DeviceManager.TemplateHandler.db') 220 | @patch('flask_sqlalchemy._QueryProperty.__get__') 221 | def test_endpoint_delete_template(self, db_mock, query_property_getter_mock): 222 | db_mock.session = AlchemyMagicMock() 223 | with self.app.test_request_context(): 224 | with patch("DeviceManager.TemplateHandler.retrieve_auth_token") as auth_mock: 225 | auth_mock.return_value = generate_token() 226 | 227 | with patch.object(TemplateHandler, "remove_template") as mock_remove_template: 228 | mock_remove_template.return_value = {'result': 'ok', 'removed': {'id': 1, 'label': 'test_template', 'created': '2019-08-29T18:18:07.801602+00:00'}} 229 | result = flask_remove_template('test_template_id') 230 | self.assertEqual(result.status, '200 OK') 231 | self.assertEqual(json.loads(result.response[0])['result'], 'ok') 232 | 233 | @patch('DeviceManager.TemplateHandler.db') 234 | def test_refresh_template_update_column(self, db_mock): 235 | template = DeviceTemplate(id=1, label='template1') 236 | refresh_template_update_column(db_mock, template) 237 | self.assertTrue(isinstance(template.updated, datetime)) 238 | 239 | 
@patch('DeviceManager.TemplateHandler.db') 240 | def test_not_refresh_template_update_column(self, db_mock): 241 | template = DeviceTemplate(id=1, label='template1') 242 | db_mock.session.new = set() 243 | db_mock.session.deleted = set() 244 | refresh_template_update_column(db_mock, template) 245 | self.assertIsNone(template.updated) 246 | -------------------------------------------------------------------------------- /tests/test_tenancy_manager.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import json 3 | import unittest 4 | from unittest.mock import Mock, MagicMock, patch 5 | from sqlalchemy.sql import exists 6 | 7 | from DeviceManager.TenancyManager import install_triggers, create_tenant, init_tenant, list_tenants 8 | 9 | from alchemy_mock.mocking import AlchemyMagicMock, UnifiedAlchemyMagicMock 10 | 11 | class TestTenancyManager(unittest.TestCase): 12 | 13 | def test_install_triggers(self): 14 | db_mock = AlchemyMagicMock() 15 | self.assertIsNone(install_triggers(db_mock, 'admin')) 16 | 17 | def test_create_tenant(self): 18 | db_mock = AlchemyMagicMock() 19 | self.assertIsNone(create_tenant('admin', db_mock)) 20 | 21 | def test_init_tenant(self): 22 | db_mock = Mock(return_value=None) 23 | self.assertIsNone(init_tenant('admin', db_mock)) 24 | 25 | def test_list_tenants(self): 26 | db_mock = AlchemyMagicMock() 27 | self.assertFalse(list_tenants(db_mock)) 28 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import json 3 | import unittest 4 | 5 | from flask import Flask 6 | from DeviceManager.utils import format_response, get_pagination, get_allowed_service, decrypt, retrieve_auth_token 7 | from DeviceManager.utils import HTTPRequestError 8 | 9 | from .token_test_generator import generate_token 10 | 11 | 12 | class Request: 13 | def __init__(self, 
data): 14 | self.headers = data['headers'] 15 | self.args = data['args'] 16 | self.data = data['body'] 17 | 18 | 19 | class TestUtils(unittest.TestCase): 20 | 21 | app = Flask(__name__) 22 | 23 | def test_format_response(self): 24 | with self.app.test_request_context(): 25 | result = format_response(200, 'Unity test of message formatter') 26 | self.assertEqual(result.status, '200 OK') 27 | self.assertEqual(json.loads(result.response[0])[ 28 | 'message'], 'Unity test of message formatter') 29 | 30 | result = format_response(202) 31 | self.assertEqual(result.status, '202 ACCEPTED') 32 | self.assertEqual(json.loads(result.response[0])['message'], 'ok') 33 | 34 | result = format_response(404) 35 | self.assertEqual(result.status, '404 NOT FOUND') 36 | self.assertEqual(json.loads(result.response[0])[ 37 | 'message'], 'Request failed') 38 | 39 | def test_get_pagination(self): 40 | 41 | args = {'page_size': 10,'page_num': 1} 42 | req = {'headers': {'authorization': generate_token()},'args': args,'body': ''} 43 | 44 | page, per_page = get_pagination(Request(req)) 45 | self.assertEqual(page, 1) 46 | self.assertEqual(per_page, 10) 47 | 48 | with self.assertRaises(HTTPRequestError): 49 | args = {'page_size': 10,'page_num': 0} 50 | req = {'headers': {'authorization': generate_token()},'args': args,'body': ''} 51 | page, per_page = get_pagination(Request(req)) 52 | 53 | with self.assertRaises(HTTPRequestError): 54 | args = {'page_size': 0,'page_num': 2} 55 | req = {'headers': {'authorization': generate_token()},'args': args,'body': ''} 56 | page, per_page = get_pagination(Request(req)) 57 | 58 | def test_get_allowed_service(self): 59 | token = generate_token() 60 | 61 | with self.assertRaises(ValueError): 62 | get_allowed_service(None) 63 | 64 | result = get_allowed_service(token) 65 | self.assertIsNotNone(result) 66 | self.assertEqual(result, 'admin') 67 | 68 | with self.assertRaises(ValueError): 69 | get_allowed_service('Is.Not_A_Valid_Token') 70 | 71 | 72 | def 
test_decrypt(self): 73 | result = decrypt(b"\xa97\xa4o\xba\xddx\xe0\xe9\x8f\xe2\xc4V\x85\xf7'") 74 | self.assertEqual(result, b'') 75 | 76 | with self.assertRaises(ValueError): 77 | result = decrypt('12345678') 78 | 79 | def test_retrieve_auth_token(self): 80 | token = generate_token() 81 | req = {'headers': {'authorization': token},'args': '','body': ''} 82 | 83 | result = retrieve_auth_token(Request(req)) 84 | self.assertIsNotNone(result) 85 | self.assertEqual(result, token) 86 | 87 | with self.assertRaises(HTTPRequestError): 88 | req = {'headers': {},'args': '','body': ''} 89 | result = retrieve_auth_token(Request(req)) 90 | -------------------------------------------------------------------------------- /tests/token_test_generator.py: -------------------------------------------------------------------------------- 1 | import jwt 2 | 3 | 4 | def generate_token(): 5 | service = 'admin' 6 | encode_data = { 7 | 'userid': 1, 'name': 'Admin (superuser)', 'groups': [1], 'iat': 8 | 1517339633, 'exp': 1517340053, 'email': 'admin@noemail.com', 'profile': 9 | 'admin', 'iss': 'eGfIBvOLxz5aQxA92lFk5OExZmBMZDDh', 'service': service, 10 | 'jti': '7e3086317df2c299cef280932da856e5', 'username': 'admin' 11 | } 12 | 13 | jwt_token = jwt.encode(encode_data, 'secret', algorithm='HS256').decode() 14 | 15 | # Substitute Authorization with actual token 16 | auth = 'Bearer ' + jwt_token 17 | return auth 18 | --------------------------------------------------------------------------------