├── .github ├── ISSUE_TEMPLATE │ ├── BUG-REPORT.yml │ └── QUESTION.yml └── workflows │ └── build.yml ├── .gitignore ├── CONTRIBUTING.md ├── DOCKER_BUILD └── Dockerfile ├── Jenkinsfile ├── LICENSE ├── NOTICE ├── README.md ├── RELEASE_NOTES.md ├── build.gradle.kts ├── docs ├── normative_statements.md └── roadmap.md ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── release_notes_images ├── 3.0.0_coverage.png └── 3.0.0_webui.png ├── settings.gradle.kts ├── specification ├── .gitignore ├── Sparkplug_Converted.adoc ├── Sparkplug_Orig.adoc ├── build.gradle.kts ├── gradle.properties ├── gradlew ├── gradlew.bat ├── settings.gradle.kts └── src │ └── main │ ├── asciidoc │ ├── .asciidoctorconfig │ ├── assets │ │ ├── images │ │ │ └── extracted-media │ │ │ │ └── media │ │ │ │ ├── image3.png │ │ │ │ └── image4.png │ │ └── plantuml │ │ │ ├── HA-mqtt-server-cluster-with-load-balancer.puml │ │ │ ├── HA-mqtt-server-cluster.puml │ │ │ ├── edge-node-mqtt-session-establishment.puml │ │ │ ├── edge-node-ndata-and-ncmd-message-flow.puml │ │ │ ├── host-session-establishment.puml │ │ │ ├── infrastructure-components.puml │ │ │ ├── mqtt-device-session-establishment.puml │ │ │ ├── payload-metric-folder-structure.puml │ │ │ ├── primary-application-state-flow.puml │ │ │ ├── primary-host-application-state-flow-diagram.puml │ │ │ ├── simple-mqtt-infrastructure.puml │ │ │ ├── sparkplugb-metric-structure-1.puml │ │ │ └── sparkplugb-metric-structure-2.puml │ ├── chapters │ │ ├── .asciidoctorconfig │ │ ├── Sparkplug_10_Conformance.adoc │ │ ├── Sparkplug_1_Introduction.adoc │ │ ├── Sparkplug_2_Principles.adoc │ │ ├── Sparkplug_3_Components.adoc │ │ ├── Sparkplug_4_Topics.adoc │ │ ├── Sparkplug_5_Operational_Behavior.adoc │ │ ├── Sparkplug_6_Payloads.adoc │ │ ├── Sparkplug_7_Security.adoc │ │ ├── Sparkplug_8_HA.adoc │ │ ├── Sparkplug_9_Acknowledgements.adoc │ │ ├── Sparkplug_Appendix_A.adoc │ │ └── Sparkplug_Appendix_B.adoc │ ├── sparkplug_spec.adoc │ └── themes │ │ └── sparkplug-theme.yml │ └── xsl │ ├── normative-statements.xsl │ └── tck-audit.xsl └── tck ├── .gitignore ├── README.html ├── README.md ├── UserGuide.adoc ├── assertion_check.py ├── build.gradle.kts ├── eftckl-v10 ├── gradle.properties ├── gradlew ├── gradlew.bat ├── hivemq-configuration ├── config.xml └── logback.xml ├── package.py ├── package.sh ├── report.py ├── requirements.py ├── settings.gradle.kts ├── src └── main │ └── java │ └── org │ └── eclipse │ └── sparkplug │ ├── impl │ └── exception │ │ ├── SparkplugErrorCode.java │ │ ├── SparkplugException.java │ │ ├── SparkplugInvalidTypeException.java │ │ ├── SparkplugParsingException.java │ │ ├── json │ │ └── DataSetDeserializer.java │ │ ├── message │ │ ├── PayloadEncoder.java │ │ └── SparkplugBPayloadEncoder.java │ │ └── model │ │ ├── DataSet.java │ │ ├── DataSetDataType.java │ │ ├── DeviceDescriptor.java │ │ ├── EdgeNodeDescriptor.java │ │ ├── File.java │ │ ├── MessageType.java │ │ ├── MetaData.java │ │ ├── Metric.java │ │ ├── MetricDataType.java │ │ ├── Parameter.java │ │ ├── ParameterDataType.java │ │ ├── PropertyDataType.java │ │ ├── PropertySet.java │ │ ├── PropertyValue.java │ │ ├── Row.java │ │ ├── SparkplugBPayload.java │ │ ├── SparkplugDescriptor.java │ │ ├── SparkplugMeta.java │ │ ├── Template.java │ │ ├── Topic.java │ │ └── Value.java │ └── tck │ ├── ConnectInterceptor.java │ ├── DisconnectInterceptor.java │ ├── PublishInterceptor.java │ ├── SparkplugClientLifecycleEventListener.java │ ├── SparkplugClientLifecycleEventListenerProvider.java 
│ ├── SparkplugHiveMQExtension.java │ ├── SubscribeInterceptor.java │ ├── test │ ├── Monitor.java │ ├── Results.java │ ├── TCK.java │ ├── TCKTest.java │ ├── broker │ │ ├── AwareBrokerTest.java │ │ ├── CompliantBrokerTest.java │ │ └── test │ │ │ ├── BrokerAwareFeatureTester.java │ │ │ ├── BrokerConformanceFeatureTester.java │ │ │ └── results │ │ │ ├── AsciiCharsInClientIdTestResults.java │ │ │ ├── AwareTestResult.java │ │ │ ├── ClientIdLengthTestResults.java │ │ │ ├── ComplianceTestResult.java │ │ │ ├── PayloadTestResults.java │ │ │ ├── QosTestResult.java │ │ │ ├── SharedSubscriptionTestResult.java │ │ │ ├── TopicLengthTestResults.java │ │ │ ├── TopicUtils.java │ │ │ ├── Tuple.java │ │ │ └── WildcardSubscriptionsTestResult.java │ ├── common │ │ ├── Constants.java │ │ ├── HostUtils.java │ │ ├── PersistentUtils.java │ │ ├── SparkplugBProto.java │ │ ├── StatePayload.java │ │ └── Utils.java │ ├── edge │ │ ├── MultipleBrokerTest.java │ │ ├── PrimaryHostTest.java │ │ ├── ReceiveCommandTest.java │ │ ├── SendComplexDataTest.java │ │ ├── SendDataTest.java │ │ ├── SessionEstablishmentTest.java │ │ └── SessionTerminationTest.java │ └── host │ │ ├── EdgeSessionTerminationTest.java │ │ ├── MessageOrderingTest.java │ │ ├── MultipleBrokerTest.java │ │ ├── SendCommandTest.java │ │ ├── SessionEstablishmentTest.java │ │ └── SessionTerminationTest.java │ └── utility │ ├── EdgeNode.java │ └── HostApplication.java └── webconsole ├── .editorconfig ├── .gitignore ├── README.md ├── assets ├── EclipseLogo.png ├── README.md └── SparkplugLogo.png ├── components ├── MqttConnect.vue ├── Navbar.vue ├── README.md ├── Sparkplug │ ├── Broker.vue │ ├── ClientConnect.vue │ ├── EoNNode.vue │ ├── HostApplication.vue │ └── Logo.vue ├── Tck │ ├── AllTests.vue │ ├── Logging.vue │ ├── Test.vue │ ├── TestResultSetup.vue │ ├── Tests.vue │ └── TestsInformation.vue └── WebConsole │ ├── Information.vue │ └── Popup.vue ├── layouts ├── README.md └── default.vue ├── middleware └── README.md ├── nuxt.config.js ├── package.json ├── pages ├── README.md └── index.vue ├── plugins └── README.md ├── prettier.config.js ├── static ├── README.md ├── bootstrap-vue-icons.css.map ├── bootstrap-vue.css.map ├── bootstrap.css.map └── favicon.ico ├── store └── README.md └── yarn.lock /.github/ISSUE_TEMPLATE/BUG-REPORT.yml: -------------------------------------------------------------------------------- 1 | name: Bug Report 2 | description: Report a bug in the Sparkplug TCK 3 | title: "Bug Report: [ADD DETAILS HERE]" 4 | labels: ["bugreport", "triage"] 5 | body: 6 | - type: markdown 7 | attributes: 8 | value: | 9 | Thanks for taking the time to fill out this bug report 10 | - type: textarea 11 | id: what-happened 12 | attributes: 13 | label: What happened? 14 | description: Also, what did you expect to happen? 15 | validations: 16 | required: true 17 | - type: textarea 18 | id: product-name 19 | attributes: 20 | label: What is the product or software this issue was discovered with? 21 | validations: 22 | required: true 23 | - type: textarea 24 | id: repro-steps 25 | attributes: 26 | label: What exact steps need to be performed to reproduce the problem? 27 | validations: 28 | required: true 29 | - type: textarea 30 | id: listing-issue 31 | attributes: 32 | label: Is this related to a Sparkplug Listing request? If so, link the issue from https://github.com/eclipse-sparkplug/sparkplug.listings here. 33 | - type: dropdown 34 | id: tck-version 35 | attributes: 36 | label: Version 37 | description: What version of the Sparkplug TCK are you running? 
38 | options: 39 | - 3.0.0 (Default) 40 | validations: 41 | required: true 42 | - type: checkboxes 43 | id: terms-eftl 44 | attributes: 45 | label: Accept EFTL Terms 46 | description: By checking this box I acknowledge that the organization I represent accepts the terms of the [EFTL](https://www.eclipse.org/legal/tck.php). 47 | options: 48 | - label: I agree to the terms of EFTL 49 | required: true 50 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/QUESTION.yml: -------------------------------------------------------------------------------- 1 | name: Question 2 | description: Ask a question about Sparkplug or the Sparkplug TCK 3 | title: "Question: [ADD SUMMARY HERE]" 4 | labels: ["question", "triage"] 5 | body: 6 | - type: markdown 7 | attributes: 8 | value: | 9 | Thanks for your interest in Sparkplug! 10 | - type: textarea 11 | id: question 12 | attributes: 13 | label: What do you want to know? 14 | description: Be as precise as possible 15 | validations: 16 | required: true 17 | - type: textarea 18 | id: listing-issue 19 | attributes: 20 | label: Is this related to a Sparkplug Listing request? If so, link the issue from https://github.com/eclipse-sparkplug/sparkplug.listings here. 21 | - type: dropdown 22 | id: tck-version 23 | attributes: 24 | label: Version 25 | description: If this is related to the TCK, what version of the Sparkplug TCK are you running? 26 | options: 27 | - 3.0.0 (Default) 28 | - type: checkboxes 29 | id: terms-eftl 30 | attributes: 31 | label: Accept EFTL Terms 32 | description: By checking this box I acknowledge that the organization I represent accepts the terms of the [EFTL](https://www.eclipse.org/legal/tck.php). 33 | options: 34 | - label: I agree to the terms of EFTL 35 | required: true 36 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: Build 2 | on: 3 | push: {} 4 | pull_request: {} 5 | jobs: 6 | 7 | gradleValidation: 8 | name: Gradle Wrapper 9 | runs-on: ubuntu-latest 10 | steps: 11 | 12 | - name: Fetch Sources 13 | uses: actions/checkout@v3 14 | 15 | - name: Gradle Wrapper Validation 16 | uses: gradle/wrapper-validation-action@v1 17 | 18 | build: 19 | name: Build 20 | needs: gradleValidation 21 | runs-on: ubuntu-latest 22 | steps: 23 | 24 | - name: Fetch Sources 25 | uses: actions/checkout@v3 26 | 27 | - name: Setup Java 28 | uses: actions/setup-java@v3 29 | with: 30 | cache: 'gradle' 31 | java-version: 11 32 | distribution: 'temurin' 33 | 34 | - name: Setup Gradle Wrapper Cache 35 | uses: actions/cache@v3 36 | with: 37 | path: ~/.gradle/wrapper 38 | key: ${{ runner.os }}-gradle-wrapper-${{ hashFiles('**/gradle/wrapper/gradle-wrapper.properties') }} 39 | 40 | - name: Build Plugin 41 | run: ./gradlew build 42 | 43 | - name: Upload specification artifact 44 | uses: actions/upload-artifact@v3 45 | with: 46 | name: sparkplug_spec 47 | path: specification/build/docs/pdf/sparkplug_spec.pdf 48 | if-no-files-found: error 49 | 50 | - name: Upload coverage artifact 51 | uses: actions/upload-artifact@v3 52 | with: 53 | name: coverage-report 54 | path: tck/build/coverage-report/**/* 55 | if-no-files-found: error 56 | 57 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea 2 | .project 3 | .mvn 4 | SparkplugTCKClient* 5 | 
SparkplugTCKedge* 6 | SparkplugTCKdevice* 7 | tck/src/license/THIRD-PARTY.properties 8 | .gradle 9 | build/ 10 | target/ 11 | specification/src/main/asciidoc/docinfo-header.html 12 | .settings 13 | Sparkplug_TCK_*.zip 14 | tck/Sparkplug_TCK_*.zip 15 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to Sparkplug Specification Project 2 | 3 | Thanks for your interest in this project. 4 | 5 | ## Project description 6 | 7 | Define an MQTT Topic Namespace 8 | 9 | * https://projects.eclipse.org/projects/iot.sparkplug 10 | 11 | ## Developer resources 12 | 13 | Information regarding source code management, builds, coding standards, and 14 | more. 15 | 16 | * https://projects.eclipse.org/projects/iot.sparkplug/developer 17 | 18 | The project maintains the following source code repositories 19 | 20 | * https://github.com/eclipse/sparkplug 21 | 22 | ## Eclipse Development Process 23 | 24 | This Eclipse Foundation open project is governed by the Eclipse Foundation 25 | Development Process and operates under the terms of the Eclipse IP Policy. 26 | 27 | ## Specifications 28 | 29 | This specification project operates under the terms of the Eclipse Foundation 30 | Specification process. 31 | 32 | * https://eclipse.org/projects/dev_process 33 | * https://www.eclipse.org/org/documents/Eclipse_IP_Policy.pdf 34 | * https://www.eclipse.org/projects/efsp/ 35 | * https://www.eclipse.org/legal/efsp_non_assert.php 36 | 37 | ## Eclipse Contributor Agreement 38 | 39 | Before your contribution can be accepted by the project team contributors must 40 | electronically sign the Eclipse Contributor Agreement (ECA). 41 | 42 | * http://www.eclipse.org/legal/ECA.php 43 | 44 | Commits that are provided by non-committers must have a Signed-off-by field in 45 | the footer indicating that the author is aware of the terms by which the 46 | contribution has been provided to the project. The non-committer must 47 | additionally have an Eclipse Foundation account and must have a signed Eclipse 48 | Contributor Agreement (ECA) on file. 49 | 50 | For more information, please see the Eclipse Committer Handbook: 51 | https://www.eclipse.org/projects/handbook/#resources-commit 52 | 53 | ## Contact 54 | 55 | Contact the project developers via the project's "dev" list. 56 | 57 | * https://accounts.eclipse.org/mailing-list/sparkplug-dev 58 | -------------------------------------------------------------------------------- /DOCKER_BUILD/Dockerfile: -------------------------------------------------------------------------------- 1 | #******************************************************************************* 2 | # Copyright (c) 2020 Eclipse Foundation and others. 3 | # This program and the accompanying materials are made available 4 | # under the terms of the Eclipse Public License 2.0 5 | # which is available at http://www.eclipse.org/legal/epl-v20.html, 6 | # or the MIT License which is available at https://opensource.org/licenses/MIT. 
7 | # SPDX-License-Identifier: EPL-2.0 OR MIT 8 | #******************************************************************************* 9 | FROM eclipsecbi/jiro-agent-centos-8-jdk11 AS builder 10 | 11 | USER root 12 | 13 | RUN dnf install -y java-11-openjdk-devel \ 14 | graphviz \ 15 | asciidoctor \ 16 | ruby \ 17 | ruby-devel \ 18 | rubygems \ 19 | asciidoc \ 20 | xmlto \ 21 | nano \ 22 | gpg \ 23 | pinentry \ 24 | python3 25 | 26 | RUN wget https://services.gradle.org/distributions/gradle-7.6-bin.zip -P /tmp 27 | RUN mkdir /opt/gradle 28 | RUN unzip -d /opt/gradle /tmp/gradle-7.6-bin.zip 29 | RUN echo "export PATH=$PATH:/opt/gradle/gradle-7.6/bin" >> /home/jenkins/.bashrc 30 | 31 | USER 10001 32 | -------------------------------------------------------------------------------- /Jenkinsfile: -------------------------------------------------------------------------------- 1 | pipeline { 2 | agent { 3 | kubernetes { 4 | label 'sparkplug-agent-pod' 5 | yaml """ 6 | apiVersion: v1 7 | kind: Pod 8 | spec: 9 | containers: 10 | - name: sparkplug-build 11 | image: cirruslink/sparkplug-build:latest 12 | command: 13 | - cat 14 | tty: true 15 | resources: 16 | limits: 17 | memory: "4Gi" 18 | cpu: "1" 19 | requests: 20 | memory: "4Gi" 21 | cpu: "1" 22 | - name: jnlp 23 | volumeMounts: 24 | - mountPath: "/home/jenkins/.gnupg" 25 | name: "jenkins-home-gnupg" 26 | readOnly: false 27 | volumes: 28 | - name: "jenkins-home-gnupg" 29 | emptyDir: 30 | medium: "" 31 | """ 32 | } 33 | } 34 | 35 | stages { 36 | stage('build') { 37 | steps { 38 | container('sparkplug-build') { 39 | sh 'Xvfb :0 -screen 0 1600x1200x16 & export DISPLAY=:0' 40 | sh 'GRADLE_USER_HOME="/home/jenkins/.gradle" ./gradlew -Dorg.gradle.jvmargs="-Xmx1536m -Xms64m -Dfile.encoding=UTF-8 -Djava.awt.headless=true" clean build' 41 | } 42 | } 43 | } 44 | 45 | stage('sign') { 46 | steps { 47 | withCredentials([ 48 | [$class: 'FileBinding', credentialsId: 'secret-subkeys.asc', variable: 'KEYRING'], 49 | [$class: 'StringBinding', credentialsId: 'gpg-passphrase', variable: 'KEYRING_PASSPHRASE'] 50 | ]) { 51 | sh ''' 52 | curl -o tck/build/hivemq-extension/sparkplug-tck-4.0.0-signed.jar -F file=@tck/build/hivemq-extension/sparkplug-tck-4.0.0.jar https://cbi.eclipse.org/jarsigner/sign 53 | export GPG_TTY=/dev/console 54 | 55 | gpg --batch --import "${KEYRING}" 56 | for fpr in $(gpg --list-keys --with-colons | awk -F: \'/fpr:/ {print $10}\' | sort -u); do echo -e "5\ny\n" | gpg --batch --command-fd 0 --expert --edit-key ${fpr} trust; done 57 | 58 | mkdir tck/build/hivemq-extension/working_tmp 59 | cd tck/build/hivemq-extension/working_tmp 60 | unzip ../sparkplug-tck-4.0.0.zip 61 | mv ../sparkplug-tck-4.0.0-signed.jar sparkplug-tck/sparkplug-tck-4.0.0.jar 62 | zip -r ../sparkplug-tck-4.0.0.zip sparkplug-tck 63 | cd .. 
64 | gpg -v --no-tty --passphrase "${KEYRING_PASSPHRASE}" -c --batch sparkplug-tck-4.0.0.zip 65 | 66 | echo "no-tty" >> ~/.gnupg/gpg.conf 67 | gpg -vvv --no-permission-warning --output "sparkplug-tck-4.0.0.zip.sig" --batch --yes --pinentry-mode=loopback --passphrase="${KEYRING_PASSPHRASE}" --no-tty --detach-sig sparkplug-tck-4.0.0.zip 68 | cd ../../ 69 | ./package.sh 70 | gpg -vvv --no-permission-warning --output "Eclipse-Sparkplug-TCK-4.0.0.zip.sig" --batch --yes --pinentry-mode=loopback --passphrase="${KEYRING_PASSPHRASE}" --no-tty --detach-sig Eclipse-Sparkplug-TCK-4.0.0.zip 71 | gpg -vvv --verify Eclipse-Sparkplug-TCK-4.0.0.zip.sig 72 | ''' 73 | } 74 | } 75 | } 76 | 77 | stage('upload') { 78 | steps { 79 | sshagent(credentials: ['projects-storage.eclipse.org-bot-ssh']) { 80 | sh ''' 81 | ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=yes genie.sparkplug@projects-storage.eclipse.org rm -rf /home/data/httpd/download.eclipse.org/sparkplug/4.0.0/* 82 | ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=yes genie.sparkplug@projects-storage.eclipse.org mkdir -p /home/data/httpd/download.eclipse.org/sparkplug/4.0.0 83 | scp -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=yes tck/Eclipse-Sparkplug-TCK-4.0.0.zip genie.sparkplug@projects-storage.eclipse.org:/home/data/httpd/download.eclipse.org/sparkplug/4.0.0/ 84 | scp -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=yes tck/Eclipse-Sparkplug-TCK-4.0.0.zip.sig genie.sparkplug@projects-storage.eclipse.org:/home/data/httpd/download.eclipse.org/sparkplug/4.0.0/ 85 | ''' 86 | } 87 | } 88 | } 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | # Notices for Sparkplug Specification Project 2 | 3 | This content is produced and maintained by the Sparkplug Specification Project 4 | project. 5 | 6 | * Project home: https://projects.eclipse.org/projects/iot.sparkplug 7 | 8 | ## Trademarks 9 | 10 | Sparkplug Specification Project is a trademark of the Eclipse Foundation. 11 | 12 | ## Copyright 13 | 14 | All content is the property of the respective authors or their employers. For 15 | more information regarding authorship of content, please consult the listed 16 | source code repository logs. 17 | 18 | ## Declared Project Licenses 19 | 20 | This program and the accompanying materials are made available under the terms 21 | of the Eclipse Public License v. 2.0 which is available at 22 | https://www.eclipse.org/legal/epl-2.0. 23 | 24 | SPDX-License-Identifier: EPL-2.0 25 | 26 | ## Source Code 27 | 28 | The project maintains the following source code repositories: 29 | 30 | * https://github.com/eclipse-sparkplug/sparkplug 31 | 32 | ## Third-party Content 33 | 34 | This project leverages the following third party content. 35 | 36 | None 37 | 38 | ## Cryptography 39 | 40 | Content may contain encryption software. The country in which you are currently 41 | may have restrictions on the import, possession, and use, and/or re-export to 42 | another country, of encryption software. BEFORE using any encryption software, 43 | please check the country's laws, regulations and policies concerning the import, 44 | possession, or use, and re-export of encryption software, to see if this is 45 | permitted. 
46 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Sparkplug 2 | 3 | Sparkplug®, Sparkplug Compatible, and the Sparkplug Logo are trademarks of the Eclipse Foundation. 4 | 5 | Sparkplug is a specification for MQTT enabled devices and applications to send and receive messages in a stateful way. 6 | While MQTT is stateful by nature, it doesn't ensure that all data on a receiving MQTT application is current or valid. 7 | Sparkplug provides a mechanism for ensuring that remote device or application data is current and valid. 8 | 9 | Sparkplug A was the original version of the Sparkplug specification and used Eclipse Kura's protobuf definition for 10 | payload encoding. However, it was quickly determined that this definition was too limited to handle the metadata that 11 | typical Sparkplug payloads require. As a result, Sparkplug B was developed to add additional features and capabilities 12 | that were not possible in the original Kura payload definition. These features include: 13 | * Complex data types using templates 14 | * Datasets 15 | * Richer metrics with the ability to add property metadata for each metric 16 | * Metric alias support to maintain rich metric naming while keeping bandwidth usage to a minimum 17 | * Historical data 18 | * File data 19 | 20 | Sparkplug Specification v3.0.0: https://www.eclipse.org/tahu/spec/sparkplug_spec.pdf 21 | 22 | # Eclipse Tahu 23 | 24 | Eclipse Tahu provides client libraries and compatible implementations in various languages and for various devices 25 | to show how the device/remote application must connect and disconnect from the MQTT server using the Sparkplug 26 | specification described above. This includes device lifecycle messages such as the required birth and last will & 27 | testament messages that must be sent to ensure the device lifecycle state and data integrity. 28 | 29 | Eclipse Tahu Project: https://projects.eclipse.org/projects/iot.tahu 30 | 31 | Eclipse Tahu Github Repository: https://github.com/eclipse/tahu 32 | 33 | Eclipse Tahu Binaries in Maven Central: https://search.maven.org/search?q=g:org.eclipse.tahu 34 | 35 | # Contributing 36 | * Make sure you submit your PR against the correct branch 37 | - Submit PRs against 'develop' for changes to the Sparkplug 4.0.0 specification and TCK 38 | - Submit PRs against '3.x' for changes to the Sparkplug 3.0.0 specification and TCK 39 | Contributing to the Sparkplug Specification is easy and contributions are welcome. In order to submit a pull request (PR) you must follow these steps. Failure to follow these steps will likely lead to the PR being rejected. 40 | 1. Sign the Eclipse Contributor Agreement (ECA): https://accounts.eclipse.org/user/eca 41 | 2. Make sure the email tied to your Github account is the same one you used to sign the ECA. 42 | 3. Submit your PR against the appropriate branch of the repository. PRs against master will not be accepted. 43 | 4. Sign off on your PR using the '-s' flag. For example: 'git commit -s -m "My brief comment" ChangedFile' 44 | 5. Make sure to include any important context or information associated with the PR in the PR submission. Keep your commit comment brief. 
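# Example: Sparkplug Session Lifecycle

As a rough illustration of the birth and last will & testament lifecycle described above, the sketch below shows an Edge Node registering its NDEATH 'death certificate' as the MQTT Will message before connecting, then publishing its NBIRTH as the first message of the new session. This is a minimal, hypothetical sketch rather than part of this repository: it assumes the Eclipse Paho Java client (org.eclipse.paho.client.mqttv3), a local MQTT server, and placeholder Group and Edge Node IDs, and it stubs out the payloads that a real implementation would encode as Sparkplug B protobuf (for example, with the Eclipse Tahu client libraries).

```java
import org.eclipse.paho.client.mqttv3.MqttClient;
import org.eclipse.paho.client.mqttv3.MqttConnectOptions;
import org.eclipse.paho.client.mqttv3.MqttException;

public class EdgeNodeLifecycleSketch {
    public static void main(String[] args) throws MqttException {
        MqttClient client = new MqttClient("tcp://localhost:1883", "SparkplugEdgeNode1");

        MqttConnectOptions options = new MqttConnectOptions();
        options.setCleanSession(true);

        // Placeholder payloads: a real Edge Node encodes these as Sparkplug B
        // protobuf, with a bdSeq metric tying the NDEATH to the matching NBIRTH.
        byte[] ndeath = new byte[0];
        byte[] nbirth = new byte[0];

        // Register the NDEATH 'death certificate' as the Will before connecting,
        // so the MQTT server publishes it if this session ends unexpectedly.
        options.setWill("spBv1.0/Group1/NDEATH/EdgeNode1", ndeath, 0, false);
        client.connect(options);

        // The NBIRTH must be the first message published in the new session.
        client.publish("spBv1.0/Group1/NBIRTH/EdgeNode1", nbirth, 0, false);

        // Graceful disconnect (does not trigger the Will message).
        client.disconnect();
    }
}
```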
45 | -------------------------------------------------------------------------------- /RELEASE_NOTES.md: -------------------------------------------------------------------------------- 1 | # Eclipse Sparkplug v3.0.0 2 | 3 | ## Sparkplug Specification 4 | * Formalized the previous version (v2.2) of the Sparkplug Specification 5 | * Created 299 total assertions (298 testable) in the v3.0.0 version of the Sparkplug Specification 6 | * Improved, clarified, and expanded on concepts in the Sparkplug Specification 7 | * Implemented in Asciidoc for better version control going forward 8 | * Converted all images to PlantUML for better version control going forward 9 | * Used annotations in the specification to track which assertions are tested by the TCK 10 | * Incorporated tooling to output HTML and PDF versions of the specification as part of the build 11 | * Modified the Host Application STATE message topic and payload to eliminate potential 'stranded' Edge Nodes 12 | 13 | ## Sparkplug Technology Compatibility Kit (TCK) 14 | * Created the base framework for the TCK 15 | * Incorporated a 'coverage report' to show which assertions in the spec are covered by the TCK 16 | ![alt text](https://github.com/eclipse/sparkplug/raw/develop/release_notes_images/3.0.0_coverage.png) 17 | * Created an interactive web UI for using the TCK 18 | ![alt text](https://github.com/eclipse/sparkplug/raw/develop/release_notes_images/3.0.0_webui.png) 19 | * Added automated tests for all 298 testable assertions in the specification 20 | * Added output reporting for users when exercising the TCK 21 | -------------------------------------------------------------------------------- /build.gradle.kts: -------------------------------------------------------------------------------- 1 | tasks.create("build") { 2 | group = "build" 3 | dependsOn(gradle.includedBuild("specification").task(":build"), 4 | gradle.includedBuild("tck").task(":build")) 5 | } 6 | 7 | tasks.create("clean") { 8 | group = "build" 9 | dependsOn(gradle.includedBuild("specification").task(":clean"), 10 | gradle.includedBuild("tck").task(":clean")) 11 | } 12 | -------------------------------------------------------------------------------- /docs/normative_statements.md: -------------------------------------------------------------------------------- 1 | ## Definitions 2 | * The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT", "SHOULD", "SHOULD NOT", "RECOMMENDED", "MAY", and "OPTIONAL" in this document are to be interpreted as described in RFC 2119. RFC 2119: https://tools.ietf.org/html/rfc2119 3 | 4 | ## Sparkplug Topics 5 | * All Sparkplug topics MUST be of the following forms 6 | * [Namespace Token]/[Group ID]/[Sparkplug Verb]/[Edge Node ID] 7 | * [Namespace Token]/[Group ID]/[Sparkplug Verb]/[Edge Node ID]/[Device ID] 8 | * STATE/[Primary Host ID] 9 | 10 | ## Sparkplug Topic Tokens 11 | * Primary Host ID: The UTF-8 identifier for the primary host client. 12 | * Namespace Token: The first MQTT topic token MUST always be spAv1.0 or spBv1.0 with the exception of STATE messages. This denotes the payload encoding. 13 | * Group ID: This MUST be included as the second topic token for every non-STATE topic 14 | * Edge Node ID: This MUST be included as the fourth topic token for every non-STATE topic 15 | * Device ID: This MUST be included as the fifth topic token for any non-STATE message where the Sparkplug Verb is DBIRTH, DDEATH, DDATA, or DCMD. It MUST NOT be included if the Sparkplug Verb is NBIRTH, NDEATH, NDATA, or NCMD. 
16 | * Sparkplug Verb: This MUST be included as the third topic token for every non-STATE topic and MUST be one of the following: NBIRTH, DBIRTH, NDEATH, DDEATH, NDATA, DDATA, NCMD, or DCMD. 17 | 18 | ## Sparkplug Host Client 19 | * MUST subscribe to NBIRTH and NDEATH messages 20 | * SHOULD subscribe to DBIRTH, DDEATH, NDATA, and DDATA messages 21 | * MAY publish NCMD and DCMD messages to the MQTT Server 22 | * MAY publish STATE messages - If it does, it is a 'Sparkplug Primary Host Client' and MUST follow the rules of the STATE topics 23 | 24 | ## Sparkplug Primary Host Client 25 | * There MUST NOT be more than one Sparkplug Primary Host Client connected to any MQTT Server 26 | * An MQTT 'Will Message' MUST be registered with the STATE topic. It MUST have a payload with the UTF-8 string 'OFFLINE', use QoS1, and MUST set the MQTT retain flag to true 27 | * The STATE message MUST be published after the MQTT CONNACK packet is received with a 'Connection Accepted' response. The payload MUST be a UTF-8 string with the value of 'ONLINE', it MUST use QoS1, and it MUST set the MQTT retain flag to true. 28 | 29 | ## Sparkplug Edge Client 30 | * MUST publish an NBIRTH message after connecting to the MQTT Server and before publishing any other messages 31 | * MUST register an MQTT Will message with the topic '[Namespace Token]/[Group ID]/NDEATH/[Edge Node ID]', MQTT retain=false, and MQTT QoS=0. It MUST also include a non-null payload with a metric with name=bdSeq and a value that matches the 32 | pending bdSeq number metric that will be published in the pending NBIRTH message 33 | * SHOULD publish DBIRTH, NDATA, NDEATH, DDEATH, and DDATA messages 34 | * MAY subscribe to NCMD and DCMD messages 35 | * MAY subscribe to STATE messages 36 | * Each Sparkplug edge client in the infrastructure MUST have a unique combination of Sparkplug Group ID and Edge Node ID 37 | 38 | ## Sparkplug Client 39 | * This is any Sparkplug Edge Client, Sparkplug Host Client, or Sparkplug Primary Host Client 40 | 41 | ## Payloads 42 | * When using the spBv1.0 'Namespace Token' in the topic, the payload MUST be Google Protobuf encoded and use the protofile from here: https://github.com/eclipse/tahu/blob/master/sparkplug_b/sparkplug_b.proto 43 | * LOTS MORE TO ADD HERE 44 | 45 | ## Quality of Service (QoS) 46 | * All STATE messages published by Sparkplug Primary Host Clients MUST be published on QoS1, including the MQTT Will message that is registered in the MQTT CONNECT packet 47 | * All non-STATE messages from any Sparkplug Client MUST be published on QoS0 48 | 49 | ## Retained Messages 50 | * All STATE messages MUST be published with the MQTT 'retain flag' set to true 51 | * All non-STATE messages MUST be published with the MQTT 'retain flag' set to false 52 | 53 | ## MQTT Will Messages 54 | * Sparkplug Primary Host Clients MUST register an MQTT Will message with the topic 'STATE/[Primary Host ID]', a payload of the UTF-8 string 'OFFLINE', MQTT retain=true, and QoS=1 55 | * Sparkplug Host Clients that are not the Sparkplug Primary Host Clients MUST NOT register an MQTT Will message 56 | * Sparkplug Edge Clients MUST register an MQTT Will message with the topic '[Namespace Token]/[Group ID]/NDEATH/[Edge Node ID]' 57 | * MORE TO ADD HERE 58 | 59 | ## Clean Session 60 | * The MQTT clean session flag MUST always be set to true for all Sparkplug clients
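## Example (Non-Normative)
* The sketch below pulls the Sparkplug Primary Host Client statements above together: the Will message is registered on the STATE topic with an 'OFFLINE' payload, and 'ONLINE' is published after a successful CONNACK, both with QoS1 and the retain flag set. It is an illustration only, assuming the Eclipse Paho Java client (org.eclipse.paho.client.mqttv3), a local MQTT Server, and a placeholder Primary Host ID of 'host_app_id'.

```java
import java.nio.charset.StandardCharsets;

import org.eclipse.paho.client.mqttv3.MqttClient;
import org.eclipse.paho.client.mqttv3.MqttConnectOptions;
import org.eclipse.paho.client.mqttv3.MqttException;

public class PrimaryHostStateSketch {
    public static void main(String[] args) throws MqttException {
        byte[] offline = "OFFLINE".getBytes(StandardCharsets.UTF_8);
        byte[] online = "ONLINE".getBytes(StandardCharsets.UTF_8);

        MqttClient client = new MqttClient("tcp://localhost:1883", "PrimaryHostClient1");

        MqttConnectOptions options = new MqttConnectOptions();
        // The MQTT clean session flag MUST always be set to true.
        options.setCleanSession(true);
        // Will message on the STATE topic: 'OFFLINE' payload, QoS1, retained.
        options.setWill("STATE/host_app_id", offline, 1, true);
        client.connect(options);

        // After the CONNACK: subscribe, then announce 'ONLINE' on the STATE
        // topic, again with QoS1 and the MQTT retain flag set to true.
        client.subscribe("spBv1.0/#", 0);
        client.publish("STATE/host_app_id", online, 1, true);
    }
}
```

Because both messages are retained on the same STATE topic, a Sparkplug Edge Client that subscribes later immediately learns whether this Primary Host Client is ONLINE or OFFLINE.
61 | -------------------------------------------------------------------------------- /docs/roadmap.md: 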
-------------------------------------------------------------------------------- 1 | ## High Priority 2 | TBD 3 | 4 | ## Med Priority 5 | TBD 6 | 7 | ## Low Priority 8 | TBD 9 | 10 | ## Not Prioritized 11 | * Add RECORD support 12 | * Add supplemental BIRTH support 13 | * Add support for 'secondary hosts' 14 | * Add MQTT v5 support 15 | 16 | ## Vote Tracking or methodology for prioritization? 17 | -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/eclipse-sparkplug/sparkplug/e45b61002a2a0cacb4f11fa4be88670435ccfcb0/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-7.6-bin.zip 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | -------------------------------------------------------------------------------- /gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | 17 | @if "%DEBUG%" == "" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%" == "" set DIRNAME=. 29 | set APP_BASE_NAME=%~n0 30 | set APP_HOME=%DIRNAME% 31 | 32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 34 | 35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 37 | 38 | @rem Find java.exe 39 | if defined JAVA_HOME goto findJavaFromJavaHome 40 | 41 | set JAVA_EXE=java.exe 42 | %JAVA_EXE% -version >NUL 2>&1 43 | if "%ERRORLEVEL%" == "0" goto execute 44 | 45 | echo. 46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 47 | echo. 48 | echo Please set the JAVA_HOME variable in your environment to match the 49 | echo location of your Java installation. 50 | 51 | goto fail 52 | 53 | :findJavaFromJavaHome 54 | set JAVA_HOME=%JAVA_HOME:"=% 55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 56 | 57 | if exist "%JAVA_EXE%" goto execute 58 | 59 | echo. 60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 61 | echo. 
62 | echo Please set the JAVA_HOME variable in your environment to match the 63 | echo location of your Java installation. 64 | 65 | goto fail 66 | 67 | :execute 68 | @rem Setup the command line 69 | 70 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 71 | 72 | 73 | @rem Execute Gradle 74 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 75 | 76 | :end 77 | @rem End local scope for the variables with windows NT shell 78 | if "%ERRORLEVEL%"=="0" goto mainEnd 79 | 80 | :fail 81 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 82 | rem the _cmd.exe /c_ return code! 83 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 84 | exit /b 1 85 | 86 | :mainEnd 87 | if "%OS%"=="Windows_NT" endlocal 88 | 89 | :omega 90 | -------------------------------------------------------------------------------- /release_notes_images/3.0.0_coverage.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/eclipse-sparkplug/sparkplug/e45b61002a2a0cacb4f11fa4be88670435ccfcb0/release_notes_images/3.0.0_coverage.png -------------------------------------------------------------------------------- /release_notes_images/3.0.0_webui.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/eclipse-sparkplug/sparkplug/e45b61002a2a0cacb4f11fa4be88670435ccfcb0/release_notes_images/3.0.0_webui.png -------------------------------------------------------------------------------- /settings.gradle.kts: -------------------------------------------------------------------------------- 1 | rootProject.name = "sparkplug" 2 | 3 | includeBuild("specification") 4 | includeBuild("tck") 5 | -------------------------------------------------------------------------------- /specification/.gitignore: -------------------------------------------------------------------------------- 1 | target/ 2 | build/ 3 | .gradle/ 4 | .DS_Store 5 | -------------------------------------------------------------------------------- /specification/gradle.properties: -------------------------------------------------------------------------------- 1 | version=3.0.0 2 | # 3 | # main props 4 | # 5 | org.gradle.jvmargs=-Xmx2048m 6 | org.gradle.daemon=false 7 | file.encoding=utf-8 8 | # 9 | # plugins 10 | # 11 | plugin.asciidoctor.version=3.3.2 12 | plugin.asciidoctor.pdf.version=2.3.3 13 | -------------------------------------------------------------------------------- /specification/gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 
15 | @rem 16 | 17 | @if "%DEBUG%" == "" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%" == "" set DIRNAME=. 29 | set APP_BASE_NAME=%~n0 30 | set APP_HOME=%DIRNAME% 31 | 32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 34 | 35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 37 | 38 | @rem Find java.exe 39 | if defined JAVA_HOME goto findJavaFromJavaHome 40 | 41 | set JAVA_EXE=java.exe 42 | %JAVA_EXE% -version >NUL 2>&1 43 | if "%ERRORLEVEL%" == "0" goto execute 44 | 45 | echo. 46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 47 | echo. 48 | echo Please set the JAVA_HOME variable in your environment to match the 49 | echo location of your Java installation. 50 | 51 | goto fail 52 | 53 | :findJavaFromJavaHome 54 | set JAVA_HOME=%JAVA_HOME:"=% 55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 56 | 57 | if exist "%JAVA_EXE%" goto execute 58 | 59 | echo. 60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 61 | echo. 62 | echo Please set the JAVA_HOME variable in your environment to match the 63 | echo location of your Java installation. 64 | 65 | goto fail 66 | 67 | :execute 68 | @rem Setup the command line 69 | 70 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 71 | 72 | 73 | @rem Execute Gradle 74 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 75 | 76 | :end 77 | @rem End local scope for the variables with windows NT shell 78 | if "%ERRORLEVEL%"=="0" goto mainEnd 79 | 80 | :fail 81 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 82 | rem the _cmd.exe /c_ return code! 
83 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 84 | exit /b 1 85 | 86 | :mainEnd 87 | if "%OS%"=="Windows_NT" endlocal 88 | 89 | :omega 90 | -------------------------------------------------------------------------------- /specification/settings.gradle.kts: -------------------------------------------------------------------------------- 1 | rootProject.name = "specification" 2 | 3 | pluginManagement { 4 | repositories { 5 | gradlePluginPortal() 6 | } 7 | plugins { 8 | id("org.asciidoctor.jvm.base") version "${extra["plugin.asciidoctor.version"]}" 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /specification/src/main/asciidoc/.asciidoctorconfig: -------------------------------------------------------------------------------- 1 | // provide hints where to find images when rendering in an IDE 2 | // see: https://intellij-asciidoc-plugin.ahus1.de/docs/users-guide/features/advanced/asciidoctorconfig-file.html 3 | :imagesdir: assets/images 4 | :assetsdir: {asciidoctorconfigdir}/ 5 | :figure-caption!: 6 | :icons: font 7 | :listing-caption: Listing 8 | :compat-mode: 9 | -------------------------------------------------------------------------------- /specification/src/main/asciidoc/assets/images/extracted-media/media/image3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/eclipse-sparkplug/sparkplug/e45b61002a2a0cacb4f11fa4be88670435ccfcb0/specification/src/main/asciidoc/assets/images/extracted-media/media/image3.png -------------------------------------------------------------------------------- /specification/src/main/asciidoc/assets/images/extracted-media/media/image4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/eclipse-sparkplug/sparkplug/e45b61002a2a0cacb4f11fa4be88670435ccfcb0/specification/src/main/asciidoc/assets/images/extracted-media/media/image4.png -------------------------------------------------------------------------------- /specification/src/main/asciidoc/assets/plantuml/HA-mqtt-server-cluster-with-load-balancer.puml: -------------------------------------------------------------------------------- 1 | @startuml 2 | /' 3 | ' Unfortunately, the AsciiDoctor PlantUML plugin does not invoke the PlantUML pre-processor. 4 | ' The consequence is that we have lots of redundant markup instead of clean procedures below. 
5 | '/ 6 | hide stereotype 7 | skinparam linetype polyline 8 | skinparam nodesep 70 9 | skinparam ranksep 50 10 | skinparam defaultTextAlignment center 11 | skinparam monochrome true 12 | 13 | skinparam rectangle { 14 | BackgroundColor #white 15 | } 16 | 17 | skinparam rectangle<> { 18 | BackgroundColor #lightgrey 19 | } 20 | 21 | skinparam rectangle<> { 22 | BackgroundColor #darkgrey 23 | Roundcorner 50 24 | } 25 | 26 | 27 | skinparam rectangle<> { 28 | BackgroundColor #white 29 | BorderThickness 0 30 | BorderColor transparent 31 | FontSize 28 32 | Shadowing false 33 | } 34 | 35 | 36 | together { 37 | rectangle EdgeNode [ 38 | "" "" 39 | ===""MQTT Edge"" 40 | ==="" Node "" 41 | "" "" 42 | ] 43 | rectangle Device [ 44 | "" "" 45 | ===""MQTT Enabled"" 46 | ===""Device"" 47 | "" "" 48 | ] 49 | rectangle PrimaryHost [ 50 | "" "" 51 | ===""Primary Host"" 52 | ===""Application"" 53 | "" "" 54 | ] 55 | } 56 | 57 | 'together { 58 | 'rectangle "Load Balancer" <> as LoadBalancerLabel 59 | rectangle LoadBalancer<> [ 60 | ==="" "" 61 | ==="" Load Balancer "" 62 | ==="" "" 63 | ] 64 | '} 65 | 66 | together { 67 | rectangle "MQTT Server" <> as MQTTServerLabel 68 | rectangle MQTTNode1<> [ 69 | ==="" "" 70 | ] 71 | rectangle MQTTNode2<> [ 72 | ==="" "" 73 | ] 74 | rectangle MQTTNode3<> [ 75 | ==="" "" 76 | ] 77 | } 78 | 79 | '[LoadBalancer] -[hidden]down- [LoadBalancerLabel] 80 | 81 | [Device] -[hidden]up- [PrimaryHost] 82 | [Device] -[hidden]down- [EdgeNode] 83 | 84 | [Device] <-left-> [LoadBalancer] 85 | [EdgeNode] <-up-> [LoadBalancer] 86 | [PrimaryHost] <-down-> [LoadBalancer] 87 | 88 | [MQTTNode3] -[hidden]right- [MQTTServerLabel] 89 | [MQTTNode1] -down- [MQTTNode2] 90 | [MQTTNode1] -right- [MQTTNode3] 91 | [MQTTNode2] -down- [MQTTNode3] 92 | 93 | [MQTTNode1] <-left--> [LoadBalancer] 94 | [MQTTNode2] <-right-> [LoadBalancer] 95 | [MQTTNode3] <-right-> [LoadBalancer] 96 | 97 | 98 | 99 | @enduml 100 | -------------------------------------------------------------------------------- /specification/src/main/asciidoc/assets/plantuml/HA-mqtt-server-cluster.puml: -------------------------------------------------------------------------------- 1 | @startuml 2 | /' 3 | ' The "left to right direction" directive below changes how the engine renders the diagram. 4 | ' 5 | ' Since the default layout is "top to bottom", we need to specify directions that take into account 6 | ' the global change in orientation. So, below, "right" means "up" and "up" means "left". 7 | ' 8 | ' The order of the associations also influences the rendering order. 9 | ' 10 | ' Unfortunately, the AsciiDoctor PlantUML plugin does not invoke the PlantUML pre-processor. 11 | ' The consequence is that we have lots of redundant markup instead of clean procedures below. 
12 | '/ 13 | left to right direction 14 | hide stereotype 15 | skinparam linetype polyline 16 | skinparam nodesep 40 17 | skinparam ranksep 20 18 | skinparam defaultTextAlignment center 19 | skinparam monochrome true 20 | 21 | skinparam rectangle { 22 | BackgroundColor #white 23 | } 24 | 25 | skinparam rectangle<> { 26 | BackgroundColor #lightgrey 27 | } 28 | 29 | skinparam rectangle<> { 30 | BackgroundColor #white 31 | BorderThickness 0 32 | BorderColor transparent 33 | FontSize 28 34 | Shadowing false 35 | } 36 | 37 | 38 | together { 39 | rectangle "MQTT Server" <> as MQTTServerLabel 40 | rectangle EdgeNodeMQTTNode<> [ 41 | ==="" "" 42 | ] 43 | rectangle DeviceMQTTNode<> [ 44 | ==="" "" 45 | ] 46 | rectangle PrimaryHostMQTTNode<> [ 47 | ==="" "" 48 | ] 49 | } 50 | 51 | together { 52 | rectangle EdgeNode [ 53 | "" "" 54 | ===""MQTT Edge"" 55 | ==="" Node "" 56 | "" "" 57 | ] 58 | rectangle Device [ 59 | "" "" 60 | ===""MQTT Enabled"" 61 | ===""Device"" 62 | "" "" 63 | ] 64 | } 65 | 66 | rectangle PrimaryHost [ 67 | "" "" 68 | ===""Primary Host"" 69 | ===""Application"" 70 | "" "" 71 | ] 72 | 73 | [EdgeNodeMQTTNode]--left- [DeviceMQTTNode] 74 | [DeviceMQTTNode] --down- [PrimaryHostMQTTNode] 75 | [EdgeNodeMQTTNode] --down- [PrimaryHostMQTTNode] 76 | 77 | 78 | [PrimaryHostMQTTNode] <--down--> [PrimaryHost] 79 | 80 | [DeviceMQTTNode] <--up--> [Device] 81 | 82 | [EdgeNodeMQTTNode] <--up--> [EdgeNode] 83 | 84 | 85 | @enduml 86 | -------------------------------------------------------------------------------- /specification/src/main/asciidoc/assets/plantuml/edge-node-mqtt-session-establishment.puml: -------------------------------------------------------------------------------- 1 | @startuml 2 | participant "Primary Host Application" as PrimApp #lightblue 3 | database "MQTT Server" as Server #lightblue 4 | participant "Sparkplug Edge Node" as EdgeNode #lightblue 5 | 6 | activate PrimApp 7 | activate Server 8 | 9 | 10 | autonumber 1 "(#)" 11 | Server <- EdgeNode : CONNECT 12 | 13 | ||| 14 | 15 | Server <- EdgeNode : SUBSCRIBE( **NCMD, DCMD, STATE** ) 16 | 17 | Server <- EdgeNode : PUBLISH( **NBIRTH** ) 18 | activate EdgeNode 19 | 20 | note left 21 | Create/update Edge Node 22 | folder and metrics. 23 | Create and update all 24 | metrics and set data 25 | quality to 'GOOD'. 26 | end note 27 | 28 | note right of EdgeNode 29 | Sparkplug Node Session 30 | established. Current 'STATE' 31 | is ONLINE. 32 | end note 33 | 34 | ... (normal operation) ... 35 | ||| 36 | 37 | Server <--> EdgeNode : "Loss of Connection" 38 | autonumber stop 39 | deactivate EdgeNode 40 | 41 | [<-- Server : Death Certificate 42 | 43 | note left of Server 44 | All Edge Node metrics set 45 | to 'STALE'. OFFLINE 46 | time and metrics updated. 47 | end note 48 | 49 | note right of EdgeNode 50 | Sparkplug Edge Node 51 | Session terminated. 52 | Current 'STATE' is 53 | OFFLINE. 54 | end note 55 | 56 | ||| 57 | 58 | Server <- EdgeNode : CONNECT 59 | 60 | Server <- EdgeNode : SUBSCRIBE( **NCMD, DCMD, STATE** ) 61 | 62 | ||| 63 | Server <- EdgeNode : PUBLISH( **NBIRTH** ) 64 | activate EdgeNode 65 | note left 66 | All Edge Node metrics set 67 | to 'GOOD'. OFFLINE 68 | time and metrics updated. 69 | end note 70 | 71 | 72 | note right of EdgeNode 73 | Sparkplug Edge Node Session 74 | reestablished. Current 'STATE' 75 | is ONLINE. 76 | end note 77 | 78 | ... (normal operation) ... 
79 | ||| 80 | @enduml -------------------------------------------------------------------------------- /specification/src/main/asciidoc/assets/plantuml/edge-node-ndata-and-ncmd-message-flow.puml: -------------------------------------------------------------------------------- 1 | @startuml 2 | participant "Primary Host Application" as PrimApp #lightblue 3 | database "MQTT Server" as Server #lightblue 4 | participant "Sparkplug Edge Node" as EdgeNode #lightblue 5 | 6 | activate PrimApp 7 | activate Server 8 | activate EdgeNode 9 | 10 | 11 | autonumber 1 "(#)" 12 | PrimApp <--> Server 13 | autonumber 1 "(#)" 14 | Server <--> EdgeNode 15 | 16 | PrimApp <--> Server 17 | 18 | 19 | Server <--> EdgeNode 20 | autonumber 3 "(#)" 21 | PrimApp <--> EdgeNode 22 | note left 23 | All metrics provided in Birth 24 | Certificate are created in the 25 | Edge Node folder within the 26 | Primary Application 27 | end note 28 | 29 | ||| 30 | 31 | Server <- EdgeNode : PUBLISH( **NDATA** ) 32 | note right 33 | Cellular RSSI level changes 34 | value. New value is updated 35 | in a payload and published. 36 | end note 37 | 38 | autonumber 4 "(#)" 39 | PrimApp <-- Server 40 | note left 41 | New RSSI metric is updated in 42 | the associated Edge Node 43 | folder. 44 | end note 45 | 46 | ||| 47 | 48 | autonumber 5 "(#)" 49 | PrimApp -> Server : PUBLISH( **NCMD** ) 50 | note left 51 | Send a command to the Edge 52 | Node to set Ethernet as the 53 | Primary network. Writing to 54 | the tag PRI_NETWORK in the 55 | Primary Host Application 56 | will automatically publish 57 | the new value. 58 | end note 59 | 60 | autonumber 5 "(#)" 61 | Server --> EdgeNode 62 | note right 63 | PRI_NETWORK set to a 64 | new value of 2. 65 | end note 66 | 67 | ||| 68 | @enduml -------------------------------------------------------------------------------- /specification/src/main/asciidoc/assets/plantuml/host-session-establishment.puml: -------------------------------------------------------------------------------- 1 | @startuml 2 | participant "Primary Host Application" as PrimApp #lightblue 3 | database "MQTT Server" as Server #lightblue 4 | 5 | activate Server 6 | 7 | 8 | note left of PrimApp 9 | Initial State of all Clients is 10 | in an **OFFLINE** state. 11 | end note 12 | 13 | autonumber 1 "(#)" 14 | PrimApp -> Server : CONNECT 15 | 16 | ||| 17 | PrimApp -> Server : SUBSCRIBE( **spBv1.0/#** ) 18 | ||| 19 | PrimApp -> Server : SUBSCRIBE( **STATE/host_app_id** ) 20 | 21 | 22 | PrimApp -> Server : PUBLISH( **STATE** ) 23 | activate PrimApp 24 | 25 | note right 26 | Primary Application MQTT Session 27 | established and **'STATE'** Death Certificate 28 | registered. Current 'STATE' is **ONLINE**. 29 | end note 30 | 31 | 32 | note left of PrimApp 33 | Metric now shows MQTT 34 | Client **ONLINE** with MQTT 35 | Server. 36 | end note 37 | 38 | ... (normal operation) ... 39 | 40 | 41 | PrimApp <--> Server : "Loss of Connection" 42 | autonumber stop 43 | 44 | note left of PrimApp 45 | Loss of TCP/IP 46 | connection to MQTT 47 | Server sets Metric to 48 | OFFLINE. 49 | end note 50 | 51 | deactivate PrimApp 52 | 53 | note right 54 | Primary Application MQTT Session 55 | terminated. Death Certificate delivered 56 | with **OFFLINE** payload and RETAINED 57 | topic. 
58 | end note 59 | 60 | PrimApp -> Server : CONNECT 61 | 62 | ||| 63 | PrimApp -> Server : SUBSCRIBE( **spBv1.0/#** ) 64 | ||| 65 | PrimApp -> Server : SUBSCRIBE( **STATE/host_app_id** ) 66 | 67 | 68 | PrimApp -> Server : PUBLISH( **STATE** ) 69 | activate PrimApp 70 | 71 | note right 72 | Primary Application MQTT Session 73 | established and **'STATE'** Death Certificate 74 | registered. Current 'STATE' is **ONLINE**. 75 | end note 76 | 77 | 78 | note left of PrimApp 79 | Updated Metric shows 80 | **ONLINE** again. 81 | end note 82 | 83 | ... (normal operation) ... 84 | 85 | ||| 86 | @enduml -------------------------------------------------------------------------------- /specification/src/main/asciidoc/assets/plantuml/infrastructure-components.puml: -------------------------------------------------------------------------------- 1 | @startuml 2 | /' 3 | ' The "left to right direction" directive below changes how the engine renders the diagram. 4 | ' 5 | ' Since the default layout is "top to bottom", we need to specify directions that take into account 6 | ' the global change in orientation. So, below, "right" means "up" and "up" means "left". 7 | ' 8 | ' The order of the associations also influences the rendering order. 9 | ' 10 | ' Unfortunately, the AsciiDoctor PlantUML plugin does not invoke the PlantUML pre-processor. 11 | ' The consequence is that we have lots of redundant markup instead of clean procedures below. 12 | '/ 13 | left to right direction 14 | hide stereotype 15 | skinparam linetype polyline 16 | skinparam nodesep 30 17 | skinparam ranksep 40 18 | skinparam defaultTextAlignment center 19 | skinparam monochrome true 20 | 21 | skinparam rectangle { 22 | BackgroundColor #white 23 | } 24 | 25 | skinparam rectangle<> { 26 | BackgroundColor #lightgrey 27 | } 28 | 29 | skinparam rectangle<> { 30 | BackgroundColor #white 31 | BorderThickness 0 32 | BorderColor transparent 33 | FontSize 36 34 | Shadowing false 35 | } 36 | 37 | skinparam rectangle<> { 38 | BackgroundColor #white 39 | FontColor #white 40 | BorderThickness 0 41 | BorderColor transparent 42 | FontSize 36 43 | Shadowing 0.0 44 | } 45 | 46 | 47 | skinparam card { 48 | BackgroundColor #white 49 | FontSize 12 50 | BorderThickness 1 51 | Padding 0 52 | } 53 | 54 | skinparam package { 55 | Style rectangle 56 | } 57 | 58 | 59 | package "Security" { 60 | 61 | together { 62 | rectangle SCADA [ 63 | ===""Primary Application"" 64 | "" "" 65 | ""(Sparkplug)"" 66 | ] 67 | rectangle SCADAStandby<> [ 68 | ===""Primary Application"" 69 | ""Standby"" 70 | ""(Sparkplug)"" 71 | ] 72 | rectangle MQTTServer [ 73 | ==""MQTT Server"" 74 | ] 75 | } 76 | 77 | together { 78 | rectangle MES [ 79 | ===""MES"" 80 | ""(Sparkplug)"" 81 | ] 82 | rectangle Historian [ 83 | ===""Historian"" 84 | ""(Sparkplug)"" 85 | ] 86 | rectangle Analytics [ 87 | ===""Analytics"" 88 | ""(Sparkplug)"" 89 | ] 90 | } 91 | 92 | together { 93 | rectangle Node1 [ 94 | ===""Edge Node"" 95 | ""(Sparkplug)"" 96 | ] 97 | 98 | card Node1Device1 [ 99 | "" Device "" 100 | ] 101 | card Node1Sensor1 [ 102 | "" Sensor "" 103 | ] 104 | card Node1Device2 [ 105 | "" Device "" 106 | ] 107 | } 108 | 109 | together { 110 | rectangle Node2 [ 111 | ===""Edge Node"" 112 | ""(Sparkplug)"" 113 | ] 114 | card Node2Device [ 115 | "" Device "" 116 | ] 117 | } 118 | 119 | together { 120 | rectangle Node3 [ 121 | ===""Edge Node"" 122 | ""(Sparkplug)"" 123 | ] 124 | card Node3Device [ 125 | "" Device "" 126 | ] 127 | } 128 | 129 | together { 130 | rectangle Node4 [ 131 | ===""Edge Node"" 132 | 
""(Sparkplug)"" 133 | ] 134 | card Node420maInput1 [ 135 | "" 4-20ma Input "" 136 | ] 137 | card Node420maInput2 [ 138 | "" 4-20ma Input "" 139 | ] 140 | card Node4DigitalInput [ 141 | ""Digital Input "" 142 | ] 143 | card Node4DigitalOutput [ 144 | ""Digital Output"" 145 | ] 146 | } 147 | 148 | together { 149 | rectangle "OT" <> as OT 150 | rectangle "IT" <> as IT 151 | 152 | } 153 | 154 | [MQTTServer] <-left--> [SCADAStandby] 155 | [MQTTServer] <-right--> [SCADA] 156 | 157 | [MQTTServer] -up--> [Analytics] 158 | [MQTTServer] -up--> [Historian] 159 | [MQTTServer] -up--> [MES] 160 | 161 | [MQTTServer] <-down---> [Node1] 162 | [MQTTServer] <-down---> [Node2] 163 | [MQTTServer] <-down---> [Node3] 164 | [MQTTServer] <-down---> [Node4] 165 | 166 | [Node1] -down-- [Node1Device1] 167 | [Node1] -down-- [Node1Sensor1] 168 | [Node1] -down-- [Node1Device2] 169 | 170 | [Node2] -down-- [Node2Device] 171 | 172 | [Node3] -down-- [Node3Device] 173 | 174 | [Node4] -down-- [Node420maInput1] 175 | [Node4] -down-- [Node420maInput2] 176 | [Node4] -down-- [Node4DigitalInput] 177 | [Node4] -down-- [Node4DigitalOutput] 178 | 179 | [OT] -[hidden]up----- [IT] 180 | } 181 | 182 | @enduml 183 | -------------------------------------------------------------------------------- /specification/src/main/asciidoc/assets/plantuml/mqtt-device-session-establishment.puml: -------------------------------------------------------------------------------- 1 | @startuml 2 | participant "Primary Host Application" as PrimApp #lightblue 3 | database "MQTT Server" as Server #lightblue 4 | participant "Sparkplug Edge Node" as EdgeNode #lightblue 5 | participant "Sparkplug Device" as Device #lightblue 6 | 7 | activate PrimApp 8 | activate Server 9 | activate EdgeNode 10 | 11 | 12 | autonumber 1 "(#)" 13 | PrimApp <--> Server 14 | autonumber 1 "(#)" 15 | Server <--> EdgeNode 16 | 17 | PrimApp <-> Server 18 | Server <-> EdgeNode 19 | 20 | ||| 21 | 22 | autonumber stop 23 | EdgeNode <- Device 24 | note right 25 | Device metrics are available 26 | and ready to publish. 27 | end note 28 | 29 | autonumber 4 "(#)" 30 | Server <- EdgeNode : PUBLISH( **DBIRTH** ) 31 | activate Device 32 | note left 33 | Create/update Device folder 34 | and metrics. Set Device 35 | to 'ONLINE' and metrics 36 | to quality 'GOOD' 37 | end note 38 | 39 | ||| 40 | 41 | autonumber stop 42 | EdgeNode <- Device 43 | note right 44 | Any device process variable 45 | or metric change. 46 | end note 47 | 48 | autonumber 5 "(#)" 49 | Server <- EdgeNode : PUBLISH ( **DDATA** ) 50 | note left 51 | Update only metrics that 52 | change. 53 | end note 54 | 55 | autonumber stop 56 | EdgeNode <- Device 57 | note right 58 | Device data becomes 59 | unavailable or stale. 60 | end note 61 | 62 | autonumber 6 "(#)" 63 | Server <- EdgeNode : PUBLISH ( **DDEATH** ) 64 | note left 65 | All associated Device 66 | metrics are set to 'STALE' 67 | end note 68 | 69 | deactivate Device 70 | 71 | ||| 72 | @enduml -------------------------------------------------------------------------------- /specification/src/main/asciidoc/assets/plantuml/payload-metric-folder-structure.puml: -------------------------------------------------------------------------------- 1 | @startsalt 2 | scale 2 3 | { 4 | {T! 
5 | +**__Metric__** | **__Value__** | **__Data Type__** 6 | + **group_id** | "" "" | "" "" 7 | ++ edge_node_id | "" "" | "" "" 8 | +++ device_id | "" "" | "" "" 9 | ++++ Metric Level 1 | "" "" | "" "" 10 | +++++ Metric Level 2 | "" "" | "" "" 11 | ++++++ Metric Name | "" "" | "" "" 12 | } 13 | } 14 | @endsalt 15 | 16 | 17 | -------------------------------------------------------------------------------- /specification/src/main/asciidoc/assets/plantuml/primary-application-state-flow.puml: -------------------------------------------------------------------------------- 1 | @startuml 2 | !pragma teoz true 3 | participant "Primary Host Application" as PrimApp #lightblue 4 | database "MQTT Server #1" as Server1 #lightblue 5 | database "MQTT Server #2" as Server2 #lightblue 6 | database "MQTT Server #n" as ServerN #lightblue 7 | participant "Edge Node" as EdgeNode #lightblue 8 | 9 | activate PrimApp 10 | activate EdgeNode 11 | 12 | note left of PrimApp 13 | Establish a session with all defined 14 | MQTT servers in the infrastructure 15 | and publish a STATE Birth Certificate 16 | end note 17 | 18 | note right of EdgeNode 19 | **(1)** 20 | Establish an MQTT Session with 21 | server and then subscribe to the 22 | STATE message. If payload is 23 | "OFFLINE" walk to the next server. 24 | end note 25 | 26 | & Server1 <-[#Black]-> EdgeNode 27 | 28 | PrimApp <-[#Black]> Server1 : **(2)** STATE="ONLINE" 29 | activate Server1 #LightGreen 30 | 31 | PrimApp <-[#Black]> Server2 : STATE="ONLINE" 32 | activate Server2 #LightGreen 33 | 34 | PrimApp <-[#Black]> ServerN : STATE="ONLINE" 35 | activate ServerN #LightGreen 36 | 37 | ||| 38 | 39 | Server2 <-[#Black]> EdgeNode : **(3)** 40 | note right 41 | STATE for this server is currently 42 | "ONLINE" so stay connected here. 43 | end note 44 | 45 | Server2-[hidden]->Server2 46 | 47 | PrimApp <-[#Red]-> Server2 : Network issues and MQTT session terminated 48 | note left 49 | All tags for all Edge Nodes and 50 | Devices connected to server #2 51 | set to a data quality of "STALE". 52 | All connection metrics updated. 53 | end note 54 | deactivate Server2 #Red 55 | & Server2 <-[#Black]> EdgeNode : **(4)** STATE="OFFLINE" 56 | /note right of EdgeNode 57 | STATE for this server changed to 58 | "OFFLINE". Connect to next 59 | available server. 60 | end note 61 | 62 | 63 | note left of PrimApp 64 | Primary Application keeps trying to 65 | reestablish a session to server #2. 66 | Upon success, the STATE is 67 | updated with a new publish. 68 | end note 69 | 70 | ServerN <-[#Black]> EdgeNode : **(5)** 71 | PrimApp <-[#Black]> Server2 : **(6)** STATE="ONLINE" 72 | activate Server2 #LightGreen 73 | 74 | PrimApp-[hidden]->PrimApp 75 | 76 | @enduml 77 | -------------------------------------------------------------------------------- /specification/src/main/asciidoc/assets/plantuml/primary-host-application-state-flow-diagram.puml: -------------------------------------------------------------------------------- 1 | @startuml 2 | participant "Primary Host Application" as PrimApp #lightblue 3 | database "MQTT Server\n #1" as Server1 #lightblue 4 | database "MQTT Server\n #2" as Server2 #lightblue 5 | database "MQTT Server\n #n" as ServerN #lightblue 6 | participant "Sparkplug Edge Node" as EdgeNode #lightblue 7 | 8 | activate PrimApp 9 | activate Server1 10 | activate Server2 11 | activate ServerN 12 | activate EdgeNode 13 | 14 | note left of PrimApp 15 | Establish a session with all defined 16 | MQTT Servers in the infrastructure 17 | and publish a STATE Birth 18 | Certificate. 
19 | end note 20 | 21 | autonumber 1 "(#)" 22 | Server1 <--> EdgeNode 23 | note right 24 | Establish an MQTT Session with 25 | server and then subscribe to the 26 | STATE message. If the payload is 27 | "OFFLINE" walk to the next Server. 28 | end note 29 | 30 | 31 | autonumber 2 "(#)" 32 | PrimApp <-> Server1 : STATE = 'ONLINE' 33 | activate Server1 #green 34 | 35 | autonumber 2 "(#)" 36 | PrimApp <-> Server2 : STATE = 'ONLINE' 37 | activate Server2 #green 38 | 39 | autonumber 2 "(#)" 40 | PrimApp <-> ServerN : STATE = 'ONLINE' 41 | activate ServerN #green 42 | 43 | autonumber 3 "(#)" 44 | Server2 <-> EdgeNode 45 | note right 46 | STATE for this server is currently 'ONLINE' 47 | so stay connected here. 48 | end note 49 | 50 | autonumber stop 51 | PrimApp <--> Server2 : Network issues\nand MQTT Session\nTerminated! 52 | note left 53 | All tags for all Edge Nodes and 54 | Devices connected to Server #2 55 | set to a data quality of 'STALE'. All 56 | connection metrics updated. 57 | end note 58 | 59 | deactivate Server2 60 | 61 | autonumber 4 "(#)" 62 | Server2 -> EdgeNode : STATE = 'OFFLINE' 63 | note right 64 | STATE for this server changed to 'OFFLINE'. 65 | Connect to next available server. 66 | end note 67 | 68 | autonumber 5 "(#)" 69 | ServerN <-> EdgeNode 70 | 71 | autonumber 5 "(#)" 72 | PrimApp <-> Server2 : STATE = 'ONLINE' 73 | activate Server2 #green 74 | note left 75 | Primary Application keeps trying to 76 | reestablish a session to Server #2. 77 | Upon success, the STATE is 78 | updated with a new publish. 79 | end note 80 | 81 | ||| 82 | @enduml -------------------------------------------------------------------------------- /specification/src/main/asciidoc/assets/plantuml/simple-mqtt-infrastructure.puml: -------------------------------------------------------------------------------- 1 | @startuml 2 | hide stereotype 3 | skinparam linetype polyline 4 | skinparam nodesep 90 5 | skinparam ranksep 10 6 | skinparam defaultTextAlignment center 7 | skinparam monochrome true 8 | 9 | skinparam rectangle { 10 | BackgroundColor #white 11 | FontSize 28 12 | } 13 | 14 | skinparam rectangle<> { 15 | BackgroundColor #white 16 | BorderThickness 0 17 | BorderColor transparent 18 | FontSize 32 19 | Shadowing false 20 | } 21 | 22 | rectangle MQTTServer [ 23 | ==""MQTT Server"" 24 | ] 25 | together { 26 | 27 | rectangle Node1 [ 28 | "" "" 29 | ===""Sparkplug Edge Node"" 30 | "" "" 31 | ] 32 | rectangle "MQTT Client" <> as MQTTClient 33 | } 34 | 35 | 36 | 37 | [MQTTServer] <-right-----> [Node1] 38 | [Node1] -[hidden]up- [MQTTClient] 39 | 40 | @enduml 41 | -------------------------------------------------------------------------------- /specification/src/main/asciidoc/assets/plantuml/sparkplugb-metric-structure-1.puml: -------------------------------------------------------------------------------- 1 | @startsalt 2 | scale 2 3 | { 4 | {T! 
5 | +**__Metric__** | "" "" | **__Value__** | **__Data Type__** 6 | +**Sparkplug B Devices** | /group_id | "" "" | "" "" 7 | ++ Raspberry Pi | /edge_node_id | "" "" | "" "" 8 | +++ Node Control | Node Control | "" "" | "" "" 9 | ++++ Reboot | "" "" | "" FALSE "" | "" Boolean "" 10 | ++++ Rebirth | "" "" | "" FALSE "" | "" Boolean "" 11 | ++++ Next Server | "" "" | "" FALSE "" | "" Boolean "" 12 | ++++ Scan Rate | "" "" | "" 3000 "" | "" Int64 "" 13 | +++ Properties | Node Properties | "" "" | "" "" 14 | ++++ Hardware Make | "" "" | "" Raspberry Pi "" | "" String "" 15 | ++++ Hardware Model | "" "" | "" Pi 3 Model B "" | "" String "" 16 | ++++ OS Name | "" "" | "" Raspbian "" | "" String "" 17 | ++++ OS Version | "" "" | "" Jessie with PIXEL/11.01.2017 "" | "" String "" 18 | ++++ Supply Voltage | "" "" | "" 12.1 "" | "" Float "" 19 | 20 | } 21 | } 22 | @endsalt 23 | 24 | 25 | -------------------------------------------------------------------------------- /specification/src/main/asciidoc/assets/plantuml/sparkplugb-metric-structure-2.puml: -------------------------------------------------------------------------------- 1 | @startsalt 2 | scale 2 3 | { 4 | {T! 5 | +**__Metric__** | "" "" | **__Value__** | **__Data Type__** 6 | +**Sparkplug B Devices** | /group_id | "" "" | "" "" 7 | ++ Raspberry Pi | /edge_node_id | "" "" | "" "" 8 | +++ Pibrella | /device_id | "" "" | "" "" 9 | ++++ Inputs | "" "" | "" "" | "" "" 10 | +++++ A | "" "" | "" FALSE "" | "" Boolean "" 11 | +++++ B | "" "" | "" FALSE "" | "" Boolean "" 12 | +++++ C | "" "" | "" FALSE "" | "" Boolean "" 13 | +++++ D | "" "" | "" FALSE "" | "" Boolean "" 14 | ++++ Outputs | "" "" | "" "" | "" "" 15 | +++++ LEDs | "" "" | "" "" | "" "" 16 | ++++++ Green | "" "" | "" FALSE "" | "" Boolean "" 17 | ++++++ Red | "" "" | "" FALSE "" | "" Boolean "" 18 | ++++++ Yellow | "" "" | "" FALSE "" | "" Boolean "" 19 | +++++ E | "" "" | "" FALSE "" | "" Boolean "" 20 | +++++ F | "" "" | "" FALSE "" | "" Boolean "" 21 | +++++ G | "" "" | "" FALSE "" | "" Boolean "" 22 | +++++ H | "" "" | "" FALSE "" | "" Boolean "" 23 | +++++ Buzzer | "" "" | "" FALSE "" | "" Boolean "" 24 | ++++ Properties | "" "" | "" "" | "" "" 25 | +++++ Hardware Make | "" "" | "" Pibrella "" | "" String "" 26 | } 27 | . 28 | {+ 29 | Everything under the Pibrella node is 30 | Device Process Variables and Metric Tags 31 | } 32 | 33 | 34 | } 35 | 36 | @endsalt -------------------------------------------------------------------------------- /specification/src/main/asciidoc/chapters/.asciidoctorconfig: -------------------------------------------------------------------------------- 1 | // provide hints where to find images when rendering in an IDE 2 | // see: https://intellij-asciidoc-plugin.ahus1.de/docs/users-guide/features/advanced/asciidoctorconfig-file.html 3 | :imagesdir: ../assets/images 4 | -------------------------------------------------------------------------------- /specification/src/main/asciidoc/chapters/Sparkplug_9_Acknowledgements.adoc: -------------------------------------------------------------------------------- 1 | //// 2 | Copyright © 2016-2021 The Eclipse Foundation, Cirrus Link Solutions, and others 3 | 4 | This program and the accompanying materials are made available under the 5 | terms of the Eclipse Public License v. 2.0 which is available at 6 | https://www.eclipse.org/legal/epl-2.0. 7 | 8 | SPDX-License-Identifier: EPL-2.0 9 | 10 | Sparkplug®, Sparkplug Compatible, and the Sparkplug Logo are trademarks of the Eclipse Foundation. 
11 | //// 12 | 13 | // set default value if assetsdir hasn't been defined 14 | ifndef::assetsdir[:assetsdir:] 15 | 16 | [[acknowledgements]] 17 | == Acknowledgements 18 | 19 | The specification would not exist without the initial contribution of the Sparkplug specification by 20 | Cirrus Link Solutions, Wes Johnson, Chad Kienle, and Arlen Nipper. They have also continued to 21 | be involved in promoting, developing, and contributing to the Sparkplug community. 22 | 23 | The following individuals are members of the Eclipse Sparkplug Working Group, the Eclipse Sparkplug 24 | Specification project, the Eclipse Tahu project, or otherwise contributed in a meaningful way to the 25 | Sparkplug Specification. 26 | 27 | * Lukas Brand (HiveMQ) 28 | * Ilya Binshtok (Cirrus Link Solutions) 29 | * Justin Brzozoski (SignalFire) 30 | * Travis Cox (Inductive Automation) 31 | * Ian Craggs (individual) 32 | * Nathan Davenport (Cirrus Link Solutions) 33 | * Frédéric Desbiens (Eclipse Foundation) 34 | * Alex Godbehere (AMRC) 35 | * Anja Helmbrecht-Schaar (HiveMQ) 36 | * Benson Hougland (Opto 22) 37 | * Wes Johnson (Cirrus Link Solutions) 38 | * Chad Kienle (Cirrus Link Solutions) 39 | * Mitchell McPartland (Inductive Automation) 40 | * Bryce Nakatani (Opto 22) 41 | * Arlen Nipper (Cirrus Link Solutions) 42 | * Dominik Obermaier (HiveMQ) 43 | * Alexander Schwartz (individual) 44 | * Josh Wolf (Canary Labs) 45 | -------------------------------------------------------------------------------- /specification/src/main/asciidoc/chapters/Sparkplug_Appendix_A.adoc: -------------------------------------------------------------------------------- 1 | //// 2 | Copyright © 2016-2021 The Eclipse Foundation, Cirrus Link Solutions, and others 3 | 4 | This program and the accompanying materials are made available under the 5 | terms of the Eclipse Public License v. 2.0 which is available at 6 | https://www.eclipse.org/legal/epl-2.0. 7 | 8 | SPDX-License-Identifier: EPL-2.0 9 | 10 | _Sparkplug™ and the Sparkplug™ logo are trademarks of the Eclipse Foundation_ 11 | //// 12 | 13 | [[appendix_a]] 14 | == Appendix A: Open Source Software (non-normative) 15 | 16 | [[introduction_oasis_mqtt_specification]] 17 | === OASIS MQTT Specifications 18 | 19 | The Sparkplug Specification specifies that MQTT Server/Clients in the infrastructure adhere to the 20 | MQTT v3.1.1 and MQTT v5.0 Specifications. The Sparkplug Specification documentation refers to the 21 | following two links for the MQTT v3.1.1 and v5.0 Specifications. 22 | 23 | * MQTT v3.1.1: http://docs.oasis-open.org/mqtt/mqtt/v3.1.1/mqtt-v3.1.1.html 24 | * MQTT v5.0: https://docs.oasis-open.org/mqtt/mqtt/v5.0/mqtt-v5.0.html 25 | 26 | Also referenced is an addendum to the MQTT v3.1.1 Specification that discusses best 27 | practices for implementing security on MQTT TCP/IP networks: 28 | 29 | * http://docs.oasis-open.org/mqtt/mqtt-nist-cybersecurity/v1.0/mqtt-nist-cybersecurity-v1.0.doc 30 | 31 | [[introduction_eclipse_foundation_iot_resources]] 32 | === Eclipse Foundation IoT Resources 33 | 34 | The Eclipse Foundation is an excellent resource for open source software supporting industry 35 | standards. There is a Sparkplug Working Group responsible for maintaining and developing the 36 | Sparkplug Specification. 37 | 38 | * https://sparkplug.eclipse.org/ 39 | 40 | In addition to the Sparkplug Working Group, the Eclipse Foundation has an Internet of Things (IoT) 41 | working group providing a wealth of information and projects around the Internet of Things. 
42 | 43 | * https://iot.eclipse.org/ 44 | 45 | [[introduction_eclipse_paho]] 46 | === Eclipse Paho 47 | 48 | Eclipse Paho™ is an Eclipse Foundation project that offers excellent resources for mature, compliant 49 | MQTT Client and MQTT Server implementations as well as additional resources for all things MQTT. 50 | 51 | * http://www.eclipse.org/paho/ 52 | 53 | [[introduction_google_protocol_buffers]] 54 | === Google Protocol Buffers 55 | 56 | Protocol buffers are Google's language-neutral, platform-neutral, extensible mechanism for 57 | serializing structured data. Google Protocol Buffers are used to encode the Sparkplug payload in 58 | both payload formats A and B of the Sparkplug Specification. 59 | 60 | * https://developers.google.com/protocol-buffers/ 61 | 62 | [[introduction_eclipse_kura_schema]] 63 | === Eclipse Kura Google Protocol Buffer Schema 64 | 65 | Eclipse Kura is another Eclipse Foundation project under the IoT resources. Kura provides open 66 | source resources for the Google Protocol Buffer representation of MQTT payloads as defined in the 67 | original Sparkplug A payload definition. While no longer used in Sparkplug, it was critical to the 68 | evolution of Sparkplug. 69 | 70 | * https://github.com/eclipse/kura/blob/develop/kura/org.eclipse.kura.core.cloud/src/main/protobuf/kurapayload.proto 71 | 72 | [[introduction_raspberry_pi]] 73 | === Raspberry Pi Hardware 74 | 75 | For the sake of keeping the Sparkplug Specification as real world as possible, a reference 76 | implementation of a Sparkplug Edge Node and associated Device is provided for the examples and 77 | screen shots in this document. All of this was implemented on Raspberry Pi hardware representing the 78 | Edge Node with a Pibrella I/O board representing the Device. 79 | -------------------------------------------------------------------------------- /specification/src/main/asciidoc/chapters/Sparkplug_Appendix_B.adoc: -------------------------------------------------------------------------------- 1 | //// 2 | Copyright © 2016-2021 The Eclipse Foundation, Cirrus Link Solutions, and others 3 | 4 | This program and the accompanying materials are made available under the 5 | terms of the Eclipse Public License v. 2.0 which is available at 6 | https://www.eclipse.org/legal/epl-2.0. 7 | 8 | SPDX-License-Identifier: EPL-2.0 9 | 10 | _Sparkplug™ and the Sparkplug™ logo are trademarks of the Eclipse Foundation_ 11 | //// 12 | 13 | [[appendix_b]] 14 | == Appendix B: List of Normative Statements (non-normative) 15 | -------------------------------------------------------------------------------- /specification/src/main/asciidoc/sparkplug_spec.adoc: -------------------------------------------------------------------------------- 1 | //// 2 | Copyright © 2016-2022 The Eclipse Foundation, Cirrus Link Solutions, and others 3 | 4 | This program and the accompanying materials are made available under the 5 | terms of the Eclipse Public License v. 2.0 which is available at 6 | https://www.eclipse.org/legal/epl-2.0. 
7 | 8 | SPDX-License-Identifier: EPL-2.0 9 | //// 10 | 11 | = Sparkplug 3.0.0: Sparkplug Specification 12 | Eclipse Sparkplug Contributors 13 | Version 3.0.0 Release, {docdate} 14 | // Settings: 15 | //:experimental: 16 | :reproducible: 17 | :icons: font 18 | :listing-caption: Listing 19 | :sectnums: 20 | :toc: 21 | :toclevels: 3 22 | :docinfo: shared,private 23 | :autofit-option: 24 | // after importing, all the figures had a specific number; therefore, disable automatic numbering by unsetting the figure caption 25 | :figure-caption!: 26 | :assetsdir: 27 | ifdef::backend-pdf[] 28 | :sectanchors: 29 | :doctype: book 30 | :compat-mode: 31 | :pdf-page-size: Letter 32 | :source-highlighter: rouge 33 | :rouge-style: googlecode 34 | endif::[] 35 | 36 | image::extracted-media/media/image3.png[image,width=195,height=90] 37 | image::extracted-media/media/image4.png[image,width=200,height=80] 38 | 39 | [cols=",,,",options="header",] 40 | |=== 41 | |*Revision Number* |*Date* |*Author* |*Description* 42 | |1.0 |5/26/16 |Cirrus Link |Initial Release 43 | |2.1 |12/10/16 |Cirrus Link |Payload B Addition 44 | |2.2 |10/11/19 |Cirrus Link |Re-branding for the Eclipse Foundation; added TM to Sparkplug 45 | |3.0.0 |11/16/22 |Eclipse Sparkplug Specification Project Team |Reorganized to be in AsciiDoc format and to include normative and non-normative statements 46 | |=== 47 | 48 | include::chapters/Sparkplug_1_Introduction.adoc[] 49 | include::chapters/Sparkplug_2_Principles.adoc[] 50 | include::chapters/Sparkplug_3_Components.adoc[] 51 | include::chapters/Sparkplug_4_Topics.adoc[] 52 | include::chapters/Sparkplug_5_Operational_Behavior.adoc[] 53 | include::chapters/Sparkplug_6_Payloads.adoc[] 54 | include::chapters/Sparkplug_7_Security.adoc[] 55 | include::chapters/Sparkplug_8_HA.adoc[] 56 | include::chapters/Sparkplug_9_Acknowledgements.adoc[] 57 | include::chapters/Sparkplug_10_Conformance.adoc[] 58 | include::chapters/Sparkplug_Appendix_A.adoc[] 59 | include::chapters/Sparkplug_Appendix_B.adoc[] 60 | -------------------------------------------------------------------------------- /specification/src/main/asciidoc/themes/sparkplug-theme.yml: -------------------------------------------------------------------------------- 1 | extends: default 2 | role: 3 | yellow-background: 4 | background_color: #FFFF00 5 | -------------------------------------------------------------------------------- /specification/src/main/xsl/normative-statements.xsl: -------------------------------------------------------------------------------- [XSLT markup stripped during extraction; only the literal AsciiDoc output fragments "===" and "*" emitted by the stylesheet survive.] -------------------------------------------------------------------------------- /tck/.gitignore: -------------------------------------------------------------------------------- 1 | target/ 2 | .gradle 3 | .classpath 4 | .project 5 | classes 6 | .settings 7 | **/bin/ 8 | build/ 9 | .DS_Store 10 | **/Requirements.java 11 | SparkplugTCKResults.log 12 | summary.html 13 | .lck 14 | Eclipse-Sparkplug-TCK*.zip 15 | UserGuide.html 16 | -------------------------------------------------------------------------------- /tck/README.html: -------------------------------------------------------------------------------- [HTML markup stripped during extraction; the page's text content is identical to /tck/README.md below.]
-------------------------------------------------------------------------------- /tck/README.md: -------------------------------------------------------------------------------- 1 | # Eclipse™ Sparkplug™ TCK 2 | 3 | This is the Eclipse Sparkplug Test Compatibility Kit (TCK). For Sparkplug certification requests, this TCK should be downloaded in binary form 4 | from [sparkplug.eclipse.org/compatibility/get-listed](https://sparkplug.eclipse.org/compatibility/get-listed/). This README is meant to be read as part of that package. 5 | 6 | ## Listing Requests for Compatible Products 7 | 8 | Sparkplug is a self-certification ecosystem. If you wish to have your product featured in the official list of compatible products, a listing request is required and must be approved by the Eclipse Sparkplug Specification Project Team. 9 | 10 | There are additional requirements that MUST be met by any organization wishing to use the “Sparkplug Compatible” logo or [Sparkplug website](https://sparkplug.eclipse.org/) for promotion. Any request for listing from an organization not meeting the requirements will be held until such time as the requirements are met. See the [Sparkplug Trademark Guidelines](https://sparkplug.eclipse.org/compatibility/get-listed/documents/sparkplug-trademark-guidelines.pdf) for more information. 11 | 12 | An approved listing request is a statement from the Specification Project that you have met the intended TCK requirements and is just one of the requirements for logo usage. 13 | 14 | More information on the process and requirements is available [here](https://sparkplug.eclipse.org/compatibility/get-listed/). 15 | 16 | ## Sparkplug and Sparkplug TCK Resources 17 | 18 | * [TCK User Guide](https://github.com/eclipse-sparkplug/sparkplug/blob/develop/tck/UserGuide.adoc) 19 | 20 | * Test coverage document: coverage-report/coverage-sparkplug.html 21 | 22 | * [Eclipse Tahu Sparkplug Compatible Implementations Guide](https://github.com/eclipse/tahu/blob/master/java/README.md) 23 | 24 | * Request a new [Sparkplug Product Listing](https://github.com/eclipse-sparkplug/sparkplug.listings/issues/new?assignees=&labels=getlisted%2Ctriage&template=GET-LISTED.yml&title=Get+Listed+Request%3A+%5BADD+DETAILS+HERE%5D) 25 | 26 | * [Report a bug](https://github.com/eclipse-sparkplug/sparkplug/issues/new?assignees=&labels=bugreport%2Ctriage&template=BUG-REPORT.yml&title=Bug+Report%3A+%5BADD+DETAILS+HERE%5D) in the Sparkplug TCK 27 | 28 | * [Ask a question](https://github.com/eclipse-sparkplug/sparkplug/issues/new?assignees=&labels=question%2Ctriage&template=QUESTION.yml&title=Question%3A+%5BADD+SUMMARY+HERE%5D) about Sparkplug or the Sparkplug TCK 29 | 30 | -------------------------------------------------------------------------------- /tck/eftckl-v10: -------------------------------------------------------------------------------- 1 | https://www.eclipse.org/legal/tck.php 2 | 3 | Eclipse Foundation Technology Compatibility Kit License - v1.0 4 | 5 | Copyright (c) 2018, Eclipse Foundation, Inc. and its licensors. 6 | 7 | Redistribution and use in binary form is permitted provided that the following conditions are met: 8 | 9 | 1. Use of the Technology Compatibility Kit accompanying this license ( the “TCK”) and its documentation is permitted solely for the purpose of testing compatibility of an implementation (the “Product”) of a specification (the “Specification”) made available by the Eclipse Foundation, Inc. (“Eclipse”). 
10 | 11 | 2.Only those modifications expressly permitted by the TCK and its documentation are permitted. Except in these limited circumstances, no modifications to the TCK are permitted under this license. 12 | 13 | 3. A Product will be deemed to be “compatible” with the Specification if it fully and completely meets and satisfies all requirements of the TCK. 14 | 15 | 4. Before any claim of compatibility (or any similar claim suggesting compatibility) is made based on the TCK, the testing party must: 16 | a. use the TCK to demonstrate that the Product fully and completely meets and satisfies all requirements of the TCK; 17 | b. make TCK test results showing full and complete satisfaction of all requirements of the TCK publicly available on the testing party’s website and send a link to such test results to Eclipse at tck@eclipse.org; and 18 | c. comply with any requirements stated in the Specification with regard to subsetting, supersetting, modifying or extending the Specification in any Product claimed to be compatible with the Specification. 19 | 20 | 5. The test results must be continuously available and the link must be live for at least as long as the Product is available in the marketplace. 21 | 22 | 6. The TCK may not be used as a basis for any statements of partial compatibility. The TCK may only be used as a basis for true, factual statements of full compatibility of Products that fully meet and satisfy all requirements of the TCK. 23 | 24 | 7. A determination that a Product is compatible with the TCK does not, in itself, give rise to the right to use any name, mark, logo associated with the TCK, Eclipse, or Eclipse’s contributors or licensors. 25 | 26 | 8. Upon the request of Eclipse, a tester will retract any statements of compatibility (or any similar claim suggesting compatibility) which Eclipse reasonably determines to be false or misleading or in violation of the terms of this license. 27 | 28 | 9. Redistribution of the TCK must be under this Eclipse Foundation Technology Compatibility Kit License and must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 29 | 30 | 10. Neither the name, trademarks or logos of Eclipse, nor the names, trademarks or logos of its contributors or licensors may be used to endorse or promote products tested with this software without specific prior written permission. 31 | 32 | 11. The source code for the TCK accompanying this license is available from Eclipse. 33 | 34 | TO THE EXTENT PERMITTED BY APPLICABLE LAW, THIS SOFTWARE IS PROVIDED ON AN “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON- INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. TO THE EXTENT PERMITTED BY APPLICABLE LAW, NEITHER THE COPYRIGHT OWNER OR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 
35 | 36 | -------------------------------------------------------------------------------- /tck/gradle.properties: -------------------------------------------------------------------------------- 1 | version=3.0.0 2 | # 3 | # main props 4 | # 5 | org.gradle.jvmargs=-Xmx2048m 6 | org.gradle.daemon=false 7 | file.encoding=utf-8 8 | # 9 | # main dependencies 10 | # 11 | hivemq-client.version=1.3.0 12 | paho.version=1.2.5 13 | protobuf.version=3.17.3 14 | beanvalidation.tck.version=2.0.6 15 | jboss.test-audit.version=2.0.0.Final 16 | jakarta.annotation.version=2.0.0 17 | jackson.version=2.9.8 18 | jetbrainsAnnotations.version=21.0.1 19 | guava.version=31.1-jre 20 | # 21 | # plugins 22 | # 23 | plugin.hivemq-extension.version=2.1.2 24 | plugin.license.version=0.16.1 25 | plugin.defaults.version=0.2.0 26 | plugin.download.version=4.1.1 27 | -------------------------------------------------------------------------------- /tck/gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | 17 | @if "%DEBUG%" == "" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%" == "" set DIRNAME=. 29 | set APP_BASE_NAME=%~n0 30 | set APP_HOME=%DIRNAME% 31 | 32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 34 | 35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 37 | 38 | @rem Find java.exe 39 | if defined JAVA_HOME goto findJavaFromJavaHome 40 | 41 | set JAVA_EXE=java.exe 42 | %JAVA_EXE% -version >NUL 2>&1 43 | if "%ERRORLEVEL%" == "0" goto execute 44 | 45 | echo. 46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 47 | echo. 48 | echo Please set the JAVA_HOME variable in your environment to match the 49 | echo location of your Java installation. 50 | 51 | goto fail 52 | 53 | :findJavaFromJavaHome 54 | set JAVA_HOME=%JAVA_HOME:"=% 55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 56 | 57 | if exist "%JAVA_EXE%" goto execute 58 | 59 | echo. 60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 61 | echo. 62 | echo Please set the JAVA_HOME variable in your environment to match the 63 | echo location of your Java installation. 
64 | 65 | goto fail 66 | 67 | :execute 68 | @rem Setup the command line 69 | 70 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 71 | 72 | 73 | @rem Execute Gradle 74 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 75 | 76 | :end 77 | @rem End local scope for the variables with windows NT shell 78 | if "%ERRORLEVEL%"=="0" goto mainEnd 79 | 80 | :fail 81 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 82 | rem the _cmd.exe /c_ return code! 83 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 84 | exit /b 1 85 | 86 | :mainEnd 87 | if "%OS%"=="Windows_NT" endlocal 88 | 89 | :omega 90 | -------------------------------------------------------------------------------- /tck/hivemq-configuration/config.xml: -------------------------------------------------------------------------------- [XML markup stripped during extraction; the surviving values describe a TCP listener on 0.0.0.0:1883 and a WebSocket listener on 0.0.0.0:8000 with path /mqtt, subprotocols mqttv3.1 and mqtt, and allow-extensions true.] -------------------------------------------------------------------------------- /tck/hivemq-configuration/logback.xml: -------------------------------------------------------------------------------- [XML markup stripped during extraction; the surviving values describe logback file appenders under ${hivemq.log.folder}: hivemq.log with daily rollover and a 30-day history, plus sparkplug.log, migration.log, and event.log appenders rolling to event-%i.log.gz at 100MB.] -------------------------------------------------------------------------------- /tck/package.py: -------------------------------------------------------------------------------- 1 | #!/bin/python3 2 | """******************************************************************************** 3 | * Copyright (c) 2022 Ian Craggs 4 | * 5 | * This program and the accompanying materials are made available under the 6 | * terms of the Eclipse Public License 2.0 which is available at 7 | * http://www.eclipse.org/legal/epl-2.0. 
8 | * 9 | * SPDX-License-Identifier: EPL-2.0 10 | * 11 | * Contributors: 12 | * Ian Craggs - initial implementation 13 | ********************************************************************************""" 14 | 15 | import zipfile, glob, os, sys 16 | 17 | files = \ 18 | ["build/coverage-report/", 19 | "eftckl-v10", 20 | "build/hivemq-extension/sparkplug-tck-3.0.0.zip", 21 | "build/hivemq-extension/sparkplug-tck-3.0.0.zip.sig", 22 | "hivemq-configuration/", 23 | "report.py", 24 | "UserGuide.html", 25 | "README.md", 26 | "README.html" 27 | ] 28 | 29 | zipfilename = "Eclipse-Sparkplug-TCK-3.0.0.zip" 30 | prefix = "SparkplugTCK/" 31 | 32 | try: 33 | os.remove(zipfilename) 34 | except: 35 | pass 36 | 37 | # Update the UserGuide.html doc 38 | os.system("asciidoc UserGuide.adoc") 39 | 40 | # update the tck jar notices directory 41 | jarfilename = "build/hivemq-extension/sparkplug-tck-3.0.0.jar" 42 | 43 | # get the webconsole directory except the node_modules subdir 44 | webconsole_files = glob.glob("webconsole/*") 45 | ignores = ["node_modules", "layouts", "dist", "static", "middleware", "store", "plugins"] 46 | 47 | for file in webconsole_files: 48 | for ignore in ignores: 49 | if file.find(ignore) != -1: 50 | break 51 | else: 52 | if file.find(".") == -1: 53 | file = file + "/" 54 | files.append(file) 55 | 56 | def zipwrite(entry, tckzip): 57 | # remove the build prefix if there is any 58 | if entry.startswith("build/"): 59 | arcname = entry[6:] 60 | else: 61 | arcname = entry 62 | tckzip.write(entry, prefix + arcname) 63 | 64 | with zipfile.ZipFile(zipfilename, "w", compression=zipfile.ZIP_DEFLATED) as tckzip: 65 | for entry in files: 66 | 67 | if type(entry) == type((0,)): 68 | entry, newname = entry 69 | 70 | if entry.endswith("/"): 71 | files = glob.glob(entry+"**", recursive=True) 72 | for file in files: 73 | zipwrite(file, tckzip) 74 | else: 75 | zipwrite(entry, tckzip) 76 | 77 | tckzip.close() 78 | 79 | #print(tckzip.namelist()) 80 | -------------------------------------------------------------------------------- /tck/package.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | ################################################################################### 3 | # Copyright (c) 2022 Wes Johnson 4 | # 5 | # This program and the accompanying materials are made available under the 6 | # terms of the Eclipse Public License 2.0 which is available at 7 | # http://www.eclipse.org/legal/epl-2.0. 
8 | # 9 | # SPDX-License-Identifier: EPL-2.0 10 | # 11 | # Contributors: 12 | # Ian Craggs - initial implementation in Python (package.py) 13 | # Wes Johnson - initial shell implementation derived from package.py 14 | ################################################################################### 15 | 16 | FILES="build/coverage-report/coverage-sparkplug.html 17 | build/coverage-report/images/stickynote.png 18 | build/coverage-report/images/blank.png 19 | eftckl-v10 20 | build/hivemq-extension/sparkplug-tck-3.0.0.zip 21 | build/hivemq-extension/sparkplug-tck-3.0.0.zip.sig 22 | hivemq-configuration/logback.xml 23 | hivemq-configuration/config.xml 24 | report.py 25 | UserGuide.html 26 | README.md 27 | README.html" 28 | 29 | ZIP_FILE_NAME=Eclipse-Sparkplug-TCK-3.0.0.zip 30 | PREFIX=build/SparkplugTCK/ 31 | 32 | # Delete the old version 33 | rm -f ${ZIP_FILE_NAME} 34 | 35 | # Update the UserGuide.html doc 36 | asciidoc UserGuide.adoc 37 | 38 | # Clean out and create the staging directory 39 | rm -fr ${PREFIX} 40 | mkdir ${PREFIX} 41 | 42 | # get the webconsole directory except the node_modules subdir 43 | WEBCONSOLE_FILES=`find webconsole | grep -v node_modules | grep -v layouts | grep -v dist | grep -v status | grep -v static | grep -v middleware | grep -v store | grep -v plugins | grep -v "\.nuxt" | grep -v "\.gitignore" | grep -v "\.editorconfig"` 44 | 45 | for FILE in $FILES ; do 46 | rsync -R $FILE ${PREFIX} 47 | done 48 | 49 | for FILE in $WEBCONSOLE_FILES ; do 50 | if [ x${FILE} == "xwebconsole" ] ; then 51 | continue 52 | fi 53 | 54 | rsync -R $FILE ${PREFIX} 55 | done 56 | 57 | BUILD_FILES=`ls -1 ${PREFIX}build` 58 | for FILE in $BUILD_FILES ; do 59 | mv ${PREFIX}build/$FILE ${PREFIX} 60 | done 61 | rmdir ${PREFIX}build 62 | 63 | cd build 64 | zip -r ../${ZIP_FILE_NAME} SparkplugTCK/ 65 | -------------------------------------------------------------------------------- /tck/requirements.py: -------------------------------------------------------------------------------- 1 | """******************************************************************************** 2 | * Copyright (c) 2022 Ian Craggs 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0. 7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Ian Craggs - initial implementation 12 | ********************************************************************************""" 13 | 14 | """ 15 | Extract assertion information from the Sparkplug specification generated output 16 | and convert it into constants that the Java test programs can use. 17 | """ 18 | 19 | import xml.dom.minidom 20 | 21 | inputFile = "../specification/build/tck-audit.xml" 22 | outputFile = "src/main/java/org/eclipse/sparkplug/tck/test/common/Requirements.java" 23 | 24 | outfile = open(outputFile, "w") 25 | 26 | outfile.write(""" 27 | /* 28 | * Copyright (c) 2022 Anja Helmbrecht-Schaar, Ian Craggs 29 | *

30 | * All rights reserved. This program and the accompanying materials 31 | * are made available under the terms of the Eclipse Public License v2.0 32 | * which is available at https://www.eclipse.org/legal/epl-2.0/ 33 | *

34 | * SPDX-License-Identifier: EPL-2.0 35 | *

36 | * Contributors: 37 | * Anja Helmbrecht-Schaar - initial implementation and documentation 38 | */ 39 | package org.eclipse.sparkplug.tck.test.common; 40 | 41 | /** 42 | * Class that contains all Requirement IDs and Descriptions that have to be checked. 43 | */ 44 | public class Requirements { 45 | 46 | // @SpecAssertions works only with constants like string but not enum or arrays 47 | 48 | """) 49 | 50 | with xml.dom.minidom.parse(inputFile) as dom: 51 | 52 | assertions = 0 53 | def traverse(node, assertion_id): 54 | global assertions 55 | 56 | if node.nodeName == "section": 57 | #print([node.attributes.item(i).value for i in range(node.attributes.length)]); 58 | outfile.write(" // %s %s\n" % (node.childNodes[1].data.split()[0], node.attributes.item(2).value)) 59 | 60 | elif (node.nodeName == "assertion"): 61 | assertions += 1 62 | assert node.attributes.item(0).name == "id" 63 | assertion_id = node.attributes.item(0).value 64 | 65 | elif assertion_id and node.nodeName == "text": 66 | upper_assertion_id = assertion_id.upper().replace("-", "_") 67 | outfile.write(" public final static String ID_%s = \"%s\";\n" % (upper_assertion_id, assertion_id)) 68 | outfile.write(" public final static String %s = \"%s\";\n\n" % (upper_assertion_id, node.childNodes[0].data.replace("\"", "'"))) 69 | 70 | for child in node.childNodes: 71 | traverse(child, assertion_id) 72 | 73 | traverse(dom, None) 74 | 75 | outfile.write("}\n// no of assertions %d\n" % assertions) 76 | outfile.close() 77 | 78 | -------------------------------------------------------------------------------- /tck/settings.gradle.kts: -------------------------------------------------------------------------------- 1 | rootProject.name = "sparkplug-tck" 2 | 3 | includeBuild("../specification") 4 | 5 | pluginManagement { 6 | repositories { 7 | gradlePluginPortal() 8 | } 9 | plugins { 10 | id("com.hivemq.extension") version "${extra["plugin.hivemq-extension.version"]}" 11 | id("com.github.hierynomus.license-report") version "${extra["plugin.license.version"]}" 12 | id("io.github.sgtsilvio.gradle.defaults") version "${extra["plugin.defaults.version"]}" 13 | id("de.undercouch.download") version "${extra["plugin.download.version"]}" 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/impl/exception/SparkplugErrorCode.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************** 2 | * Copyright (c) 2022 Cirrus Link Solutions and others 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0. 
7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Cirrus Link Solutions - initial implementation 12 | ********************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.impl.exception; 15 | 16 | public enum SparkplugErrorCode { 17 | 18 | ALREADY_EXISTS, 19 | FORBIDDEN, 20 | INITIALIZATION_ERROR, 21 | INTERNAL_ERROR, 22 | INVALID_ARGUMENT, 23 | MISSING_FIELDS, 24 | NOT_AUTHORIZED, 25 | NOT_FOUND, 26 | NOT_SUPPORTED, 27 | NOT_SUPPORTED_TYPE, 28 | NULL_FIELD, 29 | PARSE_ERROR 30 | } 31 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/impl/exception/SparkplugException.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************** 2 | * Copyright (c) 2022 Cirrus Link Solutions and others 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0. 7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Cirrus Link Solutions - initial implementation 12 | ********************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.impl.exception; 15 | 16 | public class SparkplugException extends Exception { 17 | 18 | private static final long serialVersionUID = 1L; 19 | 20 | private SparkplugErrorCode code; 21 | 22 | public SparkplugException() { 23 | super(); 24 | } 25 | 26 | public SparkplugException(SparkplugErrorCode code) { 27 | super(); 28 | this.code = code; 29 | } 30 | 31 | public SparkplugException(SparkplugErrorCode code, String message, Throwable e) { 32 | super("ErrorCode: " + code.toString() + " - Message: " + message, e); 33 | this.code = code; 34 | } 35 | 36 | public SparkplugException(SparkplugErrorCode code, Throwable e) { 37 | super(code.toString(), e); 38 | this.code = code; 39 | } 40 | 41 | public SparkplugException(SparkplugErrorCode code, String message) { 42 | super(message); 43 | this.code = code; 44 | } 45 | 46 | public String getDetails() { 47 | return getMessage(); 48 | } 49 | 50 | public SparkplugErrorCode getSparkplugErrorCode() { 51 | return code; 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/impl/exception/SparkplugInvalidTypeException.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************** 2 | * Copyright (c) 2014-2022 Cirrus Link Solutions and others 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0. 7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Cirrus Link Solutions - initial implementation 12 | ********************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.impl.exception; 15 | 16 | /** 17 | * An Exception caused by an invalid type. 
18 | */ 19 | public class SparkplugInvalidTypeException extends SparkplugException { 20 | 21 | private static final long serialVersionUID = 1L; 22 | 23 | public SparkplugInvalidTypeException(Class<?> type) { 24 | super(SparkplugErrorCode.INVALID_ARGUMENT, "Invalid type " + type); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/impl/exception/SparkplugParsingException.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************** 2 | * Copyright (c) 2014-2022 Cirrus Link Solutions and others 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0. 7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Cirrus Link Solutions - initial implementation 12 | ********************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.impl.exception; 15 | 16 | /** 17 | * An Exception thrown if an error is encountered while parsing a payload or topic. 18 | */ 19 | public class SparkplugParsingException extends SparkplugException { 20 | 21 | private static final long serialVersionUID = 1L; 22 | 23 | /** 24 | * Constructor 25 | * 26 | * @param message an error message 27 | */ 28 | public SparkplugParsingException(String message) { 29 | super(SparkplugErrorCode.INTERNAL_ERROR, message); 30 | } 31 | 32 | /** 33 | * Constructor 34 | * 35 | * @param message an error message 36 | * @param exception an underlying exception 37 | */ 38 | public SparkplugParsingException(String message, Throwable exception) { 39 | super(SparkplugErrorCode.INTERNAL_ERROR, message, exception); 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/impl/exception/message/PayloadEncoder.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************** 2 | * Copyright (c) 2014-2022 Cirrus Link Solutions and others 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0. 7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Cirrus Link Solutions - initial implementation 12 | ********************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.impl.exception.message; 15 | 16 | import java.io.IOException; 17 | 18 | /** 19 | * An interface for encoding payloads. 20 | * 21 | * @param <P> the type of payload. 22 | */ 23 | public interface PayloadEncoder<P> { 24 | 25 | /** 26 | * Converts a payload object into a byte array. 27 | * 28 | * @param payload a payload object 29 | * @return the byte array representing the payload 30 | * @throws IOException 31 | */ 32 | public byte[] getBytes(P payload) throws IOException; 33 | } 34 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/impl/exception/model/DataSetDataType.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************** 2 | * Copyright (c) 2014-2022 Cirrus Link Solutions and others 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0. 7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Cirrus Link Solutions - initial implementation 12 | ********************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.impl.exception.model; 15 | 16 | import java.math.BigInteger; 17 | import java.util.Date; 18 | 19 | import org.eclipse.sparkplug.impl.exception.SparkplugInvalidTypeException; 20 | 21 | /** 22 | * An enumeration of data types of values in a {@link DataSet} 23 | */ 24 | public enum DataSetDataType { 25 | 26 | // Basic Types 27 | Int8(1, Byte.class), 28 | Int16(2, Short.class), 29 | Int32(3, Integer.class), 30 | Int64(4, Long.class), 31 | UInt8(5, Short.class), 32 | UInt16(6, Integer.class), 33 | UInt32(7, Long.class), 34 | UInt64(8, BigInteger.class), 35 | Float(9, Float.class), 36 | Double(10, Double.class), 37 | Boolean(11, Boolean.class), 38 | String(12, String.class), 39 | DateTime(13, Date.class), 40 | Text(14, String.class), 41 | 42 | // Unknown 43 | Unknown(0, Object.class); 44 | 45 | private Class<?> clazz = null; 46 | private int intValue = 0; 47 | 48 | private DataSetDataType(int intValue, Class<?> clazz) { 49 | this.intValue = intValue; 50 | this.clazz = clazz; 51 | } 52 | 53 | public void checkType(Object value) throws SparkplugInvalidTypeException { 54 | if (value != null && !clazz.isAssignableFrom(value.getClass())) { 55 | throw new SparkplugInvalidTypeException(value.getClass()); 56 | } 57 | } 58 | 59 | /** 60 | * Returns an integer representation of the data type. 61 | * 62 | * @return an integer representation of the data type. 63 | */ 64 | public int toIntValue() { 65 | return this.intValue; 66 | } 67 | 68 | /** 69 | * Converts the integer representation of the data type into a {@link DataSetDataType} instance. 70 | * 71 | * @param i the integer representation of the data type. 72 | * @return a {@link DataSetDataType} instance. 
73 | */ 74 | public static DataSetDataType fromInteger(int i) { 75 | switch (i) { 76 | case 1: 77 | return Int8; 78 | case 2: 79 | return Int16; 80 | case 3: 81 | return Int32; 82 | case 4: 83 | return Int64; 84 | case 5: 85 | return UInt8; 86 | case 6: 87 | return UInt16; 88 | case 7: 89 | return UInt32; 90 | case 8: 91 | return UInt64; 92 | case 9: 93 | return Float; 94 | case 10: 95 | return Double; 96 | case 11: 97 | return Boolean; 98 | case 12: 99 | return String; 100 | case 13: 101 | return DateTime; 102 | case 14: 103 | return Text; 104 | default: 105 | return Unknown; 106 | } 107 | } 108 | 109 | /** 110 | * Returns the class type for this DataType 111 | * 112 | * @return the class type for this DataType 113 | */ 114 | public Class<?> getClazz() { 115 | return clazz; 116 | } 117 | } 118 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/impl/exception/model/DeviceDescriptor.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************** 2 | * Copyright (c) 2020-2022 Cirrus Link Solutions and others 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0. 7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Cirrus Link Solutions - initial implementation 12 | ********************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.impl.exception.model; 15 | 16 | public class DeviceDescriptor extends EdgeNodeDescriptor { 17 | 18 | private final String deviceId; 19 | private final String descriptorString; 20 | 21 | public DeviceDescriptor(String groupId, String edgeNodeId, String deviceId) { 22 | super(groupId, edgeNodeId); 23 | this.deviceId = deviceId; 24 | this.descriptorString = groupId + "/" + edgeNodeId + "/" + deviceId; 25 | } 26 | 27 | public DeviceDescriptor(String descriptorString) { 28 | super(descriptorString.substring(0, descriptorString.lastIndexOf("/"))); 29 | this.deviceId = descriptorString.substring(descriptorString.lastIndexOf("/") + 1); 30 | this.descriptorString = descriptorString; 31 | } 32 | 33 | public DeviceDescriptor(EdgeNodeDescriptor edgeNodeDescriptor, String deviceId) { 34 | super(edgeNodeDescriptor.getGroupId(), edgeNodeDescriptor.getEdgeNodeId()); 35 | this.deviceId = deviceId; 36 | this.descriptorString = edgeNodeDescriptor.getDescriptorString() + "/" + deviceId; 37 | } 38 | 39 | public String getDeviceId() { 40 | return deviceId; 41 | } 42 | 43 | /** 44 | * Returns a {@link String} representing the Device's Descriptor of the form: 45 | * "group_id/edge_node_id/device_id". 46 | * 47 | * @return a {@link String} representing the Device's Descriptor. 
48 | */ 49 | @Override 50 | public String getDescriptorString() { 51 | return descriptorString; 52 | } 53 | 54 | public String getEdgeNodeDescriptorString() { 55 | return super.getDescriptorString(); 56 | } 57 | 58 | @Override 59 | public int hashCode() { 60 | return this.getDescriptorString().hashCode(); 61 | } 62 | 63 | @Override 64 | public boolean equals(Object object) { 65 | if (object instanceof DeviceDescriptor) { 66 | return this.getDescriptorString().equals(((DeviceDescriptor) object).getDescriptorString()); 67 | } 68 | return this.getDescriptorString().equals(object); 69 | } 70 | 71 | @Override 72 | public String toString() { 73 | return getDescriptorString(); 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/impl/exception/model/EdgeNodeDescriptor.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************** 2 | * Copyright (c) 2017-2022 Cirrus Link Solutions and others 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0. 7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Cirrus Link Solutions - initial implementation 12 | ********************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.impl.exception.model; 15 | 16 | import com.fasterxml.jackson.annotation.JsonValue; 17 | 18 | /** 19 | * An Edge Node Identifier 20 | */ 21 | public class EdgeNodeDescriptor implements SparkplugDescriptor { 22 | 23 | private final String groupId; 24 | private final String edgeNodeId; 25 | private final String descriptorString; 26 | 27 | public EdgeNodeDescriptor(String groupId, String edgeNodeId) { 28 | this.groupId = groupId; 29 | this.edgeNodeId = edgeNodeId; 30 | this.descriptorString = groupId + "/" + edgeNodeId; 31 | } 32 | 33 | /** 34 | * Creates an EdgeNodeDescriptor from a {@link String} of the form group_name/edge_node_name 35 | * 36 | * @param descriptorString the {@link String} representation of an EdgeNodeDescriptor 37 | */ 38 | public EdgeNodeDescriptor(String descriptorString) { 39 | String[] tokens = descriptorString.split("/"); 40 | this.groupId = tokens[0]; 41 | this.edgeNodeId = tokens[1]; 42 | this.descriptorString = descriptorString; 43 | } 44 | 45 | public String getGroupId() { 46 | return groupId; 47 | } 48 | 49 | public String getEdgeNodeId() { 50 | return edgeNodeId; 51 | } 52 | 53 | /** 54 | * Returns a {@link String} representing the Edge Node's Descriptor of the form: "group_id/edge_node_id". 55 | * 56 | * @return a {@link String} representing the Edge Node's Descriptor. 
57 | */ 58 | @Override 59 | public String getDescriptorString() { 60 | return descriptorString; 61 | } 62 | 63 | @Override 64 | public int hashCode() { 65 | return this.getDescriptorString().hashCode(); 66 | } 67 | 68 | @Override 69 | public boolean equals(Object object) { 70 | if (object instanceof EdgeNodeDescriptor) { 71 | return this.getDescriptorString().equals(((EdgeNodeDescriptor) object).getDescriptorString()); 72 | } 73 | return this.getDescriptorString().equals(object); 74 | } 75 | 76 | @Override 77 | @JsonValue 78 | public String toString() { 79 | return getDescriptorString(); 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/impl/exception/model/File.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************** 2 | * Copyright (c) 2014-2022 Cirrus Link Solutions and others 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0. 7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Cirrus Link Solutions - initial implementation 12 | ********************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.impl.exception.model; 15 | 16 | import java.util.Arrays; 17 | 18 | import com.fasterxml.jackson.annotation.JsonIgnoreProperties; 19 | import com.fasterxml.jackson.databind.annotation.JsonSerialize; 20 | import com.fasterxml.jackson.databind.ser.std.FileSerializer; 21 | 22 | @JsonIgnoreProperties( 23 | value = { "fileName" }) 24 | @JsonSerialize( 25 | using = FileSerializer.class) 26 | public class File { 27 | 28 | private String fileName; 29 | private byte[] bytes; 30 | 31 | public File() { 32 | super(); 33 | } 34 | 35 | public File(String fileName, byte[] bytes) { 36 | super(); 37 | this.fileName = fileName == null 38 | ? null 39 | : fileName.replace("/", System.getProperty("file.separator")).replace("\\", 40 | System.getProperty("file.separator")); 41 | this.bytes = Arrays.copyOf(bytes, bytes.length); 42 | } 43 | 44 | public String getFileName() { 45 | return fileName; 46 | } 47 | 48 | public void setFileName(String fileName) { 49 | this.fileName = fileName; 50 | } 51 | 52 | public byte[] getBytes() { 53 | return bytes; 54 | } 55 | 56 | public void setBytes(byte[] bytes) { 57 | this.bytes = bytes; 58 | } 59 | 60 | @Override 61 | public String toString() { 62 | StringBuilder builder = new StringBuilder(); 63 | builder.append("File [fileName="); 64 | builder.append(fileName); 65 | builder.append(", bytes="); 66 | builder.append(Arrays.toString(bytes)); 67 | builder.append("]"); 68 | return builder.toString(); 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/impl/exception/model/MessageType.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************** 2 | * Copyright (c) 2014-2022 Cirrus Link Solutions and others 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0. 
7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Cirrus Link Solutions - initial implementation 12 | ********************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.impl.exception.model; 15 | 16 | import org.eclipse.sparkplug.impl.exception.SparkplugParsingException; 17 | 18 | /** 19 | * An enumeration of Sparkplug MQTT message types. The type provides an indication as to what the MQTT Payload of 20 | * the message will contain. 21 | */ 22 | public enum MessageType { 23 | 24 | /** 25 | * Birth certificate for MQTT Edge of Network (EoN) Nodes. 26 | */ 27 | NBIRTH, 28 | 29 | /** 30 | * Death certificate for MQTT Edge of Network (EoN) Nodes. 31 | */ 32 | NDEATH, 33 | 34 | /** 35 | * Birth certificate for MQTT Devices. 36 | */ 37 | DBIRTH, 38 | 39 | /** 40 | * Death certificate for MQTT Devices. 41 | */ 42 | DDEATH, 43 | 44 | /** 45 | * Edge of Network (EoN) Node data message. 46 | */ 47 | NDATA, 48 | 49 | /** 50 | * Device data message. 51 | */ 52 | DDATA, 53 | 54 | /** 55 | * Edge of Network (EoN) Node command message. 56 | */ 57 | NCMD, 58 | 59 | /** 60 | * Device command message. 61 | */ 62 | DCMD, 63 | 64 | /** 65 | * Critical application state message. 66 | */ 67 | STATE, 68 | 69 | /** 70 | * Device record message. 71 | */ 72 | DRECORD, 73 | 74 | /** 75 | * Edge of Network (EoN) Node record message. 76 | */ 77 | NRECORD; 78 | 79 | public static MessageType parseMessageType(String type) throws SparkplugParsingException { 80 | for (MessageType messageType : MessageType.values()) { 81 | if (messageType.name().equals(type)) { 82 | return messageType; 83 | } 84 | } 85 | throw new SparkplugParsingException("Invalid message type: " + type); 86 | } 87 | 88 | public boolean isDeath() { 89 | return this.equals(DDEATH) || this.equals(NDEATH); 90 | } 91 | 92 | public boolean isCommand() { 93 | return this.equals(DCMD) || this.equals(NCMD); 94 | } 95 | 96 | public boolean isData() { 97 | return this.equals(DDATA) || this.equals(NDATA); 98 | } 99 | 100 | public boolean isBirth() { 101 | return this.equals(DBIRTH) || this.equals(NBIRTH); 102 | } 103 | 104 | public boolean isRecord() { 105 | return this.equals(DRECORD) || this.equals(NRECORD); 106 | } 107 | } 108 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/impl/exception/model/Parameter.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************** 2 | * Copyright (c) 2014-2022 Cirrus Link Solutions and others 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0. 7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Cirrus Link Solutions - initial implementation 12 | ********************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.impl.exception.model; 15 | 16 | import java.util.Objects; 17 | 18 | import org.eclipse.sparkplug.impl.exception.SparkplugInvalidTypeException; 19 | 20 | import com.fasterxml.jackson.annotation.JsonGetter; 21 | import com.fasterxml.jackson.annotation.JsonProperty; 22 | import com.fasterxml.jackson.annotation.JsonSetter; 23 | 24 | /** 25 | * A class to represent a parameter associated with a template.
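* <p>Example (hypothetical values): {@code new Parameter("Units", ParameterDataType.String, "degF")} creates a String-typed parameter; the constructor type-checks the value against the declared type.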
26 | */ 27 | public class Parameter { 28 | 29 | /** 30 | * The name of the parameter 31 | */ 32 | @JsonProperty("name") 33 | private String name; 34 | 35 | /** 36 | * The data type of the parameter 37 | */ 38 | @JsonProperty("type") 39 | private ParameterDataType type; 40 | 41 | /** 42 | * The value of the parameter 43 | */ 44 | @JsonProperty("value") 45 | private Object value; 46 | 47 | public Parameter() { 48 | } 49 | 50 | /** 51 | * Constructs a Parameter instance. 52 | * 53 | * @param name The name of the parameter. 54 | * @param type The type of the parameter. 55 | * @param value The value of the parameter. 56 | * @throws SparkplugInvalidTypeException 57 | */ 58 | public Parameter(String name, ParameterDataType type, Object value) throws SparkplugInvalidTypeException { 59 | this.name = name; 60 | this.type = type; 61 | this.value = value; 62 | if (value != null) { 63 | this.type.checkType(value); 64 | } 65 | } 66 | 67 | @JsonGetter("name") 68 | public String getName() { 69 | return name; 70 | } 71 | 72 | @JsonSetter("name") 73 | public void setName(String name) { 74 | this.name = name; 75 | } 76 | 77 | public ParameterDataType getType() { 78 | return type; 79 | } 80 | 81 | public void setType(ParameterDataType type) { 82 | this.type = type; 83 | } 84 | 85 | public Object getValue() { 86 | return value; 87 | } 88 | 89 | public void setValue(Object value) { 90 | this.value = value; 91 | } 92 | 93 | @Override 94 | public boolean equals(Object object) { 95 | if (this == object) { 96 | return true; 97 | } 98 | if (object == null || this.getClass() != object.getClass()) { 99 | return false; 100 | } 101 | Parameter param = (Parameter) object; 102 | return Objects.equals(name, param.getName()) && Objects.equals(type, param.getType()) 103 | && Objects.equals(value, param.getValue()); 104 | } 105 | 106 | @Override 107 | public String toString() { 108 | StringBuilder builder = new StringBuilder(); 109 | builder.append("Parameter [name="); 110 | builder.append(name); 111 | builder.append(", type="); 112 | builder.append(type); 113 | builder.append(", value="); 114 | builder.append(value); 115 | builder.append("]"); 116 | return builder.toString(); 117 | } 118 | } 119 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/impl/exception/model/ParameterDataType.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************** 2 | * Copyright (c) 2014-2022 Cirrus Link Solutions and others 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0. 
7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Cirrus Link Solutions - initial implementation 12 | ********************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.impl.exception.model; 15 | 16 | import java.math.BigInteger; 17 | import java.util.Date; 18 | 19 | import org.eclipse.sparkplug.impl.exception.SparkplugInvalidTypeException; 20 | import org.slf4j.Logger; 21 | import org.slf4j.LoggerFactory; 22 | 23 | /** 24 | * An enumeration of data types for the value of a {@link Parameter} for a {@link Template} 25 | */ 26 | public enum ParameterDataType { 27 | 28 | // Basic Types 29 | Int8(1, Byte.class), 30 | Int16(2, Short.class), 31 | Int32(3, Integer.class), 32 | Int64(4, Long.class), 33 | UInt8(5, Short.class), 34 | UInt16(6, Integer.class), 35 | UInt32(7, Long.class), 36 | UInt64(8, BigInteger.class), 37 | Float(9, Float.class), 38 | Double(10, Double.class), 39 | Boolean(11, Boolean.class), 40 | String(12, String.class), 41 | DateTime(13, Date.class), 42 | Text(14, String.class), 43 | 44 | // Unknown 45 | Unknown(0, Object.class); 46 | 47 | private static final Logger logger = LoggerFactory.getLogger(ParameterDataType.class.getName()); 48 | 49 | private Class<?> clazz = null; 50 | private int intValue = 0; 51 | 52 | private ParameterDataType(int intValue, Class<?> clazz) { 53 | this.intValue = intValue; 54 | this.clazz = clazz; 55 | } 56 | 57 | public void checkType(Object value) throws SparkplugInvalidTypeException { 58 | if (value != null && !clazz.isAssignableFrom(value.getClass())) { 59 | logger.warn("Failed type check - " + clazz + " != " + value.getClass().toString()); 60 | throw new SparkplugInvalidTypeException(value.getClass()); 61 | } 62 | } 63 | 64 | /** 65 | * Returns an integer representation of the data type. 66 | * 67 | * @return an integer representation of the data type. 68 | */ 69 | public int toIntValue() { 70 | return this.intValue; 71 | } 72 | 73 | /** 74 | * Converts the integer representation of the data type into a {@link ParameterDataType} instance. 75 | * 76 | * @param i the integer representation of the data type. 77 | * @return a {@link ParameterDataType} instance.
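* <p>Example: {@code ParameterDataType.fromInteger(9)} returns {@link #Float}; any unmapped integer returns {@link #Unknown}.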
78 | */ 79 | public static ParameterDataType fromInteger(int i) { 80 | switch (i) { 81 | case 1: 82 | return Int8; 83 | case 2: 84 | return Int16; 85 | case 3: 86 | return Int32; 87 | case 4: 88 | return Int64; 89 | case 5: 90 | return UInt8; 91 | case 6: 92 | return UInt16; 93 | case 7: 94 | return UInt32; 95 | case 8: 96 | return UInt64; 97 | case 9: 98 | return Float; 99 | case 10: 100 | return Double; 101 | case 11: 102 | return Boolean; 103 | case 12: 104 | return String; 105 | case 13: 106 | return DateTime; 107 | case 14: 108 | return Text; 109 | default: 110 | return Unknown; 111 | } 112 | } 113 | 114 | /** 115 | * Returns the class type for this DataType 116 | * 117 | * @return the class type for this DataType 118 | */ 119 | public Class<?> getClazz() { 120 | return clazz; 121 | } 122 | } 123 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/impl/exception/model/PropertyDataType.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************** 2 | * Copyright (c) 2014-2022 Cirrus Link Solutions and others 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0. 7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Cirrus Link Solutions - initial implementation 12 | ********************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.impl.exception.model; 15 | 16 | import java.math.BigInteger; 17 | import java.util.Date; 18 | import java.util.List; 19 | 20 | import org.eclipse.sparkplug.impl.exception.SparkplugInvalidTypeException; 21 | import org.slf4j.Logger; 22 | import org.slf4j.LoggerFactory; 23 | 24 | /** 25 | * An enumeration of data types for values of a {@link PropertySet} 26 | */ 27 | public enum PropertyDataType { 28 | 29 | // Basic Types 30 | Int8(1, Byte.class), 31 | Int16(2, Short.class), 32 | Int32(3, Integer.class), 33 | Int64(4, Long.class), 34 | UInt8(5, Short.class), 35 | UInt16(6, Integer.class), 36 | UInt32(7, Long.class), 37 | UInt64(8, BigInteger.class), 38 | Float(9, Float.class), 39 | Double(10, Double.class), 40 | Boolean(11, Boolean.class), 41 | String(12, String.class), 42 | DateTime(13, Date.class), 43 | Text(14, String.class), 44 | 45 | // Custom Types for PropertySets 46 | PropertySet(20, PropertySet.class), 47 | PropertySetList(21, List.class), 48 | 49 | // Unknown 50 | Unknown(0, Object.class); 51 | 52 | private static final Logger logger = LoggerFactory.getLogger(PropertyDataType.class.getName()); 53 | 54 | private Class<?> clazz = null; 55 | private int intValue = 0; 56 | 57 | private PropertyDataType(int intValue, Class<?> clazz) { 58 | this.intValue = intValue; 59 | this.clazz = clazz; 60 | } 61 | 62 | public void checkType(Object value) throws SparkplugInvalidTypeException { 63 | if (value != null && !clazz.isAssignableFrom(value.getClass())) { 64 | if (clazz == List.class && value instanceof List) { 65 | // Allow List subclasses 66 | } else { 67 | logger.warn("Failed type check - " + clazz + " != " + value.getClass().toString()); 68 | throw new SparkplugInvalidTypeException(value.getClass()); 69 | } 70 | } 71 | } 72 | 73 | /** 74 | * Returns an integer representation of the data type. 75 | * 76 | * @return an integer representation of the data type.
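* <p>Example: {@code PropertyDataType.PropertySet.toIntValue()} returns {@code 20}, matching the integer accepted by {@code fromInteger}.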
77 | */ 78 | public int toIntValue() { 79 | return this.intValue; 80 | } 81 | 82 | /** 83 | * Converts the integer representation of the data type into a {@link PropertyDataType} instance. 84 | * 85 | * @param i the integer representation of the data type. 86 | * @return a {@link PropertyDataType} instance. 87 | */ 88 | public static PropertyDataType fromInteger(int i) { 89 | switch (i) { 90 | case 1: 91 | return Int8; 92 | case 2: 93 | return Int16; 94 | case 3: 95 | return Int32; 96 | case 4: 97 | return Int64; 98 | case 5: 99 | return UInt8; 100 | case 6: 101 | return UInt16; 102 | case 7: 103 | return UInt32; 104 | case 8: 105 | return UInt64; 106 | case 9: 107 | return Float; 108 | case 10: 109 | return Double; 110 | case 11: 111 | return Boolean; 112 | case 12: 113 | return String; 114 | case 13: 115 | return DateTime; 116 | case 14: 117 | return Text; 118 | case 20: 119 | return PropertySet; 120 | case 21: 121 | return PropertySetList; 122 | default: 123 | return Unknown; 124 | } 125 | } 126 | 127 | /** 128 | * Returns the class type for this DataType 129 | * 130 | * @return the class type for this DataType 131 | */ 132 | public Class<?> getClazz() { 133 | return clazz; 134 | } 135 | } 136 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/impl/exception/model/PropertyValue.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************** 2 | * Copyright (c) 2014-2022 Cirrus Link Solutions and others 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0. 7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Cirrus Link Solutions - initial implementation 12 | ********************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.impl.exception.model; 15 | 16 | import java.util.Objects; 17 | 18 | import org.eclipse.sparkplug.impl.exception.SparkplugInvalidTypeException; 19 | 20 | import com.fasterxml.jackson.annotation.JsonIgnore; 21 | 22 | /** 23 | * The value of a property in a {@link PropertySet}. 24 | */ 25 | public class PropertyValue { 26 | 27 | private PropertyDataType type; 28 | private Object value; 29 | private Boolean isNull = null; 30 | 31 | public PropertyValue() { 32 | } 33 | 34 | /** 35 | * A constructor. 36 | * 37 | * @param type the property type 38 | * @param value the property value 39 | * @throws SparkplugInvalidTypeException 40 | */ 41 | public PropertyValue(PropertyDataType type, Object value) throws SparkplugInvalidTypeException { 42 | this.type = type; 43 | this.value = value; 44 | isNull = (value == null); 45 | type.checkType(value); 46 | } 47 | 48 | public PropertyDataType getType() { 49 | return type; 50 | } 51 | 52 | public void setType(PropertyDataType type) { 53 | this.type = type; 54 | } 55 | 56 | public Object getValue() { 57 | return value; 58 | } 59 | 60 | public void setValue(Object value) { 61 | this.value = value; 62 | isNull = (value == null);
63 | } 64 | 65 | @JsonIgnore 66 | public Boolean isNull() { 67 | return isNull; 68 | } 69 | 70 | @Override 71 | public boolean equals(Object object) { 72 | if (this == object) { 73 | return true; 74 | } 75 | if (object == null || this.getClass() != object.getClass()) { 76 | return false; 77 | } 78 | PropertyValue propValue = (PropertyValue) object; 79 | return Objects.equals(type, propValue.getType()) && Objects.equals(value, propValue.getValue()); 80 | } 81 | 82 | @Override 83 | public String toString() { 84 | StringBuilder builder = new StringBuilder(); 85 | builder.append("PropertyValue [type="); 86 | builder.append(type); 87 | builder.append(", value="); 88 | builder.append(value); 89 | builder.append(", isNull="); 90 | builder.append(isNull); 91 | builder.append("]"); 92 | return builder.toString(); 93 | } 94 | } 95 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/impl/exception/model/Row.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************** 2 | * Copyright (c) 2014-2022 Cirrus Link Solutions and others 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0. 7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Cirrus Link Solutions - initial implementation 12 | ********************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.impl.exception.model; 15 | 16 | import java.util.ArrayList; 17 | import java.util.Collection; 18 | import java.util.List; 19 | 20 | /** 21 | * A class for representing a row of a data set. 22 | */ 23 | public class Row { 24 | 25 | private List<Value<?>> values; 26 | 27 | public Row() { 28 | this.values = new ArrayList<>(); 29 | } 30 | 31 | public Row(List<Value<?>> values) { 32 | this.values = values; 33 | } 34 | 35 | public List<Value<?>> getValues() { 36 | return values; 37 | } 38 | 39 | public void setValues(List<Value<?>> values) { 40 | this.values = values; 41 | } 42 | 43 | public void addValue(Value<?> value) { 44 | this.values.add(value); 45 | } 46 | 47 | @Override 48 | public String toString() { 49 | return "Row [values=" + values + "]"; 50 | } 51 | 52 | /** 53 | * Converts a {@link Row} instance to a {@link List} of Objects representing the values. 54 | * 55 | * @param row a {@link Row} instance. 56 | * @return a {@link List} of Objects. 57 | */ 58 | public static List<Object> toValues(Row row) { 59 | List<Object> list = new ArrayList<>(row.getValues().size()); 60 | for (Value<?> value : row.getValues()) { 61 | list.add(value.getValue()); 62 | } 63 | return list; 64 | } 65 | 66 | /** 67 | * A builder for creating a {@link Row} instance.
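* <p>Example (hypothetical value): {@code new Row.RowBuilder().addValue(new Value<>(DataSetDataType.Int32, 42)).createRow()} builds a one-column row.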
68 | */ 69 | public static class RowBuilder { 70 | 71 | private List<Value<?>> values; 72 | 73 | public RowBuilder() { 74 | this.values = new ArrayList<>(); 75 | } 76 | 77 | public RowBuilder(Row row) { 78 | this.values = new ArrayList<>(row.getValues()); 79 | } 80 | 81 | public RowBuilder addValue(Value<?> value) { 82 | this.values.add(value); 83 | return this; 84 | } 85 | 86 | public RowBuilder addValues(Collection<Value<?>> values) { 87 | this.values.addAll(values); 88 | return this; 89 | } 90 | 91 | public Row createRow() { 92 | return new Row(values); 93 | } 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/impl/exception/model/SparkplugDescriptor.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************** 2 | * Copyright (c) 2020-2022 Cirrus Link Solutions and others 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0. 7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Cirrus Link Solutions - initial implementation 12 | ********************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.impl.exception.model; 15 | 16 | public interface SparkplugDescriptor { 17 | 18 | /** 19 | * Returns the String representation of this {@link SparkplugDescriptor} 20 | * 21 | * @return the String representation of this {@link SparkplugDescriptor} 22 | */ 23 | public String getDescriptorString(); 24 | } 25 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/impl/exception/model/SparkplugMeta.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************** 2 | * Copyright (c) 2022 Cirrus Link Solutions and others 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0.
7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Cirrus Link Solutions - initial implementation 12 | ********************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.impl.exception.model; 15 | 16 | public class SparkplugMeta { 17 | 18 | public static final String SPARKPLUG_B_TOPIC_PREFIX = "spBv1.0"; 19 | public static final String SPARKPLUG_TOPIC_HOST_STATE_TOKEN = "STATE"; 20 | public static final String SPARKPLUG_TOPIC_HOST_STATE_PREFIX = 21 | SPARKPLUG_B_TOPIC_PREFIX + "/" + SPARKPLUG_TOPIC_HOST_STATE_TOKEN; 22 | public static final String SPARKPLUG_SEQUENCE_NUMBER_KEY = "seq"; 23 | public static final String SPARKPLUG_BD_SEQUENCE_NUMBER_KEY = "bdSeq"; 24 | 25 | // Properties 26 | public static final String QUALITY_PROP_NAME = "Quality"; 27 | 28 | // Well Known Metrics 29 | public static final String METRIC_NODE_CONTROL = "Node Control"; 30 | public static final String METRIC_NODE_REBIRTH = METRIC_NODE_CONTROL + "/" + "Rebirth"; 31 | } 32 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/impl/exception/model/Value.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************** 2 | * Copyright (c) 2014-2022 Cirrus Link Solutions and others 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0. 7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Cirrus Link Solutions - initial implementation 12 | ********************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.impl.exception.model; 15 | 16 | public class Value<V> { 17 | 18 | private DataSetDataType type; 19 | private V value; 20 | 21 | public Value() { 22 | super(); 23 | } 24 | 25 | public Value(DataSetDataType type, V value) { 26 | super(); 27 | this.type = type; 28 | this.value = value; 29 | } 30 | 31 | public DataSetDataType getType() { 32 | return type; 33 | } 34 | 35 | public void setType(DataSetDataType type) { 36 | this.type = type; 37 | } 38 | 39 | public V getValue() { 40 | return value; 41 | } 42 | 43 | public void setValue(V value) { 44 | this.value = value; 45 | } 46 | 47 | @Override 48 | public String toString() { 49 | StringBuilder builder = new StringBuilder(); 50 | builder.append("Value [type="); 51 | builder.append(type); 52 | builder.append(", value="); 53 | builder.append(value); 54 | builder.append("]"); 55 | return builder.toString(); 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/tck/ConnectInterceptor.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************** 2 | * Copyright (c) 2021-2022 Cirrus Link Solutions and others 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0.
7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Cirrus Link Solutions - initial implementation 12 | ********************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.tck; 15 | 16 | import org.eclipse.sparkplug.tck.test.TCK; 17 | import org.slf4j.Logger; 18 | import org.slf4j.LoggerFactory; 19 | 20 | import com.hivemq.extension.sdk.api.annotations.NotNull; 21 | import com.hivemq.extension.sdk.api.interceptor.connect.ConnectInboundInterceptor; 22 | import com.hivemq.extension.sdk.api.interceptor.connect.parameter.ConnectInboundInput; 23 | import com.hivemq.extension.sdk.api.interceptor.connect.parameter.ConnectInboundOutput; 24 | import com.hivemq.extension.sdk.api.packets.connect.ConnectPacket; 25 | 26 | /** 27 | * @author Ian Craggs 28 | * @author Lukas Brand 29 | */ 30 | public class ConnectInterceptor implements ConnectInboundInterceptor { 31 | 32 | private static final @NotNull Logger logger = LoggerFactory.getLogger("Sparkplug"); 33 | 34 | private final @NotNull TCK theTCK; 35 | 36 | public ConnectInterceptor(final @NotNull TCK theTCK) { 37 | this.theTCK = theTCK; 38 | } 39 | 40 | @Override 41 | public void onConnect(final @NotNull ConnectInboundInput connectInboundInput, 42 | final @NotNull ConnectInboundOutput connectInboundOutput) { 43 | try { 44 | final String clientId = connectInboundInput.getClientInformation().getClientId(); 45 | 46 | logger.debug("Inbound connect from '{}'", clientId); 47 | logger.debug("\tInet Address {}", connectInboundInput.getConnectionInformation().getInetAddress()); 48 | logger.debug("\tMQTT Version {}", connectInboundInput.getConnectionInformation().getMqttVersion()); 49 | logger.debug("\tClean Start {}", connectInboundInput.getConnectPacket().getCleanStart()); 50 | logger.debug("\tKeep Alive {}", connectInboundInput.getConnectPacket().getKeepAlive()); 51 | 52 | final ConnectPacket packet = connectInboundInput.getConnectPacket(); 53 | theTCK.connect(clientId, packet); 54 | 55 | } catch (final Exception e) { 56 | logger.error("Connect Exception", e); 57 | } 58 | } 59 | } -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/tck/DisconnectInterceptor.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************** 2 | * Copyright (c) 2021-2022 Cirrus Link Solutions and others 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0. 
7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Cirrus Link Solutions - initial implementation 12 | ********************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.tck; 15 | 16 | import com.hivemq.extension.sdk.api.annotations.NotNull; 17 | import com.hivemq.extension.sdk.api.interceptor.disconnect.DisconnectInboundInterceptor; 18 | import com.hivemq.extension.sdk.api.interceptor.disconnect.parameter.DisconnectInboundInput; 19 | import com.hivemq.extension.sdk.api.interceptor.disconnect.parameter.DisconnectInboundOutput; 20 | import com.hivemq.extension.sdk.api.packets.disconnect.DisconnectPacket; 21 | import org.eclipse.sparkplug.tck.test.TCK; 22 | import org.slf4j.Logger; 23 | import org.slf4j.LoggerFactory; 24 | 25 | /** 26 | * @author Ian Craggs 27 | * @author Lukas Brand 28 | */ 29 | public class DisconnectInterceptor implements DisconnectInboundInterceptor { 30 | 31 | private static final @NotNull Logger logger = LoggerFactory.getLogger("Sparkplug"); 32 | 33 | private final @NotNull TCK theTCK; 34 | 35 | public DisconnectInterceptor(final @NotNull TCK theTCK) { 36 | this.theTCK = theTCK; 37 | } 38 | 39 | @Override 40 | public void onInboundDisconnect(final @NotNull DisconnectInboundInput disconnectInboundInput, 41 | final @NotNull DisconnectInboundOutput disconnectInboundOutput) { 42 | try { 43 | final String clientId = disconnectInboundInput.getClientInformation().getClientId(); 44 | 45 | logger.debug("Inbound disconnect from '{}'", clientId); 46 | 47 | final DisconnectPacket packet = disconnectInboundInput.getDisconnectPacket(); 48 | theTCK.disconnect(clientId, packet); 49 | } catch (final Exception e) { 50 | logger.error("Disconnect Exception", e); 51 | } 52 | } 53 | } -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/tck/PublishInterceptor.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************** 2 | * Copyright (c) 2021-2022 Cirrus Link Solutions and others 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0. 
7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Cirrus Link Solutions - initial implementation 12 | ********************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.tck; 15 | 16 | import static org.eclipse.sparkplug.tck.test.common.Constants.TCK_CONSOLE_TEST_CONTROL_TOPIC; 17 | import static org.eclipse.sparkplug.tck.test.common.Constants.TCK_LOG_TOPIC; 18 | import static org.eclipse.sparkplug.tck.test.common.Utils.tokenize; 19 | 20 | import java.nio.ByteBuffer; 21 | import java.nio.charset.StandardCharsets; 22 | 23 | import org.eclipse.sparkplug.tck.test.TCK; 24 | import org.eclipse.sparkplug.tck.test.common.Constants.Profile; 25 | import org.slf4j.Logger; 26 | import org.slf4j.LoggerFactory; 27 | 28 | import com.hivemq.extension.sdk.api.annotations.NotNull; 29 | import com.hivemq.extension.sdk.api.interceptor.publish.PublishInboundInterceptor; 30 | import com.hivemq.extension.sdk.api.interceptor.publish.PublishOutboundInterceptor; 31 | import com.hivemq.extension.sdk.api.interceptor.publish.parameter.PublishInboundInput; 32 | import com.hivemq.extension.sdk.api.interceptor.publish.parameter.PublishInboundOutput; 33 | import com.hivemq.extension.sdk.api.interceptor.publish.parameter.PublishOutboundInput; 34 | import com.hivemq.extension.sdk.api.interceptor.publish.parameter.PublishOutboundOutput; 35 | import com.hivemq.extension.sdk.api.packets.publish.PublishPacket; 36 | 37 | /** 38 | * @author Ian Craggs 39 | * @author Lukas Brand 40 | */ 41 | public class PublishInterceptor implements PublishInboundInterceptor { 42 | 43 | private final static @NotNull Logger logger = LoggerFactory.getLogger("Sparkplug"); 44 | 45 | private final TCK theTCK; 46 | 47 | public PublishInterceptor(final @NotNull TCK theTCK) { 48 | this.theTCK = theTCK; 49 | } 50 | 51 | @Override 52 | public void onInboundPublish(final @NotNull PublishInboundInput publishInboundInput, 53 | final @NotNull PublishInboundOutput publishInboundOutput) { 54 | try { 55 | final String clientId = publishInboundInput.getClientInformation().getClientId(); 56 | final PublishPacket packet = publishInboundInput.getPublishPacket(); 57 | final String topic = packet.getTopic(); 58 | logger.debug("Inbound publish from '{}' at {} ", clientId, topic); 59 | if (packet.getPayload().isPresent()) { 60 | final ByteBuffer payloadByteBuffer = packet.getPayload().get(); 61 | final String payload = StandardCharsets.UTF_8.decode(payloadByteBuffer).toString(); 62 | logger.trace("\tPayload {}", payload); 63 | 64 | if (topic.equals(TCK_LOG_TOPIC)) { 65 | logger.debug(clientId + ": " + payload); // display log message 66 | } 67 | 68 | if (topic.equals(TCK_CONSOLE_TEST_CONTROL_TOPIC)) { 69 | String cmd = "NEW_TEST"; 70 | if (payload.toUpperCase().startsWith(cmd)) { 71 | final String[] strings = tokenize(payload.trim()); 72 | if (strings.length < 3) { 73 | throw new RuntimeException("New test syntax is: NEW_TEST profile testname <parameters>"); 74 | } 75 | 76 | final int no_parms = strings.length - 3; 77 | final String[] parms = new String[no_parms]; 78 | if (no_parms > 0) { 79 | System.arraycopy(strings, 3, parms, 0, no_parms); 80 | } 81 | theTCK.newTest(Profile.valueOf(strings[1].toUpperCase()), strings[2], parms); 82 | } else { 83 | cmd = "END_TEST"; 84 | if (payload.toUpperCase().trim().equals(cmd)) { 85 | theTCK.endTest(); 86 | } 87 | } 88 | } else 89 | theTCK.publish(clientId, packet); 90 | } 91 | } catch (final Exception e) { 92 | logger.error("Publish Exception", e); 93 | } 94 |
} 95 | } -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/tck/SparkplugClientLifecycleEventListener.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************** 2 | * Copyright (c) 2021-2022 Cirrus Link Solutions and others 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0. 7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Cirrus Link Solutions - initial implementation 12 | ********************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.tck; 15 | 16 | import org.eclipse.sparkplug.tck.test.TCK; 17 | import org.slf4j.Logger; 18 | import org.slf4j.LoggerFactory; 19 | 20 | import com.hivemq.extension.sdk.api.annotations.NotNull; 21 | import com.hivemq.extension.sdk.api.events.client.ClientLifecycleEventListener; 22 | import com.hivemq.extension.sdk.api.events.client.parameters.AuthenticationSuccessfulInput; 23 | import com.hivemq.extension.sdk.api.events.client.parameters.ConnectionStartInput; 24 | import com.hivemq.extension.sdk.api.events.client.parameters.DisconnectEventInput; 25 | 26 | public class SparkplugClientLifecycleEventListener implements ClientLifecycleEventListener { 27 | 28 | private final static @NotNull Logger logger = LoggerFactory.getLogger("Sparkplug"); 29 | private final TCK theTCK; 30 | 31 | public SparkplugClientLifecycleEventListener(TCK aTCK) { 32 | theTCK = aTCK; 33 | } 34 | 35 | @Override 36 | public void onMqttConnectionStart(ConnectionStartInput connectionStartInput) { 37 | // logger.info("Client {} connects.", connectionStartInput.getConnectPacket().getClientId()); 38 | theTCK.onMqttConnectionStart(connectionStartInput); 39 | } 40 | 41 | @Override 42 | public void onAuthenticationSuccessful(AuthenticationSuccessfulInput authenticationSuccessfulInput) { 43 | // logger.info("Client {} authenticated successfully.", 44 | // authenticationSuccessfulInput.getClientInformation().getClientId()); 45 | theTCK.onAuthenticationSuccessful(authenticationSuccessfulInput); 46 | } 47 | 48 | @Override 49 | public void onDisconnect(DisconnectEventInput disconnectEventInput) { 50 | // logger.info("Client {} disconnected.", disconnectEventInput.getClientInformation().getClientId()); 51 | theTCK.onDisconnect(disconnectEventInput); 52 | } 53 | } -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/tck/SparkplugClientLifecycleEventListenerProvider.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************** 2 | * Copyright (c) 2021-2022 Cirrus Link Solutions and others 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0. 
7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Cirrus Link Solutions - initial implementation 12 | ********************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.tck; 15 | 16 | import org.eclipse.sparkplug.tck.test.TCK; 17 | 18 | import com.hivemq.extension.sdk.api.events.client.ClientLifecycleEventListenerProvider; 19 | import com.hivemq.extension.sdk.api.events.client.parameters.ClientLifecycleEventListenerProviderInput; 20 | 21 | public class SparkplugClientLifecycleEventListenerProvider implements ClientLifecycleEventListenerProvider { 22 | 23 | TCK theTCK; 24 | 25 | public SparkplugClientLifecycleEventListenerProvider(TCK aTCK) { 26 | theTCK = aTCK; 27 | } 28 | 29 | @Override 30 | public SparkplugClientLifecycleEventListener getClientLifecycleEventListener( 31 | ClientLifecycleEventListenerProviderInput input) { 32 | return new SparkplugClientLifecycleEventListener(theTCK); 33 | } 34 | } -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/tck/SparkplugHiveMQExtension.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************** 2 | * Copyright (c) 2021-2022 Cirrus Link Solutions and others 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0. 7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Cirrus Link Solutions - initial implementation 12 | ********************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.tck; 15 | 16 | import org.eclipse.sparkplug.tck.test.TCK; 17 | import org.slf4j.Logger; 18 | import org.slf4j.LoggerFactory; 19 | 20 | import com.hivemq.extension.sdk.api.ExtensionMain; 21 | import com.hivemq.extension.sdk.api.annotations.NotNull; 22 | import com.hivemq.extension.sdk.api.parameter.ExtensionStartInput; 23 | import com.hivemq.extension.sdk.api.parameter.ExtensionStartOutput; 24 | import com.hivemq.extension.sdk.api.parameter.ExtensionStopInput; 25 | import com.hivemq.extension.sdk.api.parameter.ExtensionStopOutput; 26 | import com.hivemq.extension.sdk.api.services.Services; 27 | import com.hivemq.extension.sdk.api.services.intializer.ClientInitializer; 28 | 29 | /** 30 | * @author Ian Craggs 31 | * @author Lukas Brand 32 | */ 33 | public class SparkplugHiveMQExtension implements ExtensionMain { 34 | 35 | private static final @NotNull Logger logger = LoggerFactory.getLogger("Sparkplug"); 36 | 37 | @Override 38 | public void extensionStart(final @NotNull ExtensionStartInput extensionStartInput, 39 | final @NotNull ExtensionStartOutput extensionStartOutput) { 40 | 41 | try { 42 | logger.info("Starting Sparkplug TCK Extension"); 43 | 44 | final TCK aTCK = new TCK(); 45 | 46 | final ConnectInterceptor connectInterceptor = new ConnectInterceptor(aTCK); 47 | Services.interceptorRegistry().setConnectInboundInterceptorProvider(input -> connectInterceptor); 48 | 49 | final SubscribeInterceptor subscribeInterceptor = new SubscribeInterceptor(aTCK); 50 | final PublishInterceptor publishInterceptor = new PublishInterceptor(aTCK); 51 | final DisconnectInterceptor disconnectInterceptor = new DisconnectInterceptor(aTCK); 52 | 53 | // create a new client initializer 54 | final 
ClientInitializer clientInitializer = (initializerInput, clientContext) -> { 55 | // add the interceptors to the context of the connecting client 56 | clientContext.addSubscribeInboundInterceptor(subscribeInterceptor); 57 | clientContext.addPublishInboundInterceptor(publishInterceptor); 58 | clientContext.addDisconnectInboundInterceptor(disconnectInterceptor); 59 | }; 60 | 61 | // register the client initializer 62 | Services.initializerRegistry().setClientInitializer(clientInitializer); 63 | 64 | Services.eventRegistry() 65 | .setClientLifecycleEventListener(new SparkplugClientLifecycleEventListenerProvider(aTCK)); 66 | 67 | } catch (final Exception e) { 68 | logger.error("Exception thrown at extension start: ", e); 69 | } 70 | } 71 | 72 | @Override 73 | public void extensionStop(final @NotNull ExtensionStopInput extensionStopInput, 74 | final @NotNull ExtensionStopOutput extensionStopOutput) { 75 | logger.info("Stopping Sparkplug TCK Extension"); 76 | } 77 | } -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/tck/SubscribeInterceptor.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************** 2 | * Copyright (c) 2021-2022 Cirrus Link Solutions and others 3 | * 4 | * This program and the accompanying materials are made available under the 5 | * terms of the Eclipse Public License 2.0 which is available at 6 | * http://www.eclipse.org/legal/epl-2.0. 7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Cirrus Link Solutions - initial implementation 12 | ********************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.tck; 15 | 16 | import org.eclipse.sparkplug.tck.test.TCK; 17 | import org.slf4j.Logger; 18 | import org.slf4j.LoggerFactory; 19 | 20 | import com.hivemq.extension.sdk.api.annotations.NotNull; 21 | import com.hivemq.extension.sdk.api.interceptor.subscribe.SubscribeInboundInterceptor; 22 | import com.hivemq.extension.sdk.api.interceptor.subscribe.parameter.SubscribeInboundInput; 23 | import com.hivemq.extension.sdk.api.interceptor.subscribe.parameter.SubscribeInboundOutput; 24 | import com.hivemq.extension.sdk.api.packets.subscribe.SubscribePacket; 25 | 26 | public class SubscribeInterceptor implements SubscribeInboundInterceptor { 27 | 28 | private static Logger logger = LoggerFactory.getLogger("Sparkplug"); 29 | private TCK theTCK = null; 30 | 31 | public SubscribeInterceptor(TCK aTCK) { 32 | theTCK = aTCK; 33 | } 34 | 35 | @Override 36 | public void onInboundSubscribe(@NotNull SubscribeInboundInput subscribeInboundInput, 37 | @NotNull SubscribeInboundOutput subscribeInboundOutput) { 38 | try { 39 | String clientId = subscribeInboundInput.getClientInformation().getClientId(); 40 | 41 | SubscribePacket packet = subscribeInboundInput.getSubscribePacket(); 42 | 43 | logger.debug("Inbound subscribe from '{}' topic {}", clientId, 44 | packet.getSubscriptions().get(0).getTopicFilter()); 45 | 46 | theTCK.subscribe(clientId, packet); 47 | } catch (Exception e) { 48 | logger.error("Exception", e); 49 | } 50 | } 51 | } -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/tck/test/broker/test/BrokerAwareFeatureTester.java: -------------------------------------------------------------------------------- 1 | 
/******************************************************************************* 2 | * Copyright (c) 2022 Anja Helmbrecht-Schaar HiveMQ 3 | * All rights reserved. This program and the accompanying materials 4 | * are made available under the terms of the Eclipse Public License v2.0 5 | * which is available at https://www.eclipse.org/legal/epl-2.0/ 6 | * 7 | * SPDX-License-Identifier: EPL-2.0 8 | * 9 | * Contributors: 10 | * Anja Helmbrecht-Schaar HiveMQ - initial implementation and documentation 11 | *******************************************************************************/ 12 | 13 | package org.eclipse.sparkplug.tck.test.broker.test; 14 | 15 | import java.nio.ByteBuffer; 16 | 17 | import org.jetbrains.annotations.NotNull; 18 | import org.jetbrains.annotations.Nullable; 19 | 20 | import com.hivemq.client.mqtt.MqttClientSslConfig; 21 | import com.hivemq.client.mqtt.mqtt3.Mqtt3Client; 22 | import com.hivemq.client.mqtt.mqtt3.Mqtt3ClientBuilder; 23 | import com.hivemq.client.mqtt.mqtt3.message.auth.Mqtt3SimpleAuth; 24 | import com.hivemq.client.mqtt.mqtt5.message.auth.Mqtt5SimpleAuth; 25 | 26 | public class BrokerAwareFeatureTester { 27 | 28 | private final String host; 29 | private final int port; 30 | private final String username; 31 | private final ByteBuffer password; 32 | private final MqttClientSslConfig sslConfig; 33 | 34 | public BrokerAwareFeatureTester(final @NotNull String host, final @NotNull Integer port, 35 | final @Nullable String username, final @Nullable ByteBuffer password, 36 | final @Nullable MqttClientSslConfig sslConfig, final int timeOut) { 37 | this.host = host; 38 | this.port = port; 39 | this.username = username; 40 | this.password = password; 41 | this.sslConfig = sslConfig; 42 | } 43 | 44 | public void finish(Mqtt3Client subscriber) { 45 | disconnectIfConnected(subscriber); 46 | } 47 | 48 | // Helpers 49 | 50 | public @NotNull Mqtt3ClientBuilder getClientBuilder(String identifier) { 51 | return Mqtt3Client.builder().identifier(identifier).serverHost(host).serverPort(port).simpleAuth(buildAuth()) 52 | .sslConfig(sslConfig); 53 | } 54 | 55 | private @Nullable Mqtt3SimpleAuth buildAuth() { 56 | if (username != null && password != null) { 57 | return Mqtt3SimpleAuth.builder().username(username).password(password).build(); 58 | } else if (username != null) { 59 | // username-only authentication is valid in MQTT 3 60 | return Mqtt3SimpleAuth.builder().username(username).build(); 61 | } else if (password != null) { 62 | throw new IllegalArgumentException("Password-Only Authentication is not allowed in MQTT 3"); 63 | } 64 | return null; 65 | } 66 | 67 | private void disconnectIfConnected(final @NotNull Mqtt3Client... clients) { 68 | for (Mqtt3Client client : clients) { 69 | if (client.getState().isConnected()) { 70 | client.toBlocking().disconnect(); 71 | } 72 | } 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/tck/test/broker/test/results/AsciiCharsInClientIdTestResults.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019-present HiveMQ and the HiveMQ Community 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package org.eclipse.sparkplug.tck.test.broker.test.results; 17 | 18 | import org.jetbrains.annotations.NotNull; 19 | 20 | import java.util.LinkedList; 21 | import java.util.List; 22 | 23 | public class AsciiCharsInClientIdTestResults { 24 | private final @NotNull List<@NotNull Tuple<Character, String>> testResults; 25 | 26 | public AsciiCharsInClientIdTestResults(final @NotNull List<@NotNull Tuple<Character, String>> testResults) { 27 | this.testResults = testResults; 28 | } 29 | 30 | public @NotNull List<@NotNull Tuple<Character, String>> getTestResults() { 31 | return testResults; 32 | } 33 | 34 | public @NotNull List<Character> getUnsupportedChars() { 35 | final List<Character> unsupportedChars = new LinkedList<>(); 36 | for (Tuple<Character, String> tuple : testResults) { 37 | if (tuple.getValue() == null || !("SUCCESS").equals(tuple.getValue())) { 38 | unsupportedChars.add(tuple.getKey()); 39 | } 40 | } 41 | return unsupportedChars; 42 | } 43 | 44 | } 45 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/tck/test/broker/test/results/AwareTestResult.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019-present HiveMQ and the HiveMQ Community 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package org.eclipse.sparkplug.tck.test.broker.test.results; 17 | 18 | public enum AwareTestResult { 19 | NOT_SUBSCRIBED, 20 | SUBSCRIBE_FAILED, 21 | TIME_OUT, 22 | INTERRUPTED, 23 | OK; 24 | } 25 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/tck/test/broker/test/results/ClientIdLengthTestResults.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019-present HiveMQ and the HiveMQ Community 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License.
15 | */ 16 | package org.eclipse.sparkplug.tck.test.broker.test.results; 17 | 18 | import java.util.List; 19 | 20 | import org.jetbrains.annotations.NotNull; 21 | 22 | public class ClientIdLengthTestResults { 23 | private final int maxClientIdLength; 24 | private final @NotNull List<@NotNull Tuple> testResults; 25 | 26 | public ClientIdLengthTestResults(final int maxClientIdLength, 27 | final @NotNull List<@NotNull Tuple> testResults) { 28 | this.maxClientIdLength = maxClientIdLength; 29 | this.testResults = testResults; 30 | } 31 | 32 | public int getMaxClientIdLength() { 33 | return maxClientIdLength; 34 | } 35 | 36 | public @NotNull List<@NotNull Tuple> getTestResults() { 37 | return testResults; 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/tck/test/broker/test/results/ComplianceTestResult.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019-present HiveMQ and the HiveMQ Community 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package org.eclipse.sparkplug.tck.test.broker.test.results; 17 | 18 | public enum ComplianceTestResult { 19 | OK, 20 | PUBLISH_FAILED, 21 | SUBSCRIBE_FAILED, 22 | WRONG_PAYLOAD, 23 | TIME_OUT, 24 | INTERRUPTED 25 | } 26 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/tck/test/broker/test/results/PayloadTestResults.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019-present HiveMQ and the HiveMQ Community 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package org.eclipse.sparkplug.tck.test.broker.test.results; 17 | 18 | import java.util.List; 19 | 20 | import org.jetbrains.annotations.NotNull; 21 | 22 | public class PayloadTestResults { 23 | private final int payloadSize; 24 | private final @NotNull List<@NotNull Tuple> testResults; 25 | 26 | public PayloadTestResults(final int payloadSize, 27 | final @NotNull List<@NotNull Tuple> testResults) { 28 | this.payloadSize = payloadSize; 29 | this.testResults = testResults; 30 | } 31 | 32 | public int getPayloadSize() { 33 | return payloadSize; 34 | } 35 | 36 | public @NotNull List<@NotNull Tuple> getTestResults() { 37 | return testResults; 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/tck/test/broker/test/results/QosTestResult.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019-present HiveMQ and the HiveMQ Community 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package org.eclipse.sparkplug.tck.test.broker.test.results; 17 | 18 | public class QosTestResult { 19 | private final int receivedPublishes; 20 | private final long timeToReceivePublishes; 21 | 22 | public QosTestResult(final int receivedPublishes, final long timeToReceivePublishes) { 23 | this.receivedPublishes = receivedPublishes; 24 | this.timeToReceivePublishes = timeToReceivePublishes; 25 | } 26 | 27 | public int getReceivedPublishes() { 28 | return receivedPublishes; 29 | } 30 | 31 | public long getTimeToReceivePublishes() { 32 | return timeToReceivePublishes; 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/tck/test/broker/test/results/SharedSubscriptionTestResult.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019-present HiveMQ and the HiveMQ Community 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License.
15 | */ 16 | package org.eclipse.sparkplug.tck.test.broker.test.results; 17 | 18 | public enum SharedSubscriptionTestResult { 19 | OK, 20 | NOT_SHARED, 21 | TIME_OUT, 22 | INTERRUPTED, 23 | PUBLISH_FAILED, 24 | SUBSCRIBE_FAILED; 25 | } 26 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/tck/test/broker/test/results/TopicLengthTestResults.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019-present HiveMQ and the HiveMQ Community 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package org.eclipse.sparkplug.tck.test.broker.test.results; 17 | 18 | import java.util.List; 19 | 20 | import org.jetbrains.annotations.NotNull; 21 | 22 | public class TopicLengthTestResults { 23 | private final int maxTopicLength; 24 | private final @NotNull List<@NotNull Tuple> testResults; 25 | 26 | public TopicLengthTestResults(final int maxTopicLength, 27 | final @NotNull List<@NotNull Tuple> testResults) { 28 | this.maxTopicLength = maxTopicLength; 29 | this.testResults = testResults; 30 | } 31 | 32 | public int getMaxTopicLength() { 33 | return maxTopicLength; 34 | } 35 | 36 | public @NotNull List<@NotNull Tuple> getTestResults() { 37 | return testResults; 38 | } 39 | 40 | } 41 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/tck/test/broker/test/results/TopicUtils.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019-present HiveMQ and the HiveMQ Community 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package org.eclipse.sparkplug.tck.test.broker.test.results; 17 | 18 | import java.util.UUID; 19 | 20 | import org.jetbrains.annotations.NotNull; 21 | 22 | public class TopicUtils { 23 | 24 | public static @NotNull String generateTopicUUID() { 25 | final String uuid = UUID.randomUUID().toString(); 26 | return uuid.replace("-", ""); 27 | } 28 | 29 | public static @NotNull String generateTopicUUID(final int maxLength) { 30 | if (maxLength == -1 || maxLength > 32) // a UUID with its dashes removed is exactly 32 characters 31 | return generateTopicUUID(); 32 | else 33 | return generateTopicUUID().substring(0, maxLength); 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/tck/test/broker/test/results/Tuple.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019-present HiveMQ and the HiveMQ Community 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package org.eclipse.sparkplug.tck.test.broker.test.results; 17 | 18 | import org.jetbrains.annotations.NotNull; 19 | import org.jetbrains.annotations.Nullable; 20 | 21 | public class Tuple<A, B> { 22 | private @NotNull final A key; 23 | private @Nullable final B value; 24 | 25 | public Tuple(final @NotNull A key, final @Nullable B value) { 26 | this.key = key; 27 | this.value = value; 28 | } 29 | 30 | public @NotNull A getKey() { 31 | return key; 32 | } 33 | 34 | public @Nullable B getValue() { 35 | return value; 36 | } 37 | 38 | @Override 39 | public String toString() { 40 | return "Tuple{" + "key=" + key + ", value=" + value + '}'; 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/tck/test/broker/test/results/WildcardSubscriptionsTestResult.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019-present HiveMQ and the HiveMQ Community 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package org.eclipse.sparkplug.tck.test.broker.test.results; 17 | 18 | import org.jetbrains.annotations.NotNull; 19 | 20 | public class WildcardSubscriptionsTestResult { 21 | private final boolean success; 22 | private final @NotNull ComplianceTestResult plusWildcardTest; 23 | private final @NotNull ComplianceTestResult hashWildcardTest; 24 | 25 | public WildcardSubscriptionsTestResult(final @NotNull ComplianceTestResult plusWildcardTest, 26 | final @NotNull ComplianceTestResult hashWildcardTest) { 27 | this.plusWildcardTest = plusWildcardTest; 28 | this.hashWildcardTest = hashWildcardTest; 29 | 30 | success = (plusWildcardTest == ComplianceTestResult.OK) && (hashWildcardTest == ComplianceTestResult.OK); 31 | } 32 | 33 | public boolean isSuccess() { 34 | return success; 35 | } 36 | 37 | public @NotNull ComplianceTestResult getPlusWildcardTest() { 38 | return plusWildcardTest; 39 | } 40 | 41 | public @NotNull ComplianceTestResult getHashWildcardTest() { 42 | return hashWildcardTest; 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/tck/test/common/Constants.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (c) 2022 Anja Helmbrecht-Schaar and others 3 | *
4 | * All rights reserved. This program and the accompanying materials 5 | * are made available under the terms of the Eclipse Public License v2.0 6 | * which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | *
8 | * SPDX-License-Identifier: EPL-2.0 9 | *
10 | * Contributors: 11 | * Anja Helmbrecht-Schaar - initial implementation and documentation 12 | */ 13 | package org.eclipse.sparkplug.tck.test.common; 14 | 15 | import com.hivemq.extension.sdk.api.annotations.NotNull; 16 | 17 | public class Constants { 18 | 19 | public static final @NotNull String TOPIC_ROOT_SP_BV_1_0 = "spBv1.0"; 20 | public static final @NotNull String TOPIC_PATH_STATE = "STATE"; 21 | public static final @NotNull String TOPIC_ROOT_STATE = TOPIC_ROOT_SP_BV_1_0 + "/" + TOPIC_PATH_STATE; 22 | public static final @NotNull String TOPIC_PATH_NBIRTH = "NBIRTH"; 23 | public static final @NotNull String TOPIC_PATH_NDEATH = "NDEATH"; 24 | public static final @NotNull String TOPIC_PATH_NCMD = "NCMD"; 25 | public static final @NotNull String TOPIC_PATH_DCMD = "DCMD"; 26 | public static final @NotNull String TOPIC_PATH_DBIRTH = "DBIRTH"; 27 | public static final @NotNull String TOPIC_PATH_DDEATH = "DDEATH"; 28 | public static final @NotNull String TOPIC_PATH_NDATA = "NDATA"; 29 | public static final @NotNull String TOPIC_PATH_DDATA = "DDATA"; 30 | 31 | public static final @NotNull String PASS = "PASS"; 32 | public static final @NotNull String FAIL = "FAIL"; 33 | public static final @NotNull String MAYBE = "MAYBE"; 34 | public static final @NotNull String NOT_EXECUTED = "NOT EXECUTED"; 35 | public static final @NotNull String NOT_YET_IMPLEMENTED = "NOT YET IMPLEMENTED"; 36 | public static final @NotNull String EMPTY = "EMPTY"; 37 | 38 | public static final String TCK_LOG_TOPIC = "SPARKPLUG_TCK/LOG"; 39 | public static final String TCK_DEVICE_CONTROL_TOPIC = "SPARKPLUG_TCK/DEVICE_CONTROL"; 40 | public static final String TCK_CONSOLE_PROMPT_TOPIC = "SPARKPLUG_TCK/CONSOLE_PROMPT"; 41 | public static final String TCK_CONSOLE_TEST_CONTROL_TOPIC = "SPARKPLUG_TCK/TEST_CONTROL"; 42 | public static final String TCK_RESULTS_TOPIC = "SPARKPLUG_TCK/RESULT"; 43 | public static final String TCK_RESULTS_CONFIG_TOPIC = "SPARKPLUG_TCK/RESULT_CONFIG"; 44 | public static final String TCK_CONFIG_TOPIC = "SPARKPLUG_TCK/CONFIG"; 45 | public static final String TCK_CONSOLE_REPLY_TOPIC = "SPARKPLUG_TCK/CONSOLE_REPLY"; 46 | 47 | public static final String TCK_HOST_CONTROL = "SPARKPLUG_TCK/HOST_CONTROL"; 48 | public static final String SPARKPLUG_AWARE_ROOT = "$sparkplug/certificates/"; 49 | 50 | public enum Profile { 51 | HOST, 52 | EDGE, 53 | BROKER 54 | } 55 | 56 | public enum TestStatus { 57 | NONE, 58 | CONSOLE_RESPONSE, 59 | CONNECTING_DEVICE, 60 | RECEIVED_NODE_DATA, 61 | RECEIVED_DEVICE_DATA, 62 | REQUESTED_NODE_DATA, 63 | REQUESTED_DEVICE_DATA, 64 | PUBLISHED_NODE_DATA, 65 | PUBLISHED_DEVICE_DATA, 66 | KILLING_DEVICE, 67 | EXPECT_NODE_REBIRTH, 68 | EXPECT_NODE_BIRTH, 69 | EXPECT_NODE_COMMAND, 70 | EXPECT_DEVICE_REBIRTH, 71 | EXPECT_DEVICE_BIRTH, 72 | EXPECT_DEVICE_COMMAND, 73 | NDEATH_MESSAGE_RECEIVED, 74 | DDEATH_MESSAGE_RECEIVED, 75 | HOST_ONLINE, 76 | WRONG_HOST_ONLINE, 77 | HOST_OFFLINE, 78 | EXPECT_DEATHS, 79 | HOST_WRONG_TIMESTAMP, 80 | HOST_ONLINE_AGAIN, 81 | PUBLISH_DEVICE_DATA, 82 | DONT_EXPECT_DEATHS, 83 | DONT_EXPECT_BIRTHS, 84 | EXPECT_DEATHS_AND_BIRTHS, 85 | EXPECT_DEVICE_DEATH, 86 | EXPECT_HOST_RECONNECT, 87 | HOSTS_ONLINE, 88 | ENDING, 89 | START, 90 | DISCONNECTING_CLIENT, 91 | SENDING_NODE_REBIRTH, 92 | SENDING_DEVICE_REBIRTH 93 | } 94 | } 95 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/tck/test/common/HostUtils.java: -------------------------------------------------------------------------------- 1 | 
/******************************************************************************* 2 | * Copyright (c) 2022 Cirrus Link Solutions 3 | * 4 | * All rights reserved. This program and the accompanying materials 5 | * are made available under the terms of the Eclipse Public License v2.0 6 | * which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Wes Johnson - initial implementation and documentation 12 | *******************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.tck.test.common; 15 | 16 | import java.util.Optional; 17 | 18 | import com.hivemq.extension.sdk.api.annotations.NotNull; 19 | import com.hivemq.extension.sdk.api.packets.connect.ConnectPacket; 20 | import com.hivemq.extension.sdk.api.packets.connect.WillPublishPacket; 21 | 22 | public class HostUtils { 23 | 24 | private HostUtils() { 25 | // static utility class; not instantiable 26 | } 27 | 28 | public static boolean isHostApplication(final @NotNull String expectedHostAppId, 29 | final @NotNull ConnectPacket packet) { 30 | final Optional<WillPublishPacket> willPublishPacketOptional = packet.getWillPublish(); 31 | if (willPublishPacketOptional.isPresent()) { 32 | final WillPublishPacket willPublishPacket = willPublishPacketOptional.get(); 33 | 34 | // Topic is spBv1.0/STATE/{host_application_id} 35 | if (willPublishPacket.getTopic().equals( 36 | Constants.TOPIC_ROOT_SP_BV_1_0 + "/" + Constants.TOPIC_PATH_STATE + "/" + expectedHostAppId)) { 37 | return true; 38 | } 39 | } 40 | return false; 41 | } 42 | 43 | } 44 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/tck/test/common/PersistentUtils.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 2022 Cirrus Link Solutions 3 | * 4 | * All rights reserved. 
This program and the accompanying materials 5 | * are made available under the terms of the Eclipse Public License v2.0 6 | * which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Wes Johnson - initial implementation and documentation 12 | *******************************************************************************/ 13 | 14 | package org.eclipse.sparkplug.tck.test.common; 15 | 16 | import java.io.File; 17 | import java.nio.charset.StandardCharsets; 18 | import java.nio.file.Files; 19 | import java.nio.file.Paths; 20 | import java.nio.file.StandardOpenOption; 21 | 22 | import org.slf4j.Logger; 23 | import org.slf4j.LoggerFactory; 24 | 25 | public class PersistentUtils { 26 | 27 | private static final Logger logger = LoggerFactory.getLogger(PersistentUtils.class.getName()); 28 | 29 | private static final String SPARKPLUG_DIRNAME = "Sparkplug_TCK_Temp_Dir"; 30 | 31 | private static final String TMP_DIR = System.getProperty("java.io.tmpdir"); 32 | 33 | private static final String FILE_SEPARATOR = System.getProperty("file.separator"); 34 | 35 | private static final String HOST_BD_SEQ_NUM_FILE_NAME = 36 | TMP_DIR + FILE_SEPARATOR + SPARKPLUG_DIRNAME + FILE_SEPARATOR + "HOST_BD_SEQ_NUM"; 37 | 38 | public static int getNextHostDeathBdSeqNum() { 39 | try { 40 | File bdSeqNumFile = new File(HOST_BD_SEQ_NUM_FILE_NAME); 41 | if (bdSeqNumFile.exists()) { 42 | int bdSeqNum = Integer 43 | .parseInt(Files.readString(Paths.get(HOST_BD_SEQ_NUM_FILE_NAME), StandardCharsets.UTF_8)); 44 | logger.info("Next Host Death bdSeq number: {}", bdSeqNum); 45 | return bdSeqNum; 46 | } else { 47 | return 0; 48 | } 49 | } catch (Exception e) { 50 | logger.error("Failed to get the bdSeq number from the persistent directory", e); 51 | return 0; 52 | } 53 | } 54 | 55 | public static void setNextHostDeathBdSeqNum(int bdSeqNum) { 56 | try { 57 | Files.writeString(Paths.get(HOST_BD_SEQ_NUM_FILE_NAME), Integer.toString(bdSeqNum), 58 | StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING); 59 | } catch (Exception e) { 60 | logger.error("Failed to write the Host bdSeq number to the persistent directory", e); 61 | } 62 | } 63 | 64 | public static int getNextHostDeathBdSeqNum(String hostName) { 65 | try { 66 | File bdSeqNumFile = new File(HOST_BD_SEQ_NUM_FILE_NAME + hostName); 67 | if (bdSeqNumFile.exists()) { 68 | int bdSeqNum = Integer 69 | .parseInt(Files.readString(Paths.get(HOST_BD_SEQ_NUM_FILE_NAME + hostName), StandardCharsets.UTF_8)); 70 | logger.info("Next Host Death bdSeq number: {}", bdSeqNum); 71 | return bdSeqNum; 72 | } else { 73 | return -1; 74 | } 75 | } catch (Exception e) { 76 | logger.error("Failed to get the bdSeq number from the persistent directory", e); 77 | return -1; 78 | } 79 | } 80 | 81 | public static void setNextHostDeathBdSeqNum(String hostName, int bdSeqNum) { 82 | try { 83 | Files.writeString(Paths.get(HOST_BD_SEQ_NUM_FILE_NAME + hostName), Integer.toString(bdSeqNum), 84 | StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING); 85 | } catch (Exception e) { 86 | logger.error("Failed to write the Host bdSeq number to the persistent directory", e); 87 | } 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /tck/src/main/java/org/eclipse/sparkplug/tck/test/common/StatePayload.java: -------------------------------------------------------------------------------- 1 | /******************************************************************************* 2 | * Copyright (c) 2022 Cirrus Link Solutions 3 | * 4 | * All rights reserved. 
This program and the accompanying materials 5 | * are made available under the terms of the Eclipse Public License v2.0 6 | * which is available at https://www.eclipse.org/legal/epl-2.0/ 7 | * 8 | * SPDX-License-Identifier: EPL-2.0 9 | * 10 | * Contributors: 11 | * Wes Johnson - initial implementation and documentation 12 | *******************************************************************************/ 13 | package org.eclipse.sparkplug.tck.test.common; 14 | 15 | import com.fasterxml.jackson.annotation.JsonProperty; 16 | 17 | public class StatePayload { 18 | 19 | @JsonProperty("online") 20 | private Boolean online; 21 | 22 | @JsonProperty("timestamp") 23 | private Long timestamp; 24 | 25 | public StatePayload() { 26 | this.online = null; 27 | this.timestamp = null; 28 | } 29 | 30 | public StatePayload(Boolean online, Long timestamp) { 31 | super(); 32 | this.online = online; 33 | this.timestamp = timestamp; 34 | } 35 | 36 | public Boolean isOnline() { 37 | return online; 38 | } 39 | 40 | public void setOnline(Boolean online) { 41 | this.online = online; 42 | } 43 | 44 | public Long getTimestamp() { 45 | return timestamp; 46 | } 47 | 48 | public void setTimestamp(Long timestamp) { 49 | this.timestamp = timestamp; 50 | } 51 | 52 | @Override 53 | public String toString() { 54 | StringBuilder builder = new StringBuilder(); 55 | builder.append("StatePayload [online="); 56 | builder.append(online); 57 | builder.append(", timestamp="); 58 | builder.append(timestamp); 59 | builder.append("]"); 60 | return builder.toString(); 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /tck/webconsole/.editorconfig: -------------------------------------------------------------------------------- 1 | # editorconfig.org 2 | root = true 3 | 4 | [*] 5 | indent_style = space 6 | indent_size = 4 7 | end_of_line = lf 8 | charset = utf-8 9 | trim_trailing_whitespace = true 10 | insert_final_newline = true 11 | 12 | [*.md] 13 | trim_trailing_whitespace = false 14 | -------------------------------------------------------------------------------- /tck/webconsole/.gitignore: -------------------------------------------------------------------------------- 1 | # Created by .ignore support plugin (hsz.mobi) 2 | ### Node template 3 | # Logs 4 | /logs 5 | *.log 6 | npm-debug.log* 7 | yarn-debug.log* 8 | yarn-error.log* 9 | 10 | # Runtime data 11 | pids 12 | *.pid 13 | *.seed 14 | *.pid.lock 15 | 16 | # Directory for instrumented libs generated by jscoverage/JSCover 17 | lib-cov 18 | 19 | # Coverage directory used by tools like istanbul 20 | coverage 21 | 22 | # nyc test coverage 23 | .nyc_output 24 | 25 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 26 | .grunt 27 | 28 | # Bower dependency directory (https://bower.io/) 29 | bower_components 30 | 31 | # node-waf configuration 32 | .lock-wscript 33 | 34 | # Compiled binary addons (https://nodejs.org/api/addons.html) 35 | build/Release 36 | 37 | # Dependency directories 38 | node_modules/ 39 | jspm_packages/ 40 | 41 | # TypeScript v1 declaration files 42 | typings/ 43 | 44 | # Optional npm cache directory 45 | .npm 46 | 47 | # Optional eslint cache 48 | .eslintcache 49 | 50 | # Optional REPL history 51 | .node_repl_history 52 | 53 | # Output of 'npm pack' 54 | *.tgz 55 | 56 | # Yarn Integrity file 57 | .yarn-integrity 58 | 59 | # dotenv environment variables file 60 | .env 61 | 62 | # parcel-bundler cache (https://parceljs.org/) 63 | .cache 64 | 65 | # next.js build output 66 | .next 67 | 
68 | # nuxt.js build output 69 | .nuxt 70 | 71 | # Nuxt generate 72 | dist 73 | 74 | # vuepress build output 75 | .vuepress/dist 76 | 77 | # Serverless directories 78 | .serverless 79 | 80 | # IDE / Editor 81 | .idea 82 | .vscode 83 | 84 | # Service worker 85 | sw.* 86 | 87 | # macOS 88 | .DS_Store 89 | 90 | # Vim swap files 91 | *.swp 92 | 93 | .yarn/* 94 | !.yarn/cache 95 | !.yarn/patches 96 | !.yarn/plugins 97 | !.yarn/releases 98 | !.yarn/sdks 99 | !.yarn/versions 100 | -------------------------------------------------------------------------------- /tck/webconsole/README.md: -------------------------------------------------------------------------------- 1 | # webconsole 2 | 3 | ## Build Setup 4 | 5 | ```bash 6 | # install dependencies 7 | $ yarn install 8 | 9 | # serve with hot reload at localhost:3000 10 | $ yarn dev 11 | 12 | # build for production and launch server 13 | $ yarn build 14 | $ yarn start 15 | 16 | # generate static project 17 | $ yarn generate 18 | ``` 19 | 20 | For detailed explanation on how things work, check out [Nuxt.js docs](https://nuxtjs.org). 21 | 22 | If you get an error message on "yarn install" of the sort: 23 | 24 | `The engine “node” is incompatible with this module. Expected version “>=10". Got “8.9.4”` 25 | 26 | then the command 27 | 28 | `yarn install --ignore-engines` 29 | 30 | might help. -------------------------------------------------------------------------------- /tck/webconsole/assets/EclipseLogo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/eclipse-sparkplug/sparkplug/e45b61002a2a0cacb4f11fa4be88670435ccfcb0/tck/webconsole/assets/EclipseLogo.png -------------------------------------------------------------------------------- /tck/webconsole/assets/README.md: -------------------------------------------------------------------------------- 1 | # ASSETS 2 | 3 | **This directory is not required, you can delete it if you don't want to use it.** 4 | 5 | This directory contains your un-compiled assets such as LESS, SASS, or JavaScript. 6 | 7 | More information about the usage of this directory in [the documentation](https://nuxtjs.org/guide/assets#webpacked). 8 | -------------------------------------------------------------------------------- /tck/webconsole/assets/SparkplugLogo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/eclipse-sparkplug/sparkplug/e45b61002a2a0cacb4f11fa4be88670435ccfcb0/tck/webconsole/assets/SparkplugLogo.png -------------------------------------------------------------------------------- /tck/webconsole/components/Navbar.vue: -------------------------------------------------------------------------------- 1 | 14 | 15 | 39 | 40 | -------------------------------------------------------------------------------- /tck/webconsole/components/README.md: -------------------------------------------------------------------------------- 1 | # COMPONENTS 2 | 3 | **This directory is not required, you can delete it if you don't want to use it.** 4 | 5 | The components directory contains your Vue.js Components. 
6 | 7 | _Nuxt.js doesn't supercharge these components._ 8 | -------------------------------------------------------------------------------- /tck/webconsole/components/Sparkplug/Broker.vue: -------------------------------------------------------------------------------- 1 | 14 | 15 | 47 | 48 | 106 | -------------------------------------------------------------------------------- /tck/webconsole/components/Sparkplug/EoNNode.vue: -------------------------------------------------------------------------------- 1 | 14 | 15 | 47 | 48 | 109 | -------------------------------------------------------------------------------- /tck/webconsole/components/Sparkplug/HostApplication.vue: -------------------------------------------------------------------------------- 1 | 14 | 15 | 40 | 41 | 95 | -------------------------------------------------------------------------------- /tck/webconsole/components/Sparkplug/Logo.vue: -------------------------------------------------------------------------------- 1 | 14 | 15 | 53 | 54 | -------------------------------------------------------------------------------- /tck/webconsole/components/Tck/AllTests.vue: -------------------------------------------------------------------------------- 1 | 14 | 15 | 30 | 31 | 48 | -------------------------------------------------------------------------------- /tck/webconsole/components/Tck/Logging.vue: -------------------------------------------------------------------------------- 1 | 14 | 15 | 37 | 38 | 62 | -------------------------------------------------------------------------------- /tck/webconsole/components/Tck/TestResultSetup.vue: -------------------------------------------------------------------------------- 1 | 14 | 15 | 39 | 40 | 41 | 51 | -------------------------------------------------------------------------------- /tck/webconsole/components/Tck/TestsInformation.vue: -------------------------------------------------------------------------------- 1 | 14 | 15 | 33 | 34 | -------------------------------------------------------------------------------- /tck/webconsole/components/WebConsole/Information.vue: -------------------------------------------------------------------------------- 1 | 14 | 15 | -------------------------------------------------------------------------------- /tck/webconsole/layouts/README.md: -------------------------------------------------------------------------------- 1 | # LAYOUTS 2 | 3 | **This directory is not required, you can delete it if you don't want to use it.** 4 | 5 | This directory contains your Application Layouts. 6 | 7 | More information about the usage of this directory in [the documentation](https://nuxtjs.org/guide/views#layouts). 8 | -------------------------------------------------------------------------------- /tck/webconsole/layouts/default.vue: -------------------------------------------------------------------------------- 1 | 14 | 15 | 20 | 21 | 77 | -------------------------------------------------------------------------------- /tck/webconsole/middleware/README.md: -------------------------------------------------------------------------------- 1 | # MIDDLEWARE 2 | 3 | **This directory is not required, you can delete it if you don't want to use it.** 4 | 5 | This directory contains your application middleware. 6 | Middleware let you define custom functions that can be run before rendering either a page or a group of pages. 7 | 8 | More information about the usage of this directory in [the documentation](https://nuxtjs.org/guide/routing#middleware). 
9 | -------------------------------------------------------------------------------- /tck/webconsole/nuxt.config.js: -------------------------------------------------------------------------------- 1 | export default { 2 | // Global page headers: https://go.nuxtjs.dev/config-head 3 | head: { 4 | title: "webconsole", 5 | htmlAttrs: { 6 | lang: "en" 7 | }, 8 | meta: [ 9 | { charset: "utf-8" }, 10 | { name: "viewport", content: "width=device-width, initial-scale=1" }, 11 | { hid: "description", name: "description", content: "" } 12 | ], 13 | link: [{ rel: "icon", type: "image/x-icon", href: "/favicon.ico" }] 14 | }, 15 | 16 | // Global CSS: https://go.nuxtjs.dev/config-css 17 | css: [], 18 | 19 | // Plugins to run before rendering page: https://go.nuxtjs.dev/config-plugins 20 | plugins: [], 21 | 22 | // Auto import components: https://go.nuxtjs.dev/config-components 23 | components: true, 24 | 25 | // Modules for dev and build (recommended): https://go.nuxtjs.dev/config-modules 26 | buildModules: [], 27 | 28 | // Modules: https://go.nuxtjs.dev/config-modules 29 | modules: [ 30 | // https://go.nuxtjs.dev/bootstrap 31 | "bootstrap-vue/nuxt", 32 | "nuxt-clipboard2" 33 | ], 34 | 35 | bootstrapVue: { 36 | // Install the `IconsPlugin` plugin (in addition to `BootstrapVue` plugin) 37 | icons: true 38 | }, 39 | 40 | // Build Configuration: https://go.nuxtjs.dev/config-build 41 | build: {} 42 | }; 43 | -------------------------------------------------------------------------------- /tck/webconsole/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "webconsole", 3 | "version": "1.0.0", 4 | "private": true, 5 | "scripts": { 6 | "dev": "nuxt", 7 | "build": "nuxt build", 8 | "start": "nuxt start", 9 | "generate": "nuxt generate" 10 | }, 11 | "dependencies": { 12 | "@babel/core": "^7.15.0", 13 | "@babel/plugin-proposal-private-property-in-object": "^7.14.5", 14 | "bootstrap": "^4.6.0", 15 | "bootstrap-vue": "^2.21.2", 16 | "chokidar": "^3.5.2", 17 | "consola": "^2.15.3", 18 | "core-js": "^3.16.2", 19 | "lodash": "^4.17.21", 20 | "mqtt": "^4.2.8", 21 | "nuxt": "^2.15.8", 22 | "nuxt-clipboard2": "^0.2.1", 23 | "paho-mqtt": "^1.1.0", 24 | "uuid": "^8.3.2", 25 | "vue-clipboard2": "^0.3.1" 26 | }, 27 | "packageManager": "yarn@3.0.1" 28 | } 29 | -------------------------------------------------------------------------------- /tck/webconsole/pages/README.md: -------------------------------------------------------------------------------- 1 | # PAGES 2 | 3 | This directory contains your Application Views and Routes. 4 | The framework reads all the `*.vue` files inside this directory and creates the router of your application. 5 | 6 | More information about the usage of this directory in [the documentation](https://nuxtjs.org/guide/routing). 7 | -------------------------------------------------------------------------------- /tck/webconsole/plugins/README.md: -------------------------------------------------------------------------------- 1 | # PLUGINS 2 | 3 | **This directory is not required, you can delete it if you don't want to use it.** 4 | 5 | This directory contains Javascript plugins that you want to run before mounting the root Vue.js application. 6 | 7 | More information about the usage of this directory in [the documentation](https://nuxtjs.org/guide/plugins). 
8 | -------------------------------------------------------------------------------- /tck/webconsole/prettier.config.js: -------------------------------------------------------------------------------- 1 | // prettier.config.js or .prettierrc.js 2 | module.exports = { 3 | trailingComma: "es5", 4 | tabWidth: 2, 5 | semi: true, 6 | printWidth: 120, 7 | arrowParens: "always", 8 | jsxBracketSameLine: true, 9 | }; 10 | -------------------------------------------------------------------------------- /tck/webconsole/static/README.md: -------------------------------------------------------------------------------- 1 | # STATIC 2 | 3 | **This directory is not required, you can delete it if you don't want to use it.** 4 | 5 | This directory contains your static files. 6 | Each file inside this directory is mapped to `/`. 7 | Thus you'd want to delete this README.md before deploying to production. 8 | 9 | Example: `/static/robots.txt` is mapped as `/robots.txt`. 10 | 11 | More information about the usage of this directory in [the documentation](https://nuxtjs.org/guide/assets#static). 12 | -------------------------------------------------------------------------------- /tck/webconsole/static/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/eclipse-sparkplug/sparkplug/e45b61002a2a0cacb4f11fa4be88670435ccfcb0/tck/webconsole/static/favicon.ico -------------------------------------------------------------------------------- /tck/webconsole/store/README.md: -------------------------------------------------------------------------------- 1 | # STORE 2 | 3 | **This directory is not required, you can delete it if you don't want to use it.** 4 | 5 | This directory contains your Vuex Store files. 6 | Vuex Store option is implemented in the Nuxt.js framework. 7 | 8 | Creating a file in this directory automatically activates the option in the framework. 9 | 10 | More information about the usage of this directory in [the documentation](https://nuxtjs.org/guide/vuex-store). 11 | --------------------------------------------------------------------------------
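The `StatePayload` class listed above maps the Sparkplug Host Application STATE message onto two JSON properties, `online` and `timestamp`, published on the `spBv1.0/STATE/{host_application_id}` topic. The following is a minimal sketch, not part of the repository, showing the JSON shape a Jackson round trip produces; the host ID `my_host_app` and the `StatePayloadExample` class name are illustrative, and it assumes `jackson-databind` is on the classpath (the TCK already uses Jackson annotations).

```java
// Minimal sketch (not part of the repository): serialize and parse a Sparkplug
// STATE payload with Jackson. "my_host_app" is an illustrative host ID.
import com.fasterxml.jackson.databind.ObjectMapper;

import org.eclipse.sparkplug.tck.test.common.Constants;
import org.eclipse.sparkplug.tck.test.common.StatePayload;

public class StatePayloadExample {
    public static void main(String[] args) throws Exception {
        final ObjectMapper mapper = new ObjectMapper();

        // An "online" STATE payload, e.g. {"online":true,"timestamp":1672531200000}
        final StatePayload birth = new StatePayload(true, System.currentTimeMillis());
        final String json = mapper.writeValueAsString(birth);
        System.out.println(Constants.TOPIC_ROOT_STATE + "/my_host_app -> " + json);

        // Round trip back into a StatePayload via its no-arg constructor and setters
        final StatePayload parsed = mapper.readValue(json, StatePayload.class);
        System.out.println(parsed); // StatePayload [online=true, timestamp=...]
    }
}
```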