├── package.json
├── icons
├── sparkplug.svg
└── sparkplugio.svg
├── .gitignore
├── examples
├── MQTT Publish and Subscribe.json
└── Simple Device.json
├── locales
└── en-US
│ ├── mqtt-sparkplug-plus.json
│ └── mqtt-sparkplug-plus.html
├── CHANGELOG.md
├── test
├── sparkplug_device_dataset_spec.js
├── sparkplug_in__spec.js
├── sparkplug_out__spec.js
├── sparkplug_device_command_spec.js
├── sparkplug_device_store_forward_spec.js
├── sparkplug_EoN_command_spec.js
├── sparkplug_device_template_spec.js
└── sparkplug_device__spec.js
├── readme.md
└── mqtt-sparkplug-plus.html
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "node-red-contrib-mqtt-sparkplug-plus",
3 | "version": "2.1.11",
4 | "repository": {
5 | "type": "git",
6 | "url": "https://github.com/thomassorensen2650/node-red-contrib-mqtt-sparkplug-plus.git"
7 | },
8 | "engines": {
9 | "node": ">=14.0.0"
10 | },
11 | "description": "MQTT-Sparkplug-Plus is a set of Node-Red nodes that will enable Node-Red to communicate with an MQTT server using the Sparkplug B protocol.",
12 | "node-red": {
13 | "version": ">=2.0.0",
14 | "nodes": {
15 | "mqtt-sparkplug-plus": "mqtt-sparkplug-plus.js"
16 | }
17 | },
18 | "scripts": {
19 | "test": "mocha \"test/**/*_spec.js\""
20 | },
21 | "keywords": [
22 | "MQTT",
23 | "Sparkplug",
24 | "node-red"
25 | ],
26 | "author": "Thomas Sørensen",
27 | "license": "ISC",
28 | "dependencies": {
29 | "https-proxy-agent": "^7.0.2",
30 | "mqtt": "^4.3.7",
31 | "pako": "^2.1.0",
32 | "protobufjs": "^7.2.5",
33 | "sparkplug-payload": "1.0.3",
34 | "long" : "^5.2.3"
35 | },
36 | "devDependencies": {
37 | "mocha": "^10.2.0",
38 | "node-red": "^3.1.3",
39 | "node-red-node-test-helper": "^0.3.3"
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
/icons/sparkplug.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/icons/sparkplugio.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | lerna-debug.log*
8 | .pnpm-debug.log*
9 |
10 | # Diagnostic reports (https://nodejs.org/api/report.html)
11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
12 |
13 | # Runtime data
14 | pids
15 | *.pid
16 | *.seed
17 | *.pid.lock
18 |
19 | # Directory for instrumented libs generated by jscoverage/JSCover
20 | lib-cov
21 |
22 | # Coverage directory used by tools like istanbul
23 | coverage
24 | *.lcov
25 |
26 | # nyc test coverage
27 | .nyc_output
28 |
29 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
30 | .grunt
31 |
32 | # Bower dependency directory (https://bower.io/)
33 | bower_components
34 |
35 | # node-waf configuration
36 | .lock-wscript
37 |
38 | # Compiled binary addons (https://nodejs.org/api/addons.html)
39 | build/Release
40 |
41 | # Dependency directories
42 | node_modules/
43 | jspm_packages/
44 |
45 | # Snowpack dependency directory (https://snowpack.dev/)
46 | web_modules/
47 |
48 | # TypeScript cache
49 | *.tsbuildinfo
50 |
51 | # Optional npm cache directory
52 | .npm
53 |
54 | # Optional eslint cache
55 | .eslintcache
56 |
57 | # Microbundle cache
58 | .rpt2_cache/
59 | .rts2_cache_cjs/
60 | .rts2_cache_es/
61 | .rts2_cache_umd/
62 |
63 | # Optional REPL history
64 | .node_repl_history
65 |
66 | # Output of 'npm pack'
67 | *.tgz
68 |
69 | # Yarn Integrity file
70 | .yarn-integrity
71 |
72 | # dotenv environment variables file
73 | .env
74 | .env.test
75 | .env.production
76 |
77 | # parcel-bundler cache (https://parceljs.org/)
78 | .cache
79 | .parcel-cache
80 |
81 | # Next.js build output
82 | .next
83 | out
84 |
85 | # Nuxt.js build / generate output
86 | .nuxt
87 | dist
88 |
89 | # Gatsby files
90 | .cache/
91 | # Comment in the public line in if your project uses Gatsby and not Next.js
92 | # https://nextjs.org/blog/next-9-1#public-directory-support
93 | # public
94 |
95 | # vuepress build output
96 | .vuepress/dist
97 |
98 | # Serverless directories
99 | .serverless/
100 |
101 | # FuseBox cache
102 | .fusebox/
103 |
104 | # DynamoDB Local files
105 | .dynamodb/
106 |
107 | # TernJS port file
108 | .tern-port
109 |
110 | # Stores VSCode versions used for testing VSCode extensions
111 | .vscode-test
112 |
113 | # yarn v2
114 | .yarn/cache
115 | .yarn/unplugged
116 | .yarn/build-state.yml
117 | .yarn/install-state.gz
118 | .pnp.*
119 | package-lock.json
120 | .mqtt-sparkplug.js.swp
121 |
--------------------------------------------------------------------------------
/examples/MQTT Publish and Subscribe.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "id": "239c9025714089d3",
4 | "type": "tab",
5 | "label": "MQTT Publish & Subscribe",
6 | "disabled": false,
7 | "info": "Example on how MQTT Sparkplug out and MQTT Sparkplug in nodes can be used"
8 | },
9 | {
10 | "id": "9d1c7a8cf6909762",
11 | "type": "mqtt sparkplug in",
12 | "z": "239c9025714089d3",
13 | "name": "",
14 | "topic": "spBv1.0/+/+/#",
15 | "qos": "2",
16 | "broker": "0d831bd9ba588536",
17 | "x": 210,
18 | "y": 100,
19 | "wires": [
20 | [
21 | "d34cd297becb6078"
22 | ]
23 | ]
24 | },
25 | {
26 | "id": "ac1434142dd6fe0a",
27 | "type": "mqtt sparkplug out",
28 | "z": "239c9025714089d3",
29 | "name": "",
30 | "topic": "spBv1.0/My Devices/NCMD/Node-Red",
31 | "qos": "",
32 | "retain": "",
33 | "broker": "0d831bd9ba588536",
34 | "x": 500,
35 | "y": 200,
36 | "wires": []
37 | },
38 | {
39 | "id": "829cbe0b130463ac",
40 | "type": "inject",
41 | "z": "239c9025714089d3",
42 | "name": "Send Rebirth Request",
43 | "props": [
44 | {
45 | "p": "payload"
46 | },
47 | {
48 | "p": "topic",
49 | "vt": "str"
50 | }
51 | ],
52 | "repeat": "",
53 | "crontab": "",
54 | "once": false,
55 | "onceDelay": 0.1,
56 | "topic": "",
57 | "payload": "{\"metrics\":[{\"name\":\"Node Control/Rebirth\",\"value\":true,\"type\":\"Boolean\"}]}",
58 | "payloadType": "json",
59 | "x": 180,
60 | "y": 200,
61 | "wires": [
62 | [
63 | "ac1434142dd6fe0a"
64 | ]
65 | ]
66 | },
67 | {
68 | "id": "d34cd297becb6078",
69 | "type": "debug",
70 | "z": "239c9025714089d3",
71 | "name": "",
72 | "active": true,
73 | "tosidebar": true,
74 | "console": false,
75 | "tostatus": false,
76 | "complete": "false",
77 | "statusVal": "",
78 | "statusType": "auto",
79 | "x": 410,
80 | "y": 100,
81 | "wires": []
82 | },
83 | {
84 | "id": "0d831bd9ba588536",
85 | "type": "mqtt-sparkplug-broker",
86 | "name": "Local Host",
87 | "deviceGroup": "My Devices",
88 | "eonName": "Node-Red",
89 | "broker": "localhost",
90 | "port": "1883",
91 | "clientid": "",
92 | "usetls": false,
93 | "protocolVersion": "4",
94 | "keepalive": "60",
95 | "cleansession": true,
96 | "enableStoreForward": false,
97 | "compressAlgorithm": "",
98 | "primaryScada": "MY SCADA"
99 | }
100 | ]
--------------------------------------------------------------------------------
/examples/Simple Device.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "id": "55018ef8f0d13b74",
4 | "type": "mqtt sparkplug device",
5 | "z": "8221da631e912105",
6 | "name": "My Device",
7 | "metrics": {
8 | "testing/test1": {
9 | "dataType": "Int32"
10 | },
11 | "testing/test2": {
12 | "dataType": "Int32"
13 | }
14 | },
15 | "broker": "0d831bd9ba588536",
16 | "x": 370,
17 | "y": 120,
18 | "wires": [
19 | [
20 | "ec9bfe25385c2e74"
21 | ]
22 | ]
23 | },
24 | {
25 | "id": "75bcd1ea33eb5014",
26 | "type": "inject",
27 | "z": "8221da631e912105",
28 | "name": "Send Metrics",
29 | "props": [
30 | {
31 | "p": "payload"
32 | },
33 | {
34 | "p": "topic",
35 | "vt": "str"
36 | }
37 | ],
38 | "repeat": "",
39 | "crontab": "",
40 | "once": false,
41 | "onceDelay": 0.1,
42 | "topic": "",
43 | "payload": "{\"metrics\":[{\"name\":\"testing/test1\",\"value\":11},{\"name\":\"testing/test2\",\"value\":12}]}",
44 | "payloadType": "json",
45 | "x": 170,
46 | "y": 120,
47 | "wires": [
48 | [
49 | "55018ef8f0d13b74"
50 | ]
51 | ]
52 | },
53 | {
54 | "id": "ec9bfe25385c2e74",
55 | "type": "debug",
56 | "z": "8221da631e912105",
57 | "name": "NCMD",
58 | "active": true,
59 | "tosidebar": true,
60 | "console": false,
61 | "tostatus": false,
62 | "complete": "payload",
63 | "targetType": "msg",
64 | "statusVal": "",
65 | "statusType": "auto",
66 | "x": 570,
67 | "y": 120,
68 | "wires": []
69 | },
70 | {
71 | "id": "c733eafb2dd65e76",
72 | "type": "comment",
73 | "z": "8221da631e912105",
74 | "name": "Read Me",
75 | "info": "# Simple Flow\nThis flow shows a basic example of how an MQTT Sparkplug Device can be implemented.\n\nMetrics are sent in the payload, and NCMDs (write requests from other clients) are sent as an output.",
76 | "x": 380,
77 | "y": 60,
78 | "wires": []
79 | },
80 | {
81 | "id": "0d831bd9ba588536",
82 | "type": "mqtt-sparkplug-broker",
83 | "name": "Local Host",
84 | "deviceGroup": "My Devices",
85 | "eonName": "Node-Red",
86 | "broker": "localhost",
87 | "port": "1883",
88 | "clientid": "",
89 | "usetls": false,
90 | "protocolVersion": "4",
91 | "keepalive": "60",
92 | "cleansession": true,
93 | "birthTopic": "",
94 | "birthQos": "0",
95 | "birthPayload": "",
96 | "birthMsg": {},
97 | "closeTopic": "",
98 | "closeQos": "0",
99 | "closePayload": "",
100 | "closeMsg": {},
101 | "willTopic": "",
102 | "willQos": "0",
103 | "willPayload": "",
104 | "willMsg": {},
105 | "sessionExpiry": "",
106 | "credentials": {}
107 | }
108 | ]
--------------------------------------------------------------------------------
/locales/en-US/mqtt-sparkplug-plus.json:
--------------------------------------------------------------------------------
1 | {
2 | "mqtt-sparkplug-plus": {
3 | "label": {
4 | "name" : "Name",
5 | "username": "Username",
6 | "topic": "Topic",
7 | "password": "Password",
8 | "device": "Device",
9 |
10 | "aliasMetrics" : "Use Alias for Metrics",
11 | "compressoutput" : "Compress Body",
12 | "messagetype": "Msg. Type",
13 | "eon" : "Node",
14 | "group" : "Group",
15 | "allgroups": "All Groups",
16 | "allnodes": "All Nodes",
17 | "allmessagetypes": "All Message Types",
18 | "alldevices": "All Devices",
19 | "broker": "Server",
20 | "example": "e.g. localhost",
21 | "output": "Output",
22 | "qos": "QoS",
23 | "retain": "Retain",
24 | "clientid": "Client ID",
25 | "port": "Port",
26 | "keepalive": "Keep Alive",
27 | "cleansession": "Use clean session",
28 | "cleanstart": "Use clean start",
29 | "use-tls": "Use TLS",
30 | "tls-config":"TLS Configuration",
31 | "verify-server-cert":"Verify server certificate",
32 | "compatmode": "Use legacy MQTT 3.1 support",
33 | "userProperties": "User Properties",
34 | "subscriptionIdentifier": "Subscription ID",
35 | "flags": "Flags",
36 | "protocolVersion": "Protocol",
37 | "protocolVersion4": "MQTT V3.1.1",
38 | "topicAliasMaximum": "Alias Max",
39 | "maximumPacketSize": "Max Packet Size",
40 | "receiveMaximum": "Receive Max",
41 | "session": "Session",
42 | "delay": "Delay",
43 | "templates" : "Templates",
44 | "birthImmediately" : "Send Birth Immediately",
45 | "bufferDevice" : "Store Forward when not connected",
46 | "storeforward": "Store Forward when primary application is offline",
47 | "manualEoNBirth" : "Do not connect automatically"
48 | },
49 | "tip": {
50 | "storeforward" : "All the buffered values will be kept in memory. Do not use this for high-throughput applications.",
51 | "device": "Tip: Use '/' to group related metrics e.g. Machine1/Speed",
52 | "sparkplugformat": "Topic format: namespace/group_id/message_type/edge_node_id/[device_id]",
53 | "birthImmediately" : "The Send Birth Immediately option will send device online (DBIRTH) with NULL values as soon as node-red starts up."
54 | },
55 | "tabs-label": {
56 | "connection": "Connection",
57 | "security": "Security",
58 | "sparkplug": "Sparkplug",
59 | "advanced" : "Advanced",
60 | "metrics" : " Metrics"
61 | },
62 | "placeholder": {
63 | "clientid": "Leave blank for auto generated",
64 | "clientid-nonclean":"Must be set for non-clean sessions",
65 | "eonname":"Node-Red",
66 | "name": "Sparkplug Device",
67 | "command": "NDATA",
68 | "username" : "Leave blank for anonymous login"
69 | },
70 | "state": {
71 | "connected": "Connected to broker: __broker__",
72 | "disconnected": "Disconnected from broker: __broker__",
73 | "connect-failed": "Connection failed to broker: __broker__"
74 | },
75 | "retain": "Retain",
76 | "output": {
77 | "buffer": "a Buffer",
78 | "string": "a String",
79 | "base64": "a Base64 encoded string",
80 | "auto": "auto-detect (string or buffer)",
81 | "json": "a parsed JSON object"
82 | },
83 | "true": "true",
84 | "false": "false",
85 | "errors": {
86 | "buffer-full": "the buffer is full. Older data will be lost",
87 | "missing-config": "missing broker configuration",
88 | "missing-attribute-name": "All metrics must have a name",
89 | "nonclean-missingclientid": "No client ID set, using clean session",
90 | "invalid-topic": "The Topic is invalid",
91 | "device-unknown-metric": "The metric '__name__' is not recognized by the device",
92 | "device-no-metrics": "Metrics should be an Array",
93 | "payload-type-object": "Payload must be of type object",
94 | "unable-to-decode-message": "Unable to decode __type__ message. the error is : __error__",
95 | "unable-to-encode-message": "Unable to encode __type__ message. the error is : __error__",
96 | "not-defined": "Topic is not defined",
97 | "invalid-metric-definition": "Unable to use '__name__' as a metric. the error is : __error__",
98 | "unable-to-deserialize-templates" : "Unable to parse templates. The error is : __error__",
99 | "invalid-metric-data": "Unable to send data for metric '__name__'. the error is : __error__"
100 | }
101 | }
102 | }
103 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | ### 2.1.11: Maintenance Release
2 | Fixed:
3 | - #80 - Node sends message content of 'NDEATH'
4 | - #79 - metric timestamp not optional
5 | - #68 - All metrics are now cloned before any modifications.
6 | Added:
7 | - Added support for string timestamps
8 |
9 | ### 2.1.10: Maintenance Release
10 | Fixed:
11 | - #68 - shallow copy
12 | - Rebirth should ignore alias.
13 |
14 | ### 2.1.9: Maintenance Release
15 | Fixed:
16 | - Fixed #64 - bdSeq increase on rebirth
17 |
18 | ### 2.1.8: Maintenance Release
19 | Fixed:
20 | - Fixed NBIRTH on Primary Application state change as reported in #65
21 |
22 | ### 2.1.7: Maintenance Release
23 | Fixed:
24 | - Loosen the dependency requirement so that it now works with Node14 / Node-red 2.0.
25 |
26 | ### 2.1.2: Maintenance Release
27 | Added:
28 | - Added check to verify DCMD topic is correct. (This should never happen, but just in case)
29 | -
30 | ### 2.1.1: Maintenance Release
31 | Added:
32 | - Added support for unsigned integers
33 |
34 | Fixed:
35 | - Issue where old MQTT topic will be used when Devices was renamed.
36 | - Timestamp was not added to historical metric values
37 |
38 | ### 2.1.0: Maintenance Release
39 | Added:
40 | - Option to buffer when not connected
41 |
42 | Fixed:
43 | - Renamed primary SCADA to primary Application per the Sparkplug B spec.
44 |
45 | ### 2.0.1: Maintenance Release
46 | Fixed:
47 | - Dynamic DataSet fix
48 |
49 | ### 2.0.1: Maintenance Release
50 | Fixed:
51 | - Moved Broker Reconnect to Connection Tab
52 | - Fixed incorrect information in documentation
53 | - Fixed unit failed unit test.
54 |
55 | ### 2.0.0: Major Release
56 |
57 | New:
58 | - Added support for DataSets
59 | - Redesigned Broker configuration UI
60 | - Added support for manual connection of the EoN
61 | - Added connect command for EoN
62 | - Added set_name command for EoN node
63 | - Added set_name for device
64 | - Added set_group for EoN.
65 | - Support for parameter sorting
66 | - Updated all dependencies to newest versions.
67 |
68 | Fixed:
69 | - MQTT In now converts seq from Long to Number
70 | - Timestamps are now automatically converted from Long to Date
71 | - DCMD commands for the devices using aliases are now converted back to names correctly.
72 | - Mqtt In node will only parse topic in the Sparkplug namespace (MQTT in can now be used for other topics
73 | than sparkplug B)
74 |
75 | ### 1.4.1: Maintenance Release
76 |
77 | Fixed:
78 | - bdSeq now acts per v3 spec.
79 |
80 | ### 1.4.0: Maintenance Release
81 |
82 | New:
83 | - Added Birth Immediately option to allow sending DBirth on start up
84 | - Added support for Sparkplug B V3.0.0 style Primary SCADA STATE
85 |
86 | ### 1.3.2: Maintenance Release
87 |
88 | New:
89 | - Added support for metric aliases
90 |
91 |
92 | ### 1.3.1: Maintenance Release
93 |
94 | New:
95 | - Added support for device command (rebirth and death)
96 |
97 | Fixed:
98 | - Fixed minor issue that would make close on node-red redeploy time out.
99 |
100 | ### 1.3.0: Maintenance Release
101 |
102 | New:
103 | - _mqtt sparkplug device_ added support for metric properties (property sets)
104 |
105 | ### 1.2.0: Maintenance Release
106 |
107 | Fixed:
108 | - _mqtt sparkplug device_ datatype were not added to UI.
109 |
110 |
111 | ### 1.2.0: Maintenance Release
112 | New:
113 | - _mqtt sparkplug device_ node now supports dynamic metrics (metrics can be defined via msg.definition)
114 |
115 | Fixed:
116 | - _mqtt sparkplug device_ rebirth now sends correct NDEATH before NBIRTH
117 |
118 | ### 1.1.0: Maintenance Release
119 |
120 | New:
121 | - _mqtt sparkplug in_ and _mqtt sparkplug device_ node now supports compression (DEFLATE and GZIP)
122 | - _mqtt sparkplug out_ supports topic defined in input message
123 | - Invalid messages to _mqtt sparkplug out_ without metric types are now caught and a more friendly error message is now shown.
124 |
125 | ### 1.0.1: Maintenance Release
126 |
127 | Fixed:
128 | - Added missing dependency to MQTT that caused issues loading the nodes on some systems.
129 |
130 | ### 1.0.0: Major Release
131 |
132 | Fixed:
133 | - Null values are now correctly serialized.
134 |
135 | New:
136 | - Store Forward when primary SCADA is offline can be enabled
137 | - Added documentation for the *mqtt-sparkplug-broker* configuration node
138 | - Added new *mqtt sparkplug out* node
139 |
140 | ### 0.0.5: Maintenance Release
141 |
142 | Fixed:
143 | - Updated documentation
144 | - Standardized how invalid responses are handled
145 | - Unit tests coverage is now 80%-90%
146 | - majority of texts are from message catalog (i18n)
147 |
148 | New:
149 | - Added _mqtt sparkplug in_ node (clone of mqtt in with sparkplug decoding)
150 | - Updated colors and logos of nodes.
151 |
152 | ### 0.0.4: Maintenance Release
153 |
154 | Fixed
155 | - Removed dead code
156 | - Updated documentation with Optional Metrics timestamp (#1)
157 | - Moved more messages to message catalog (internationalization)
158 | - Support for metrics with NULL values
159 | - Added this change log
160 | - MQTT lib buffer functionality is now enabled. This will enable buffering of messages when the node is not connected to a broker (It still need to connect before it starts buffering).
161 | - started adding unit tests (very limited coverage)
162 |
163 | #### 0.0.3: Initial Release
164 |
165 | - First released version.
166 |
--------------------------------------------------------------------------------
/test/sparkplug_device_dataset_spec.js:
--------------------------------------------------------------------------------
1 | var helper = require("node-red-node-test-helper");
2 | var sparkplugNode = require("../mqtt-sparkplug-plus.js");
3 | var should = require("should");
4 | var mqtt = require("mqtt");
5 | var pako = require('pako');
6 |
7 | var spPayload = require('sparkplug-payload').get("spBv1.0");
8 | helper.init(require.resolve('node-red'));
9 | let testBroker = 'mqtt://localhost';
10 | var client = null;
11 |
// Test suite for the "mqtt sparkplug device" node's DataSet metric support.
// NOTE(review): these tests connect to an MQTT broker on localhost:1883
// (testBroker) — they assume a live broker is available; confirm before running.
describe('mqtt sparkplug device node - DataSet Support', function () {
	// Start a fresh embedded Node-RED test server before every test.
	beforeEach(function (done) {
		helper.startServer(done);
	});

	// Unload flows and stop the server after each test; close the raw MQTT
	// client if the test body opened one.
	// NOTE(review): client.end() runs after stopServer(done) is initiated —
	// looks order-sensitive; confirm this doesn't leave a dangling connection.
	afterEach(function (done) {
		helper.unload();
		helper.stopServer(done);
		if (client) {
			client.end();
		}
	});
	// Minimal flow under test: one sparkplug device node ("n1") declaring a
	// single DataSet metric "a", wired to a local broker config node ("b1").
	var simpleFlow = [
		{
			"id": "n1",
			"type": "mqtt sparkplug device",
			"name": "TEST100",
			"metrics": {
				"a": {
					"dataType": "DataSet"
				}
			},
			"broker": "b1"
		},
		{
			"id": "b1",
			"type": "mqtt-sparkplug-broker",
			"name": "Local Host",
			"deviceGroup": "My Devices",
			"eonName": "Node-Red",
			"broker": "localhost",
			"port": "1883",
			"clientid": "",
			"usetls": false,
			"protocolVersion": "4",
			"keepalive": "60",
			"cleansession": true,
			"enableStoreForward": false,
			"primaryScada": "MY SCADA"
		}
	];

	// Happy path: a well-formed DataSet value (2 columns, matching types /
	// columns / rows arrays) should be published in the DBIRTH message.
	it('Should send valid dataset', function (done) {
		let msg = {
			"payload" : {
				"metrics": [
					{
						"name": "a",
						"value": {
							"numOfColumns": 2,

							"types": [
								"String",
								"String"
							],
							"columns": [
								"Col1",
								"OtherCol"
							],
							"rows": [
								[
									"a",
									"A"
								],
								[
									"v",
									"B"
								]
							]
						}
					}
				]
			}
		};

		// Raw MQTT client subscribes to everything so the test can observe
		// what the device node publishes to the broker.
		client = mqtt.connect(testBroker);
		let n1;
		let b1;
		client.on('message', function (topic, message) {
			// Verify that we sent a DBirth Message to the broker

			if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red/TEST100"){


				// Decode the Sparkplug B protobuf payload and check the
				// DataSet round-tripped intact.
				var buffer = Buffer.from(message);
				var payload = spPayload.decodePayload(buffer);
				let val = payload.metrics[0].value;
				let metric = payload.metrics[0];

				val.types.should.containDeep(msg.payload.metrics[0].value.types)
				val.columns.should.containDeep(msg.payload.metrics[0].value.columns)
				val.rows.should.containDeep(msg.payload.metrics[0].value.rows)
				// numOfColumns decodes as a Long; compare its low 32 bits.
				val.numOfColumns.low.should.eql(2);
				//.should.containDeep(msg.payload.metrics);
				done();
			}
		});
		client.on('connect', function () {
			// Only load the flow (which triggers the DBIRTH) once the
			// observer subscription is in place, so no message is missed.
			client.subscribe('#', function (err) {
				if (!err) {
					helper.load(sparkplugNode, simpleFlow, function () {
						try {
							n1 = helper.getNode("n1");
							b1 = n1.brokerConn;
							n1.receive(msg);
						}catch (e) {
							done(e);
						}
					});
				}else {
					done(err);
				}
			})
		});
	});

	// A DataSet missing numOfColumns/types/columns should be rejected with
	// the invalid-metric-data warning (i18n message key is asserted).
	it('Should warn of invalid dataset', function (done) {
		let msg = {
			"payload" : {
				"metrics": [
					{
						"name": "a",
						"value": {
							"rows": [
								[
									"a",
									"A"
								],
								[
									"v",
									"B"
								]
							]
						}
					}
				]
			}
		};

		let n1;
		let b1;
		helper.load(sparkplugNode, simpleFlow, function () {
			try {
				n1 = helper.getNode("n1");
				n1.on('input', () => {
					n1.warn.should.be.calledWithExactly("mqtt-sparkplug-plus.errors.invalid-metric-data");
					done();
				});
				b1 = n1.brokerConn;
				n1.receive(msg);
			}catch (e) {
				done(e);
			}
		});
	});

	// numOfColumns (3) disagrees with the actual column/type array size (2)
	// — the node should warn rather than publish.
	it('Should warn if rowCount != actual array size', function (done) {
		let msg = {
			"payload" : {
				"metrics": [
					{
						"name": "a",
						"value": {
							"numOfColumns": 3,

							"types": [
								"String",
								"String"
							],
							"columns": [
								"Col1",
								"OtherCol"
							],
							"rows": [
								[
									"a",
									"A"
								],
								[
									"v",
									"B"
								]
							]
						}
					}
				]
			}
		};
		let n1;
		let b1;


		helper.load(sparkplugNode, simpleFlow, function () {
			try {
				n1 = helper.getNode("n1");
				n1.on('input', () => {
					n1.warn.should.be.calledWithExactly("mqtt-sparkplug-plus.errors.invalid-metric-data");
					done();
				});
				b1 = n1.brokerConn;
				n1.receive(msg);
			}catch (e) {
				done(e);
			}
		});
	})

	// types has 2 entries but columns only 1 — mismatched array sizes must
	// also trigger the invalid-metric-data warning.
	it('Should warn if arrays are not the same size', function (done) {
		let msg = {
			"payload" : {
				"metrics": [
					{
						"name": "a",
						"value": {
							"numOfColumns": 2,

							"types": [
								"String",
								"String"
							],
							"columns": [
								"Col1",
								//"OtherCol"
							],
							"rows": [
								[
									"a",
									"A"
								],
								[
									"v",
									"B"
								]
							]
						}
					}
				]
			}
		};

		let n1;
		let b1;

		helper.load(sparkplugNode, simpleFlow, function () {
			n1 = helper.getNode("n1");
			n1.on('input', () => {
				n1.warn.should.be.calledWithExactly("mqtt-sparkplug-plus.errors.invalid-metric-data");
				done();
			});

			try {

				b1 = n1.brokerConn;
				n1.receive(msg);
			}catch (e) {
				done(e);
			}
		});
	});
});
272 |
273 |
--------------------------------------------------------------------------------
/test/sparkplug_in__spec.js:
--------------------------------------------------------------------------------
1 | var helper = require("node-red-node-test-helper");
2 | var sparkplugNode = require("../mqtt-sparkplug-plus.js");
3 | var should = require("should");
4 | var mqtt = require("mqtt");
5 | var pako = require('pako');
6 |
7 | var spPayload = require('sparkplug-payload').get("spBv1.0");
8 | helper.init(require.resolve('node-red'));
9 |
/**
 * MQTT Sparkplug B in testing
 *
 * NOTE(review): these tests connect the broker config node "b1" to
 * localhost:1883 — they assume a live MQTT broker; confirm before running.
 */
describe('mqtt sparkplug in node', function () {

	// Fresh embedded Node-RED test server per test.
	beforeEach(function (done) {
		helper.startServer(done);
	});

	afterEach(function (done) {
		helper.unload();
		helper.stopServer(done);
	});

	// Flow under test: an "mqtt sparkplug in" node subscribed to all topics,
	// wired to a helper node ("n2") so the tests can observe its output, plus
	// an "mqtt sparkplug out" node used to publish through the same broker.
	var inExample = [
		{
			"id": "n2",
			"type": "helper",
		},
		{
			"id": "in",
			"type": "mqtt sparkplug in",
			"name": "",
			"topic": "#", //"spBv1.0/+/DDATA/+/+",
			"qos": "2",
			"broker": "b1",
			"wires": [["n2"]]
		},
		{
			"id": "out",
			"type": "mqtt sparkplug out",
			"topic": "spBv1.0/My Devices/DDATA/Node-Red/TEST2",
			"broker": "b1",
			"wires": []
		},
		{
			"id": "b1",
			"type": "mqtt-sparkplug-broker",
			"name": "Local Host",
			"deviceGroup": "My Devices",
			"eonName": "Node-Red",
			"broker": "localhost",
			"port": "1883",
			"clientid": "",
			"usetls": false,
			"protocolVersion": "4",
			"keepalive": "60",
			"cleansession": true,
			"credentials": {}
		}
	]
	var validMsg = {"timestamp":new Date(),"metrics":[{"name":"test","type":"Int32","value":100}],"seq":200}

	it('should output a subscribed topic', function (done) {

		helper.load(sparkplugNode, inExample, function () {

			var n2 = helper.getNode("n2");
			var out = helper.getNode("out");
			// FIX: was an implicit global assignment (no declaration).
			var b1 = out.brokerConn;
			b1.client.on("connect", function (msg) {
				out.receive({ payload: validMsg});
			});
			n2.on("input", function (msg) {
				// Output event from MQTT Sparkplug In
				try {
					if (msg.topic == "spBv1.0/My Devices/DDATA/Node-Red/TEST2") {
						msg.should.have.property('payload');
						msg.payload.metrics.should.deepEqual(validMsg.metrics);
						// FIX: compare epoch milliseconds on both sides. The
						// original compared getTime() (a number) against the
						// Date object itself, which can never be eql.
						msg.payload.timestamp.getTime().should.eql(validMsg.timestamp.getTime());

						//msg.payload.should.deepEqual(validMsg);
						done();
					}

				} catch(err) {
					done(err);
				}
			});
		});
	});

	// Messages outside the spBv1.0 namespace must pass through undecoded
	// (raw buffer payload).
	it('should only decode spB namespace', function (done) {

		helper.load(sparkplugNode, inExample, function () {

			var n2 = helper.getNode("n2");
			var out = helper.getNode("out");
			// FIX: was an implicit global assignment (no declaration).
			var b1 = out.brokerConn;

			b1.client.on('connect',function (connack) {
				b1.client.publish("MyTopic", "Hello there");
			});
			n2.on("input", function (msg) {
				// Output event from MQTT Sparkplug In
				try {
					if (msg.topic == "MyTopic") {
						let p = msg.payload.toString();
						p.should.eql("Hello there");
						//msg.payload.should.deepEqual(validMsg);
						done();
					}
				} catch(err) {
					done(err);
				}
			});
		});
	});


	// A SPBV1.0_COMPRESSED envelope with algorithm=DEFLATE must be inflated
	// back to the original payload before being emitted.
	it('should decompress a DEFLATE encoded topic', function (done) {

		var testMsg = {
			topic : "spBv1.0/My Devices/DDATA/Node-Red/TEST2",
			payload : spPayload.encodePayload(validMsg)
		}

		var compressedPayload = {
			"uuid" : "SPBV1.0_COMPRESSED",
			body : pako.deflate(testMsg.payload),
			metrics : [ {
				"name" : "algorithm",
				"value" : "DEFLATE",
				"type" : "string"
			} ]
		};
		compressedPayload = spPayload.encodePayload(compressedPayload);
		helper.load(sparkplugNode, inExample, function () {
			var n2 = helper.getNode("n2");
			var b1 = helper.getNode("b1");
			b1.client.on('connect',function (connack) {
				b1.client.publish(testMsg.topic, compressedPayload);
			});
			n2.on("input", function (msg) {
				try {
					if (msg.topic == testMsg.topic) {
						msg.should.have.property('payload');
						msg.payload.seq.should.eql(200)
						msg.payload.metrics.should.deepEqual(validMsg.metrics);
						done();
					}
				} catch(err) {
					done(err);
				}
			});
		});

	});

	// Same as above but with algorithm=GZIP.
	it('should decompress a GZIP encoded topic', function (done) {

		var testMsg = {
			topic : "spBv1.0/My Devices/DDATA/Node-Red/TEST2",
			payload : spPayload.encodePayload(validMsg)
		}

		var compressedPayload = {
			"uuid" : "SPBV1.0_COMPRESSED",
			body : pako.gzip(testMsg.payload),
			metrics : [ {
				"name" : "algorithm",
				"value" : "GZIP",
				"type" : "string"
			} ]
		};
		compressedPayload = spPayload.encodePayload(compressedPayload);

		helper.load(sparkplugNode, inExample, function () {
			var n2 = helper.getNode("n2");

			var b1 = helper.getNode("b1");
			b1.client.on('connect',function (connack) {
				b1.client.publish(testMsg.topic, compressedPayload);
			});

			n2.on("input", function (msg) {
				if (msg.topic == testMsg.topic) {
					try {
						msg.should.have.property('payload');
						msg.payload.seq.should.eql(200)
						msg.payload.metrics.should.deepEqual(validMsg.metrics);
						done();
					} catch(err) {
						done(err);
					}
				}
			});
		});

	});

	// A compressed envelope whose body is not valid DEFLATE data should make
	// the in node raise the unable-to-decode-message error (i18n key).
	it('should error on invalid compression topic', function (done) {

		var compressedPayload = {
			"uuid" : "SPBV1.0_COMPRESSED",
			body : "Hello World!",
			metrics : [ {
				"name" : "algorithm",
				"value" : "DEFLATE",
				"type" : "string"
			} ],
			seq : 200
		};
		compressedPayload = spPayload.encodePayload(compressedPayload);


		helper.load(sparkplugNode, inExample, function () {
			var n2 = helper.getNode("n2");
			var n1 = helper.getNode("in");
			var b1 = helper.getNode("b1");
			b1.client.on('connect',function (connack) {
				b1.client.publish("spBv1.0/My Devices/DDATA/Node-Red/TEST2", compressedPayload);
			});
			n1.on('call:error', call => {
				call.should.be.calledWithExactly('mqtt-sparkplug-plus.errors.unable-to-decode-message');
				done();
			});

			n2.on("input", function (msg) {
				try {
					msg.should.have.property('payload');
				} catch(err) {
					done(err);
				}
			});
		});

	});

});
240 |
--------------------------------------------------------------------------------
/test/sparkplug_out__spec.js:
--------------------------------------------------------------------------------
1 | var helper = require("node-red-node-test-helper");
2 | var sparkplugNode = require("../mqtt-sparkplug-plus.js");
3 | var should = require("should");
4 | var mqtt = require("mqtt");
5 | var pako = require('pako');
6 |
7 | var spPayload = require('sparkplug-payload').get("spBv1.0");
8 | helper.init(require.resolve('node-red'));
9 | let testBroker = 'mqtt://localhost';
10 | var client = null;
11 |
12 | /**
13 | * mqtt sparkplug out testing
14 | */
15 | describe('mqtt sparkplug out node', function () {
16 |
	beforeEach(function (done) {
		// Start the Node-RED test runtime before each test.
		helper.startServer(done);
	});

	afterEach(function (done) {
		// Unload all flows and stop the test server; close the raw MQTT
		// test client if a test opened one.
		helper.unload();
		helper.stopServer(done);
		if (client) {
			client.end();
		}
	});
	// Canonical DDATA payload used by the tests below; timestamp and seq are
	// fixed so the decoded message can be deep-compared after a round trip.
	var validMsg = {"timestamp":12345,"metrics":[{"name":"test","type":"Int32","value":100}],"seq":200}
29 |
30 |
31 | outFlow = [
32 | {
33 | "id": "n1",
34 | "type": "mqtt sparkplug out",
35 | "topic": "spBv1.0/My Devices/DDATA/Node-Red/TEST2",
36 | "broker": "b1",
37 | "wires": []
38 | },
39 | {
40 | "id": "b1",
41 | "type": "mqtt-sparkplug-broker",
42 | "name": "Local Host",
43 | "deviceGroup": "My Devices",
44 | "eonName": "Node-Red",
45 | "broker": "localhost",
46 | "port": "1883",
47 | "clientid": "",
48 | "usetls": false,
49 | "protocolVersion": "4",
50 | "keepalive": "60",
51 | "cleansession": true,
52 | "enableStoreForward": true,
53 | "primaryScada": "MY SCADA",
54 | "credentials": {}
55 | }
56 | ]
57 |
58 | outFlow2 = [
59 | {
60 | "id": "n1",
61 | "type": "mqtt sparkplug out",
62 | "topic": "spBv1.0/My Devices/DDATA/Node-Red/TEST2",
63 | "broker": "b1",
64 | "wires": []
65 | },
66 | {
67 | "id": "b1",
68 | "type": "mqtt-sparkplug-broker",
69 | "name": "Local Host",
70 | "deviceGroup": "My Devices",
71 | "eonName": "Node-Red",
72 | "broker": "localhost",
73 | "port": "1883",
74 | "clientid": "",
75 | "usetls": false,
76 | "protocolVersion": "4",
77 | "keepalive": "60",
78 | "cleansession": true,
79 | "enableStoreForward": false,
80 | "primaryScada": "MY SCADA",
81 | "credentials": {}
82 | }
83 | ]
84 | /**
85 | * Verify that we outout a topic even though the primary SCADA is offline
86 | */
	// Sends one message through the out-node and verifies it is published
	// even while the primary SCADA status is still OFFLINE (store & forward
	// is forced off on the live broker connection).
	it('should ouput a publish topic', function (done) {

		var n1 = null;
		client = mqtt.connect(testBroker);
		client.on('connect', function () {
			// Subscribe first so the published message cannot be missed.
			client.subscribe("spBv1.0/My Devices/DDATA/Node-Red/TEST2", function (err) {
				if (!err) {
					helper.load(sparkplugNode, outFlow, function () {
						n1 = helper.getNode("n1");
						n1.brokerConn.enableStoreForward = false; // disable buffering so the publish goes out despite SCADA being OFFLINE

						setTimeout(() => n1.receive({ payload: validMsg}), 500);
					});
				}
			});
		});

		client.on('message', function (topic, message) {
			var buffer = Buffer.from(message);
			var payload = spPayload.decodePayload(buffer);
			n1.brokerConn.primaryScadaStatus.should.eql("OFFLINE");
			// decodePayload returns Long values; normalize before deep-compare.
			payload.timestamp = payload.timestamp.toNumber()
			payload.seq = payload.seq.toNumber()
			payload.should.deepEqual(validMsg);
			done();
		});
	});
114 |
115 | /**
116 | * Verify that we outout a topic even though the primary SCADA is offline
117 | */
	// Verifies that an ISO-8601 string timestamp on a metric is converted to
	// the equivalent epoch-milliseconds value in the encoded payload.
	it('should ouput a publish topic with string timestamp on metric', function (done) {

		var p = {"timestamp":12345,"metrics":[{"name":"test","type":"Int32","value":100, "timestamp": "2024-08-31T14:32:18.626Z"}],"seq":200}
		// Expected epoch-ms for the string timestamp above.
		var sendTS = Date.parse(p.metrics[0].timestamp);

		var n1 = null;
		client = mqtt.connect(testBroker);
		client.on('connect', function () {
			client.subscribe("spBv1.0/My Devices/DDATA/Node-Red/TEST2", function (err) {
				if (!err) {
					helper.load(sparkplugNode, outFlow, function () {
						n1 = helper.getNode("n1");
						n1.brokerConn.enableStoreForward = false; // disable buffering so the publish goes out despite SCADA being OFFLINE

						setTimeout(() => n1.receive({ payload: p}), 500);
					});
				}
			});
		});

		client.on('message', function (topic, message) {
			var buffer = Buffer.from(message);
			var payload = spPayload.decodePayload(buffer);
			n1.brokerConn.primaryScadaStatus.should.eql("OFFLINE");
			// Metric timestamp comes back as a Long; compare as a number.
			payload.metrics[0].timestamp.toNumber().should.eql(sendTS)
			payload.seq = payload.seq.toNumber()
			//payload.should.deepEqual(validMsg);
			done();
		});
	});
148 |
149 |
150 |
151 |
152 |
153 |
154 | /**
155 |
156 | * Verify that we'll buffer if forced enabled,
157 |
158 | it('should buffer if publish topic', function (done) {
159 |
160 | outFlow[0].shouldBuffer = true;
161 |
162 | var n1 = null;
163 | client = mqtt.connect(testBroker);
164 | client.on('connect', function () {
165 | client.publish("STATE/MY SCADA", "OFFLINE", true)
166 |
167 | client.subscribe("spBv1.0/My Devices/DDATA/Node-Red/TEST2", function (err) {
168 | if (!err) {
169 | helper.load(sparkplugNode, outFlow, function () {
170 | n1 = helper.getNode("n1");
171 | n1.shouldBuffer = true; // Force enable to buffer
172 | setTimeout(() => n1.receive({ payload: validMsg}), 500);
173 | setTimeout(() => client.publish("STATE/MY SCADA", "ONLINE", true), 1000);
174 | });
175 | }
176 | });
177 | });
178 |
179 | client.on('message', function (topic, message) {
180 | console.log(topic);
181 | var buffer = Buffer.from(message);
182 | var payload = spPayload.decodePayload(buffer);
183 | n1.brokerConn.primaryScadaStatus.should.eql("ONLINE");
184 | payload.timestamp = payload.timestamp.toNumber()
185 | payload.seq = payload.seq.toNumber()
186 | payload.should.deepEqual(validMsg);
187 | done();
188 | });
189 | });
190 |
191 | /**
192 | * Verify that we'll buffer if forced enabled,
193 |
194 | it('should buffer on Sparkplug B 3.0.0 style State variable', function (done) {
195 |
196 | var n1 = null;
197 | client = mqtt.connect(testBroker);
198 | client.on('connect', function () {
199 | client.subscribe("spBv1.0/My Devices/DDATA/Node-Red/TEST2", function (err) {
200 | if (!err) {
201 | helper.load(sparkplugNode, outFlow, function () {
202 | n1 = helper.getNode("n1");
203 | n1.shouldBuffer = true; // Force enable to buffer
204 | setTimeout(() => n1.receive({ payload: validMsg}), 100);
205 | setTimeout(() => client.publish("spBv1.0/STATE/MY SCADA", JSON.stringify({ online : true, timestamp : new Date() }), true), 700); });
206 | }
207 |
208 | });
209 | });
210 |
211 | client.on('message', function (topic, message) {
212 | var buffer = Buffer.from(message);
213 | var payload = spPayload.decodePayload(buffer);
214 | n1.brokerConn.primaryScadaStatus.should.eql("ONLINE");
215 | payload.timestamp = payload.timestamp.toNumber()
216 | payload.seq = payload.seq.toNumber()
217 | payload.should.deepEqual(validMsg);
218 | done();
219 | });
220 | });
221 | */
222 | });
--------------------------------------------------------------------------------
/test/sparkplug_device_command_spec.js:
--------------------------------------------------------------------------------
1 | var helper = require("node-red-node-test-helper");
2 | var sparkplugNode = require("../mqtt-sparkplug-plus.js");
3 | var should = require("should");
4 | var mqtt = require("mqtt");
5 | var pako = require('pako');
6 |
7 | var spPayload = require('sparkplug-payload').get("spBv1.0");
8 | helper.init(require.resolve('node-red'));
9 | let testBroker = 'mqtt://localhost';
10 | var client = null;
11 |
// Flow with a sparkplug device (n1 -> helper h1) AND a sparkplug in-node
// subscribed to spBv1.0/# (n2 -> helper h2), sharing one broker config.
// Used to verify that a DCMD is delivered to the device output exactly once.
var complexFlow = [
	{
		"id": "n1",
		"type": "mqtt sparkplug device",
		"name": "TEST2",
		"metrics": {
			"test": {
				"dataType": "Int32"
			},
			"test2": {
				"dataType": "Int32"
			}
		},
		"broker": "b1",
		"wires": [["h1"]]
	},
	{
		"id": "h1",
		"type": "helper"
	},
	{
		"id": "n2",
		"type": "mqtt sparkplug in",
		"name": "",
		"topic": "spBv1.0/#",
		"qos": "2",
		"broker": "b1",
		"wires": [["h2"]]
	},
	{
		"id": "h2",
		"type": "helper"
	},
	{
		"id": "b1",
		"type": "mqtt-sparkplug-broker",
		"name": "Local Host",
		"deviceGroup": "My Devices",
		"eonName": "Node-Red",
		"broker": "localhost",
		"port": "1883",
		"clientid": "",
		"usetls": false,
		"protocolVersion": "4",
		"keepalive": "60",
		"cleansession": true,
		"enableStoreForward": false,
		"primaryScada": "MY SCADA"
	}
];
62 |
63 | describe('mqtt sparkplug device commands', function () {
	beforeEach(function (done) {
		// Start the Node-RED test runtime before each test.
		helper.startServer(done);
	});

	afterEach(function (done) {
		// Unload flows, stop the test server, and close the MQTT test client
		// if a test opened one.
		helper.unload();
		helper.stopServer(done);
		if (client) {
			client.end();
		}
	});
	// Minimal flow: one sparkplug device (two Int32 metrics) on a local
	// broker config with store & forward disabled.
	var simpleFlow = [
		{
			"id": "n1",
			"type": "mqtt sparkplug device",
			"name": "TEST2",
			"metrics": {
				"test": {
					"dataType": "Int32"
				},
				"test2": {
					"dataType": "Int32"
				}
			},
			"broker": "b1"
		},
		{
			"id": "b1",
			"type": "mqtt-sparkplug-broker",
			"name": "Local Host",
			"deviceGroup": "My Devices",
			"eonName": "Node-Red",
			"broker": "localhost",
			"port": "1883",
			"clientid": "",
			"usetls": false,
			"protocolVersion": "4",
			"keepalive": "60",
			"cleansession": true,
			"enableStoreForward": false,
			"primaryScada": "MY SCADA"
		}
	];
107 |
	// Renames the device BEFORE any metrics are sent, then sends all metrics;
	// the resulting DBIRTH must be published under the new device name.
	it('should support device rename', function (done) {

		// TODO: Implement
		client = mqtt.connect(testBroker);

		// 1. Send a rename command and metrics.. verify that metrics are send using the new name

		let n1;
		let b1;
		client.on('connect', function () {
			client.subscribe('#', function (err) {
				if (!err) {
					helper.load(sparkplugNode, simpleFlow, function () {
						try {
							n1 = helper.getNode("n1");
							b1 = n1.brokerConn;
							// Rename first, before the device has birthed.
							n1.receive({
								"command" : {
									"device" : {
										"set_name" : "NEW_NAME"
									}
								}
							})
							// Send all metrics to trigger DBIRTH
							n1.receive({
								"payload" : {
									"metrics": [
										{
											"name": "test",
											"value": 11,
										},
										{
											"name": "test2",
											"value": 11
										}
								]}
							});

						}catch (e) {
							done(e);
						}
					});
				}
			})
		});

		client.on('message', function (topic, message) {
			// DBIRTH must arrive on the renamed topic; seq 1 follows the NBIRTH (seq 0).
			if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red/NEW_NAME") {
				var buffer = Buffer.from(message);
				var payload = spPayload.decodePayload(buffer);
				payload.should.have.property("seq");
				payload.seq.toInt().should.eql(1);
				done();
			}
		});
	}); // it end
164 |
	// Renames the device AFTER it has birthed and verifies the lifecycle:
	// DBIRTH(TEST2) -> DDEATH(TEST2) -> DBIRTH(NEW_NAME), enforced via stateId.
	it('should rebirth on device rename', function (done) {
		// TODO: Implement
		client = mqtt.connect(testBroker);

		// 1. Send a rename command and metrics.. verify that metrics are send using the new name

		let n1;
		let b1;
		client.on('connect', function () {
			client.subscribe('#', function (err) {
				if (!err) {
					helper.load(sparkplugNode, simpleFlow, function () {


						try {
							n1 = helper.getNode("n1");
							b1 = n1.brokerConn;

							// Wait for the broker connection, then birth first
							// and rename second.
							b1.client.on('connect',function (connack) {
								n1.receive({
									"payload" : {
										"metrics": [
											{
												"name": "test",
												"value": 11,
											},
											{
												"name": "test2",
												"value": 11
											}
									]}
								});
								n1.receive({
									"command" : {
										"device" : {
											"set_name" : "NEW_NAME"
										}
									}
								})
							});
							// Send all metrics to trigger DBIRTH


						}catch (e) {
							done(e);
						}
					});
				}
			})
		});

		// stateId tracks the expected message order: 0=DBIRTH(old),
		// 1=DDEATH(old), 2=DBIRTH(new).
		var stateId = 0;
		client.on('message', function (topic, message) {
			if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red/TEST2") {
				stateId.should.eql(0);
				stateId++
			}
			if (topic === "spBv1.0/My Devices/DDEATH/Node-Red/TEST2") {
				stateId.should.eql(1);
				stateId++
			}
			if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red/NEW_NAME") {
				stateId.should.eql(2);
				done();
			}
		});
	});
	// Publishes one DCMD to the device and asserts the device node emits it
	// exactly once (done fires 500ms after the first output; a second output
	// would trip the firstMsg assertion).
	it('should be only output once', function (done) {
		helper.load(sparkplugNode, complexFlow, function () {
			var n1 = helper.getNode("n1");
			var firstMsg = true;
			// Helper node
			var h1 = helper.getNode("h1");
			// NOTE(review): h2 is looked up but never asserted on — presumably
			// a leftover; confirm whether the in-node output should be checked.
			var h2 = helper.getNode("h2");
			h1.on("input", function (msg) {
				// Only the first output is allowed; delay done() so a
				// duplicate output within 500ms would still fail the test.
				firstMsg.should.be.true();
				firstMsg = false;
				setTimeout(function() {
					done();
				}, 500);

			});




			client = mqtt.connect(testBroker);

			client.on('connect', function () {
				// First send input data to send DBIRTH
				n1.receive({
					"payload" : {
						"metrics": [
							{
								"name": "test",
								"value": 11,
							},
							{
								"name": "test2",
								"value": 11
							}
						]}
				});
				setTimeout(function(){
					// Send send command
					var command = {
						metrics : [
							{
								"name" : "test",
								"type" : "Boolean",
								"value": 123
							},
						]
					}
					let payload = spPayload.encodePayload(command);
					client.publish("spBv1.0/My Devices/DCMD/Node-Red/TEST2", payload);
				}, 200)

			});
		});
	});
286 | });
--------------------------------------------------------------------------------
/readme.md:
--------------------------------------------------------------------------------
1 | # MQTT Sparkplug implementation for Node-Red
2 |
MQTT-Sparkplug-Plus is a set of Node-Red nodes that will enable Node-Red to communicate with other clients over MQTT using the sparkplug b protocol. The package contains the following nodes:
4 |
5 | ## mqtt sparkplug device
The *mqtt sparkplug device* acts as a [Sparkplug B compliant](https://s3.amazonaws.com/ignition-modules/Current/Sparkplug+Specification.pdf) EoN Node.
7 |
8 | The node will connect to an MQTT broker (server) and act as an MQTT Edge of Network (EoN) Node.
9 |
10 | The client will handle the following message types automatically:
11 |
12 | * NBIRTH
13 | * DBIRTH
14 | * NCMD : REBIRTH
15 | * NDEATH
16 |
17 | The following message types can be implemented by the user:
18 |
19 | * DDATA (from node input)
20 | * DCMD (send as output to Node-Red)
21 | * DDEATH (set via a msg command)
22 |
The following sparkplug features can also be handled by the node:
24 | * Buffering when primary SCADA is not available
25 | * Compression
26 | * Metric Alias
27 |
28 | ### Input
One or more metrics can be written to the **mqtt sparkplug device** by passing the metrics details to the input of the **mqtt sparkplug device**. The default behaviour is to wait for a value for each of the metrics before sending the birth message, so make sure to pass all metrics on start up. This functionality can be disabled by selecting **Send Birth Immediately** in the advanced settings for the device.
30 |
31 |
32 | A birth message will not be send before all metrics have been received at least once. so make sure to pass all metrics as soon as possible on start up.
33 |
The **mqtt sparkplug device** expects the metrics in the following input payload format. DataTypes can be added, but they will also be added automatically by the node if omitted. If a metric does not have a value then an "is_null" attribute is added to the metric. Timestamps are optional per the specification. If a timestamp for a metric is not supplied, the current time will still be added to the DBIRTH message (nothing will be added to the DDATA).
35 |
36 | ```javascript
37 | msg.payload = {
38 | "metrics": [
39 | {
40 | "name": "testing/test1",
41 | "timestamp" : new Date(), // Timestamp is optional.
42 | "value": 11
43 | },
44 | {
45 | "name": "testing/test2",
46 | "value": 12
47 | }
48 | ]
49 | }
50 | ```
51 |
52 | Example sending DataSet:
53 | ```javascript
54 | msg.payload = {
55 | "metrics": [
56 | {
57 | "name": "a",
58 | "value": {
59 | "numOfColumns": 2,
60 | "types": [
61 | "String",
62 | "String"
63 | ],
64 | "columns": [
65 | "Col1",
66 | "OtherCol"
67 | ],
68 | "rows": [
69 | [
70 | "a",
71 | "A"
72 | ],
73 | [
74 | "v",
75 | "B"
76 | ]
77 | ]
78 | }
79 | }
80 | ]
81 | }
82 | ```
83 |
84 | ### Dynamic metric definitions
85 |
86 | Metrics definitions should normally be setup via the UI, but in some cases its beneficial to configure the metrics via code. This can be done by configuring the metrics using the `msg.definition` attribute.
87 |
88 | The following example shows a message that also sets the definition. __DO NOT__ include definition is each message, as it will trigger rebirth each time a valid `msg.definition` is processed by the node.
89 |
Setting metrics dynamically also allows you to set properties (like engineering units) for the metrics. This functionality is currently not supported when configuring metrics via the UI.
91 |
92 | The example belows shows how to set definitions via code (payload is optional):
93 | ```javascript
94 | msg = {
95 | "definition" : {
96 | "TEST/TEST" : {
97 | "dataType" : "Int32",
98 | "properties": {
99 | "engUnits": {
100 | "type": "String",
101 | "value": "inHg"
102 | }
103 | },
104 | }
105 | },
106 | "payload" : {
107 | "metrics" : [
108 | {
109 | "name" : "TEST/TEST",
110 | "value" : 5
111 | }]
112 | }
113 | };
114 | ```
115 |
_If the definition is set after the NBIRTH has been sent, then a REBIRTH is issued to notify clients about the new definition._
117 |
118 | ### Commands
119 |
Commands can be used to force a REBIRTH or to send a DDEATH to a device. Sending a DDEATH is a good way to indicate that a connected device is offline. If a DDEATH is sent, a new birth message will be sent on the next metric payload to the device or when a rebirth command is sent.
121 |
122 | Rebirth Example:
123 | ```javascript
124 | msg = {
125 | "command" : {
126 | "device" : {
127 | "rebirth" : true
128 | }
129 | }
130 | ```
131 |
132 | Death Example:
133 | ```javascript
134 | msg = {
135 | "command" : {
136 | "device" : {
137 | "death" : true
138 | }
139 | }
140 | ```
141 |
142 | Commands can also be used for dynamic configuration of the Devices and the EoN Nodes (MQTT Server Configuration).
143 |
144 | A device can be renamed by setting the follow command. This is especially useful if you have a device with 0 metrics and dynamic metrics.
145 |
146 | ```javascript
147 | msg = {
148 | "command" : {
149 | "device" : {
150 | "set_name" : "NEW_NAME"
151 | }
152 | }
153 | ```
154 |
An EoN Node (MQTT Server) can also be renamed using the following command. If the EoN Node is configured for manual connection, then the rename command can be combined with a connect command to set the name and connect to the Broker with the new name:
156 |
157 | ```javascript
158 | msg = {
159 | "command" : {
160 | "node" : {
161 | "set_name" : "NEW_NAME",
162 | "set_group" : "NEW_GROUP",
163 | "connect" : true
164 | }
165 | }
166 | ```
167 |
168 | ## mqtt sparkplug in
169 | The *mqtt sparkplug in* node makes it possible to subscribe to sparkplug b mqtt topics. The node is almost identical to the default node-red *mqtt in* node, but it will decode the sparkplug/protobuf messages and deliver them in json.
170 |
171 | ## mqtt sparkplug out
The *mqtt sparkplug out* node makes it possible to publish sparkplug b mqtt messages. The node is almost identical to the default node-red *mqtt out* node, but it will encode the sparkplug/protobuf payload before sending the message.
173 |
174 | # Installation
175 | npm install node-red-contrib-mqtt-sparkplug-plus
176 |
177 | # Usage
178 | The easiest way to get started is to start with the example that is provided with the module.
179 |
180 | ## From Example
181 | 1. Open the Node-Red Export Dialog
182 | 2. Select the Examples Tab
183 | 3. Navigate to Node-red-contrib-mqtt-sparkplug-plus, and select the Simple Device Example
184 | 4. Deploy changes to Node-Red
185 | 5. Press the "Send Metrics" Inject node to write metrics to the new device (This will trigger a DBIRTH and NDATA first and pressed and a NDATA each time after that)
186 |
187 | You'll need a MQTT broker running on your local computer
188 |
189 | ## Manual Configuration
190 | 1. Drag a **mqtt sparkplug device** to the Node-Red Runtime.
191 | 2. Configure MQTT broker connection for the device
192 | 3. Configure the name (this will be the name used in the messages) and the metrics
193 | 4. Configure upstream node-red nodes to send metrics data to the **mqtt sparkplug device**
194 | 5. Configure downstream node-red nodes to handle NCMDs (write commands)
195 |
196 | # Contributions
197 | Contributions are welcome. Please discuss new features before creating PR, and please try to add unit test for new features if possible.
198 |
--------------------------------------------------------------------------------
/test/sparkplug_device_store_forward_spec.js:
--------------------------------------------------------------------------------
1 | var helper = require("node-red-node-test-helper");
2 | var sparkplugNode = require("../mqtt-sparkplug-plus.js");
3 | var should = require("should");
4 | var mqtt = require("mqtt");
5 | var pako = require('pako');
6 |
7 | var spPayload = require('sparkplug-payload').get("spBv1.0");
8 | helper.init(require.resolve('node-red'));
9 | let testBroker = 'mqtt://localhost';
10 | var client = null;
11 |
12 | describe('mqtt sparkplug device node - Store Forward', function () {
	beforeEach(function (done) {
		// Start the Node-RED test runtime before each test.
		helper.startServer(done);
	});

	afterEach(function (done) {
		// Unload flows, stop the test server, and close the MQTT test client
		// if a test opened one.
		helper.unload();
		helper.stopServer(done);
		if (client) {
			client.end();
		}
	});
24 |
	// Minimal flow: one sparkplug device (two Int32 metrics) on a local
	// broker config. Individual tests toggle enableStoreForward /
	// manualEoNBirth on simpleFlow[1] before loading.
	var simpleFlow = [
		{
			"id": "n1",
			"type": "mqtt sparkplug device",
			"name": "TEST2",
			"metrics": {
				"test": {
					"dataType": "Int32"
				},
				"test2": {
					"dataType": "Int32"
				}
			},
			"broker": "b1"
		},
		{
			"id": "b1",
			"type": "mqtt-sparkplug-broker",
			"name": "Local Host",
			"deviceGroup": "My Devices",
			"eonName": "Node-Red",
			"broker": "localhost",
			"port": "1883",
			"clientid": "",
			"usetls": false,
			"protocolVersion": "4",
			"keepalive": "60",
			"cleansession": true,
			"enableStoreForward": false,
			"primaryScada": "MY SCADA"
		}
	];
57 | // STORE FORWARD TESTING
	// With store & forward enabled, metrics received while the primary SCADA
	// is OFFLINE must be held back and the births published only after the
	// retained STATE topic flips to ONLINE.
	it('should buffer when primary SCADA IS OFFLINE', function (done) {
		client = mqtt.connect(testBroker);

		// WARN! We'll enable buffering for all tests
		simpleFlow[1].enableStoreForward = true;

		// SET OFFLINE
		// Send Birth
		// SET SCADA ONLINE
		// VERIFY BIRTH is send when ONLINE

		var initBirthDone = false;
		let n1;
		let b1;
		client.on('connect', function () {
			// Retained STATE message marks the primary SCADA OFFLINE.
			client.publish("STATE/MY SCADA", "OFFLINE", true);
			// Set Online after 250ms
			setTimeout(() => client.publish("STATE/MY SCADA", "ONLINE", true), 500);
			client.subscribe('#', function (err) {
				if (!err) {
					helper.load(sparkplugNode, simpleFlow, function () {
						try {
							n1 = helper.getNode("n1");
							b1 = n1.brokerConn;
							// Any error from the node or broker config fails the test.
							n1.on('call:error', call => {
								console.log("ERROR", call.firstArg);
								call.firstArg.should.eql("mqtt-sparkplug-plus.errors.payload-type-object")
								done();
							});
							b1.on('call:error', call => {
								console.log("ERROR1", call.firstArg);
								call.firstArg.should.eql("mqtt-sparkplug-plus.errors.payload-type-object")
								done();
							});

							// Send all metrics to trigger DBIRTH
							n1.receive({
								"payload" : {
									"metrics": [
										{
											"name": "test",
											"value": 11,
										},
										{
											"name": "test2",
											"value": 11
										}
									]}
							});

						}catch (e) {
							done(e);
						}
					});
				}
			})


		});

		client.on('message', function (topic, message) {
			// Both births must only appear once the SCADA status is ONLINE;
			// seq 0 for NBIRTH-level message, seq 1 for the device DBIRTH.
			if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red") {
				var buffer = Buffer.from(message);
				var payload = spPayload.decodePayload(buffer);
				payload.should.have.property("seq");
				payload.seq.toInt().should.eql(0);
				n1.brokerConn.primaryScadaStatus.should.eql("ONLINE");

			} else if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red/TEST2"){
				var buffer = Buffer.from(message);
				var payload = spPayload.decodePayload(buffer);
				payload.should.have.property("seq");
				payload.seq.toInt().should.eql(1);
				n1.brokerConn.primaryScadaStatus.should.eql("ONLINE");
				// Restore the shared flow config for subsequent tests.
				simpleFlow[1].enableStoreForward = false;
				done();
			}
		});
	}); // it end
137 |
138 | it('should buffer Broker Connection is online', function (done) {
139 | client = mqtt.connect(testBroker);
140 | // WARN! We'll enable buffering for all tests
141 | simpleFlow[1].enableStoreForward = false;
142 | simpleFlow[1].manualEoNBirth = true;
143 | // Intialzie
144 | // Send 5 messages
145 | // Connect after 5 seconds
146 | // VERIFY Birth has the "newest Data"
147 | // Verify that all the NDATA messages are send
148 |
149 | var initBirthDone = false;
150 | let n1;
151 | let b1;
152 | waitOver = false;
153 |
154 | client.on('connect', function () {
155 | client.subscribe('#', function (err) {
156 | if (!err) {
157 | helper.load(sparkplugNode, simpleFlow, function () {
158 | try {
159 | n1 = helper.getNode("n1");
160 | b1 = n1.brokerConn;
161 | n1.bufferDevice = true;
162 | setTimeout(() => {
163 | waitOver = true;
164 | n1.receive({
165 | "command" : {
166 | "node" : {
167 | "connect" : true
168 | }
169 | }
170 | })
171 | }, 500);
172 |
173 | for (let index = 0; index < 5; index++) {
174 | setTimeout(() => {
175 | // Send all metrics to trigger DBIRTH
176 |
177 | n1.receive({
178 | "payload" : {
179 | "metrics": [
180 | {
181 | "name": "test",
182 | "value": 1*index,
183 | },
184 | {
185 | "name": "test2",
186 | "value": 1*index
187 | }
188 | ]}
189 | });
190 | }, index*50);
191 | }
192 | }catch (e) {
193 | done(e);
194 | }
195 | });
196 | }
197 | });
198 | });
199 |
200 | step = -1;
201 | client.on('message', function (topic, message) {
202 | if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red/TEST2") {
203 | var buffer = Buffer.from(message);
204 | var payload = spPayload.decodePayload(buffer);
205 | step.should.eql(step++);
206 | } else if (topic === "spBv1.0/My Devices/DDATA/Node-Red/TEST2"){
207 | var buffer = Buffer.from(message);
208 | var payload = spPayload.decodePayload(buffer);
209 | payload.metrics[0].value.should.eql(step++);
210 | payload.metrics[0].hasOwnProperty("timestamp").should.be.true();
211 | if (step == 5) {
212 | done();
213 | }
214 |
215 | }
216 | });
217 | }); // it end
218 |
219 |
220 | it('should NBIRTH when primary SCADA BECOMES ONLINE', function (done) {
221 | client = mqtt.connect(testBroker);
222 | expectedMessageId = 0;
223 |
224 | // WARN! We'll enable buffering for all tests
225 | simpleFlow[1].enableStoreForward = true;
226 | simpleFlow[1].manualEoNBirth = false;
227 | // SET OFFLINE
228 | // Send Birth
229 | // SET SCADA ONLINE
230 | // VERIFY BIRTH is send when ONLINE
231 |
232 | var initBirthDone = false;
233 | let n1;
234 | let b1;
235 | client.on('connect', function () {
236 | client.publish("STATE/MY SCADA", "OFFLINE", true);
237 | // Set Online after 250ms
238 | setTimeout(() => client.publish("STATE/MY SCADA", "ONLINE", true), 500);
239 | client.subscribe('#', function (err) {
240 | if (!err) {
241 | helper.load(sparkplugNode, simpleFlow, function () {
242 | try {
243 | n1 = helper.getNode("n1");
244 | b1 = n1.brokerConn;
245 |
246 | // Send all metrics to trigger DBIRTH
247 | n1.receive({
248 | "payload" : {
249 | "metrics": [
250 | {
251 | "name": "test",
252 | "value": 11,
253 | },
254 | {
255 | "name": "test2",
256 | "value": 11
257 | }
258 | ]}
259 | });
260 |
261 | }catch (e) {
262 | done(e);
263 | }
264 | });
265 | }
266 | })
267 |
268 |
269 | });
270 |
271 | client.on('message', function (topic, message) {
272 | switch (expectedMessageId++)
273 | {
274 | case 0:
275 | topic.should.equal("STATE/MY SCADA")
276 | break;
277 | case 1:
278 | topic.should.equal("spBv1.0/My Devices/NBIRTH/Node-Red")
279 | var buffer = Buffer.from(message);
280 | var payload = spPayload.decodePayload(buffer);
281 | payload.should.have.property("seq");
282 | payload.seq.toInt().should.eql(0);
283 | n1.brokerConn.primaryScadaStatus.should.eql("ONLINE");
284 | break;
285 | case 2:
286 | topic.should.equal("spBv1.0/My Devices/DBIRTH/Node-Red/TEST2")
287 | var buffer = Buffer.from(message);
288 | var payload = spPayload.decodePayload(buffer);
289 | payload.should.have.property("seq");
290 | payload.seq.toInt().should.eql(1);
291 | n1.brokerConn.primaryScadaStatus.should.eql("ONLINE");
292 | break;
293 | case 3:
294 | topic.should.equal("spBv1.0/My Devices/DDATA/Node-Red/TEST2")
295 | var buffer = Buffer.from(message);
296 | var payload = spPayload.decodePayload(buffer);
297 | payload.should.have.property("seq");
298 | payload.seq.toInt().should.eql(2);
299 | n1.brokerConn.primaryScadaStatus.should.eql("ONLINE");
300 | done();
301 | break;
302 | }
303 | });
304 | }); // it end
305 |
306 | });
--------------------------------------------------------------------------------
/locales/en-US/mqtt-sparkplug-plus.html:
--------------------------------------------------------------------------------
1 |
168 |
169 |
188 |
189 |
215 |
216 |
242 |
--------------------------------------------------------------------------------
/test/sparkplug_EoN_command_spec.js:
--------------------------------------------------------------------------------
1 | var helper = require("node-red-node-test-helper");
2 | var sparkplugNode = require("../mqtt-sparkplug-plus.js");
3 | var should = require("should");
4 | var mqtt = require("mqtt");
5 | var pako = require('pako');
6 |
7 | var spPayload = require('sparkplug-payload').get("spBv1.0");
8 | helper.init(require.resolve('node-red'));
9 | let testBroker = 'mqtt://localhost';
10 | var client = null;
11 |
12 | describe('mqtt sparkplug EoN - Commands', function () {
	beforeEach(function (done) {
		// Start the Node-RED test runtime before each test.
		helper.startServer(done);
	});

	afterEach(function (done) {
		// Unload flows, stop the test server, and close the MQTT test client
		// if a test opened one.
		helper.unload();
		helper.stopServer(done);
		if (client) {
			client.end();
		}
	});
	// Flow: a sparkplug device wired to helper n2, a sparkplug out node (o1),
	// and the shared broker config. Tests mutate simpleFlow[1]
	// (manualEoNBirth) and simpleFlow[0] (birthImmediately) before loading.
	var simpleFlow = [
		{
			"id": "n1",
			"type": "mqtt sparkplug device",
			"name": "TEST2",
			"metrics": {
				"test": {
					"dataType": "Int32"
				},
				"test2": {
					"dataType": "Int32"
				}
			},
			"broker": "b1",
			"wires": [["n2"]]
		},
		{
			"id": "b1",
			"type": "mqtt-sparkplug-broker",
			"name": "Local Host",
			"deviceGroup": "My Devices",
			"eonName": "Node-Red",
			"broker": "localhost",
			"port": "1883",
			"clientid": "",
			"usetls": false,
			"protocolVersion": "4",
			"keepalive": "60",
			"cleansession": true,
			"enableStoreForward": false,
			"primaryScada": "MY SCADA"
		},
		{
			"id": "o1",
			"type": "mqtt sparkplug out",
			"broker": "b1",
			"wires": []
		},
		{ id: "n2", type: "helper" }
	];
64 |
65 | it('should not birth until connect', function (done) {
66 |
67 | flow = simpleFlow;
68 | flow[1].manualEoNBirth = true;
69 | client = mqtt.connect(testBroker);
70 |
71 | let n1;
72 | let b1;
73 | var waitOver = false;
74 | client.on('connect', function () {
75 | client.subscribe('#', function (err) {
76 | if (!err) {
77 | helper.load(sparkplugNode, simpleFlow, function () {
78 |
79 |
80 | try {
81 | n1 = helper.getNode("n1");
82 | b1 = n1.brokerConn;
83 |
84 | setTimeout(() => {
85 | waitOver = true;
86 | n1.receive({
87 | "command" : {
88 | "node" : {
89 | "connect" : true
90 | }
91 | }
92 | })
93 | }, 500);
94 | }catch (e) {
95 | done(e);
96 | }
97 | });
98 | }
99 | })
100 | });
101 |
102 |
103 |
104 | client.on('message', function (topic, message) {
105 | if (topic === "spBv1.0/My Devices/NBIRTH/Node-Red") {
106 | waitOver.should.be.true();
107 | done();
108 | }
109 | });
110 | });
111 |
it('should rebirth on new name', function (done) {
    // Automatic EoN birth; the device births immediately on connect.
    // (Fix: removed the no-op self-assignment `simpleFlow = simpleFlow;`
    // and the `waitOver` local that was written but never read.)
    simpleFlow[1].manualEoNBirth = false;
    simpleFlow[0].birthImmediately = true;
    client = mqtt.connect(testBroker);
    let n1;
    let b1;
    client.on('connect', function () {
        client.subscribe('#', function (err) {
            if (!err) {
                helper.load(sparkplugNode, simpleFlow, function () {
                    try {
                        n1 = helper.getNode("n1");
                        b1 = n1.brokerConn;

                        // After the initial birth sequence, rename the node and
                        // group; this must trigger NDEATH on the old topics and
                        // NBIRTH/DBIRTH on the new ones.
                        setTimeout(() => {
                            n1.receive({
                                "command" : {
                                    "node" : {
                                        "set_name" : "NEW_NAME",
                                        "set_group" : "NEW_GROUP"
                                    }
                                }
                            })
                        }, 500);
                    } catch (e) {
                        done(e);
                    }
                });
            }
        })
    });

    // Expected message sequence is tracked via stateId (0..4).
    var stateId = 0;
    client.on('message', function (topic, message) {

        if (topic === "spBv1.0/My Devices/NBIRTH/Node-Red") {
            stateId.should.eql(0);

            var buffer = Buffer.from(message);
            var payload = spPayload.decodePayload(buffer);

            let bd = payload.metrics.find(x => x.name == "bdSeq");
            bd.value.low.should.eql(0);

            stateId++
        }
        if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red/TEST2") {
            stateId.should.eql(1);
            stateId++
        }
        if (topic === "spBv1.0/My Devices/NDEATH/Node-Red") {
            stateId.should.eql(2);

            var buffer = Buffer.from(message);
            var payload = spPayload.decodePayload(buffer);
            let bd = payload.metrics.find(x => x.name == "bdSeq");
            bd.value.low.should.eql(0);

            stateId++
        }
        if (topic === "spBv1.0/NEW_GROUP/NBIRTH/NEW_NAME") {
            stateId.should.eql(3);

            var buffer = Buffer.from(message);
            var payload = spPayload.decodePayload(buffer);
            let bd = payload.metrics.find(x => x.name == "bdSeq");
            bd.value.low.should.eql(0); // BDSeq should not increase on REBIRTH

            stateId++
        }

        if (topic === "spBv1.0/NEW_GROUP/DBIRTH/NEW_NAME/TEST2") {
            stateId.should.eql(4);
            done();
        }

    });
});
196 |
it('should subscribe on new node topic (Node name change)', function (done) {

    simpleFlow[1].manualEoNBirth = true;
    simpleFlow[0].birthImmediately = true;

    helper.load(sparkplugNode, simpleFlow, function () {
        const deviceNode = helper.getNode("n1");
        const outNode = helper.getNode("o1");
        const helperNode = helper.getNode("n2");

        // Only the DCMD published on the renamed node topic should be
        // forwarded to the helper node.
        helperNode.on("input", function (msg) {
            msg.topic.should.eql("spBv1.0/My Devices/DCMD/NEW_NAME/TEST2")
            done();
        });

        // Rename the node and connect; send all metrics to birth the device.
        deviceNode.receive({
            "command" : {
                "node" : {
                    "set_name" : "NEW_NAME",
                    "connect" : true
                }
            },
            "payload" : [
                { "name": "test", "value": 11, },
                { "name": "test2", "value": 11 }
            ]
        });

        setTimeout(() => {
            // Send on old topic and new topic to make sure it only subscribes to new topic
            deviceNode.brokerConn.client.connected.should.be.true();

            const sendDcmd = (topic) => outNode.receive({
                topic : topic,
                payload : {
                    "metrics" : [
                        { "name": "test", "value": 500, "type" : "Int32" }
                    ]
                }
            });

            sendDcmd("spBv1.0/My Devices/DCMD/Node-Red/TEST2");
            sendDcmd("spBv1.0/My Devices/DCMD/RANDOM/TEST2");
            sendDcmd("spBv1.0/My Devices/DCMD/NEW_NAME/TEST2");
        }, 200);
    });
});
280 |
it('should subscribe on new node topic (Device name change)', function (done) {

    simpleFlow[1].manualEoNBirth = true;
    simpleFlow[0].birthImmediately = true;

    helper.load(sparkplugNode, simpleFlow, function () {
        const deviceNode = helper.getNode("n1");
        const outNode = helper.getNode("o1");
        const helperNode = helper.getNode("n2");

        // Only the DCMD addressed to the renamed device should be forwarded
        // to the helper node.
        helperNode.on("input", function (msg) {
            msg.topic.should.eql("spBv1.0/My Devices/DCMD/Node-Red/NEW_NAME")
            done();
        });

        // Rename the device and connect; send all metrics to birth it.
        deviceNode.receive({
            "command" : {
                "node" : {
                    "connect" : true
                },
                "device" : {
                    "set_name" : "NEW_NAME"
                }
            },
            "payload" : [
                { "name": "test", "value": 11, },
                { "name": "test2", "value": 11 }
            ]
        });

        setTimeout(() => {
            // Send on old topic and new topic to make sure it only subscribes to new topic
            deviceNode.brokerConn.client.connected.should.be.true();

            const sendDcmd = (topic) => outNode.receive({
                topic : topic,
                payload : {
                    "metrics" : [
                        { "name": "test", "value": 500, "type" : "Int32" }
                    ]
                }
            });

            sendDcmd("spBv1.0/My Devices/DCMD/Node-Red/TEST2");
            sendDcmd("spBv1.0/My Devices/DCMD/RANDOM/TEST2");
            sendDcmd("spBv1.0/My Devices/DCMD/Node-Red/NEW_NAME");
        }, 200);
    });
});
364 |
it('should NBIRTH ON REBIRTH CMD', function (done) {

    /**
     * 1. Let the EoN birth normally (NBIRTH + DBIRTH).
     * 2. Publish a "Node Control/Rebirth" NCMD to the node.
     * 3. Verify the EoN sends NDEATH followed by a fresh NBIRTH and DBIRTH.
     */
    client = mqtt.connect(testBroker);
    // Fix: this counter was an implicit global in the original.
    let expectedMessageId = 0;

    // Store & forward disabled, automatic EoN birth.
    // (The original comment claimed buffering was enabled — it is not.)
    simpleFlow[1].enableStoreForward = false;
    simpleFlow[1].manualEoNBirth = false;

    let n1;
    let b1;
    client.on('connect', function () {
        // NOTE(review): `true` is not a valid mqtt.js options object, so this
        // message is NOT retained and is never delivered to this client's own
        // later subscription. With store-forward disabled the SCADA state is
        // irrelevant to this test; confirm intent before "fixing" to
        // { retain: true }, which would change the received-message sequence.
        client.publish("STATE/MY SCADA", "OFFLINE", true);
        client.subscribe('#', function (err) {
            if (!err) {
                helper.load(sparkplugNode, simpleFlow, function () {
                    try {
                        n1 = helper.getNode("n1");
                        b1 = n1.brokerConn;

                        // Send all metrics to trigger DBIRTH
                        n1.receive({
                            "payload" : {
                                "metrics": [
                                    {
                                        "name": "test",
                                        "value": 11,
                                    },
                                    {
                                        "name": "test2",
                                        "value": 11
                                    }
                                ]}
                        });

                    } catch (e) {
                        done(e);
                    }
                });
            }
        })
    });

    client.on('message', function (topic, message) {
        switch (expectedMessageId++)
        {
            case 0:
                topic.should.equal("spBv1.0/My Devices/NBIRTH/Node-Red")
                var buffer = Buffer.from(message);
                var payload = spPayload.decodePayload(buffer);
                payload.should.have.property("seq");
                payload.seq.toInt().should.eql(0);
                break;
            case 1:
                topic.should.equal("spBv1.0/My Devices/DBIRTH/Node-Red/TEST2")
                var buffer = Buffer.from(message);
                var payload = spPayload.decodePayload(buffer);
                payload.should.have.property("seq");
                payload.seq.toInt().should.eql(1);

                // Send a Rebirth Command
                var cmdPayload = spPayload.encodePayload({
                    metrics : [
                        {
                            name: "Node Control/Rebirth",
                            type: "Boolean",
                            value: true
                        }
                    ]
                });
                client.publish("spBv1.0/My Devices/NCMD/Node-Red", cmdPayload);
                break;
            case 2: // Our own rebirth command echoes back on the wildcard sub
                topic.should.equal("spBv1.0/My Devices/NCMD/Node-Red")
                break;
            case 3: // EoN must NDEATH before the new NBIRTH
                topic.should.equal("spBv1.0/My Devices/NDEATH/Node-Red")
                break;
            case 4: // EoN re-births
                topic.should.equal("spBv1.0/My Devices/NBIRTH/Node-Red")
                break;
            case 5: // followed by the device DBIRTH
                topic.should.equal("spBv1.0/My Devices/DBIRTH/Node-Red/TEST2")
                done();
                break;
        }
    });
}); // it end
465 | });
466 |
467 |
--------------------------------------------------------------------------------
/test/sparkplug_device_template_spec.js:
--------------------------------------------------------------------------------
1 | /*
2 | var helper = require("node-red-node-test-helper");
3 | var sparkplugNode = require("../mqtt-sparkplug-plus.js");
4 | var should = require("should");
5 | var mqtt = require("mqtt");
6 | var pako = require('pako');
7 |
8 | var spPayload = require('sparkplug-payload').get("spBv1.0");
9 | helper.init(require.resolve('node-red'));
10 | let testBroker = 'mqtt://localhost';
11 | var client = null;
12 |
13 |
14 |
15 | describe('mqtt sparkplug device template support', function () {
16 |
17 | beforeEach(function (done) {
18 | helper.startServer(done);
19 | });
20 |
21 | afterEach(function (done) {
22 | helper.unload();
23 | helper.stopServer(done);
24 | if (client) {
25 | client.end();
26 | }
27 | });
28 |
29 | //let validnbirth = {"timestamp":1692110985865,"metrics":[{"name":"MyType","type":"Template","value":{"version":"","templateRef":"Type-IS-A","isDefinition":true,"metrics":[{"name":"D","type":"Template","value":{"version":"","templateRef":"Type-Has-A","isDefinition":false,"metrics":[],"parameters":[]},"timestamp":1692110984675,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}},{"name":"B","type":"Int32","value":33,"timestamp":1691437351927,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}},{"name":"C","type":"Int32","value":22,"timestamp":1691437351927,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}},{"name":"A","type":"Int32","value":null,"timestamp":1692110984676,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}}],"parameters":[]},"timestamp":1692110984676,"properties":{}},{"name":"Node Control/Next 
Server","type":"Boolean","value":false,"timestamp":1692110985890},{"name":"Type-Has-A","type":"Template","value":{"version":"","templateRef":"","isDefinition":true,"metrics":[{"name":"C","type":"Int32","value":null,"timestamp":1692110984677,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}}],"parameters":[]},"timestamp":1692110984677,"properties":{}},{"name":"Type-IS-A","type":"Template","value":{"version":"","templateRef":"","isDefinition":true,"metrics":[{"name":"A","type":"Int32","value":null,"timestamp":1692110984675,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}}],"parameters":[]},"timestamp":1692110984675,"properties":{}},{"name":"MyTypeInstance","type":"Template","value":{"version":"","templateRef":"MyType","isDefinition":false,"metrics":[{"name":"A","type":"Int32","value":null,"timestamp":1692110985865,"isHistorical":false,"isTransient":false,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}},{"name":"B","type":"Int32","value":33,"timestamp":1692110985865,"isHistorical":false,"isTransient":false,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}},{"name":"C","type":"Int32","value":22,"timestamp":1692110985865,"isHistorical":false,"isTransient":false,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}},{"name
":"D","type":"Template","value":{"version":"","templateRef":"Type-Has-A","isDefinition":false,"metrics":[{"name":"C","type":"Int32","value":null,"timestamp":1692110985865,"isHistorical":false,"isTransient":false,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}}],"parameters":[]},"timestamp":1692110985865,"isHistorical":false,"isTransient":false,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}}],"parameters":[]},"timestamp":1692110985890,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}},{"name":"Type-Has-A-P","type":"Template","value":{"version":"","templateRef":"","isDefinition":true,"metrics":[{"name":"D","type":"Int32","value":null,"timestamp":1692110984676,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}}],"parameters":[]},"timestamp":1692110984677,"properties":{}},{"name":"Node Info/Transmission Version","type":"String","value":"4.0.13 (b2022092123)","timestamp":1692110985890},{"name":"bdSeq","type":"Int64","value":{"low":1,"high":0,"unsigned":true},"timestamp":1692110985865},{"name":"Node Control/Rebirth","type":"Boolean","value":false,"timestamp":1692110985890}],"seq":0}
30 | let validnbirth = {"timestamp":1692110985865,"metrics":[{"name":"MyType","type":"Template","value":{"version":"","isDefinition":true,"metrics":[{"name":"D","type":"Template","value":{"version":"","templateRef":"Type-Has-A","isDefinition":false,"metrics":[],"parameters":[]},"timestamp":1692110984675,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}},{"name":"B","type":"Int32","value":33,"timestamp":1691437351927,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}},{"name":"C","type":"Int32","value":22,"timestamp":1691437351927,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}},{"name":"A","type":"Int32","value":null,"timestamp":1692110984676,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}}],"parameters":[]},"timestamp":1692110984676,"properties":{}},{"name":"Node Control/Next 
Server","type":"Boolean","value":false,"timestamp":1692110985890},{"name":"Type-Has-A","type":"Template","value":{"version":"","isDefinition":true,"metrics":[{"name":"C","type":"Int32","value":null,"timestamp":1692110984677,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}}],"parameters":[]},"timestamp":1692110984677,"properties":{}},{"name":"Type-IS-A","type":"Template","value":{"version":"","isDefinition":true,"metrics":[{"name":"A","type":"Int32","value":null,"timestamp":1692110984675,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}}],"parameters":[]},"timestamp":1692110984675,"properties":{}},{"name":"MyTypeInstance","type":"Template","value":{"version":"","templateRef":"MyType","isDefinition":false,"metrics":[{"name":"A","type":"Int32","value":null,"timestamp":1692110985865,"isHistorical":false,"isTransient":false,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}},{"name":"B","type":"Int32","value":33,"timestamp":1692110985865,"isHistorical":false,"isTransient":false,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}},{"name":"C","type":"Int32","value":22,"timestamp":1692110985865,"isHistorical":false,"isTransient":false,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}},{"name":"D","type":"Template","value":{"
version":"","templateRef":"Type-Has-A","isDefinition":false,"metrics":[{"name":"C","type":"Int32","value":null,"timestamp":1692110985865,"isHistorical":false,"isTransient":false,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}}],"parameters":[]},"timestamp":1692110985865,"isHistorical":false,"isTransient":false,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}}],"parameters":[]},"timestamp":1692110985890,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}},{"name":"Type-Has-A-P","type":"Template","value":{"version":"","isDefinition":true,"metrics":[{"name":"D","type":"Int32","value":null,"timestamp":1692110984676,"metadata":{"isMultiPart":false,"contentType":"","size":{"low":0,"high":0,"unsigned":true},"seq":{"low":0,"high":0,"unsigned":true},"fileName":"","fileType":"","md5":"","description":""},"properties":{}}],"parameters":[]},"timestamp":1692110984677,"properties":{}},{"name":"Node Info/Transmission Version","type":"String","value":"4.0.13 (b2022092123)","timestamp":1692110985890},{"name":"bdSeq","type":"Int64","value":{"low":1,"high":0,"unsigned":true},"timestamp":1692110985865},{"name":"Node Control/Rebirth","type":"Boolean","value":false,"timestamp":1692110985890}],"seq":0}
31 |
32 | /**
33 | * Template Testing...
34 | *
 35 |  * 1. OK - Test that template definitions are sent correctly on NBIRTH
36 | * 2. Test that template instance is send correctly on DBIRTH
37 | * a. Test Valid
38 | * b. Test that invalid instance names throws error.
 39 |  *      c. Test with birth immediately
40 | * 3. Test that templates metrics are send correctly on DDATA (Test with one and more)
41 | */
42 | /*
43 | templateFlow = [
44 | {
45 | "id": "n1",
46 | "type": "mqtt sparkplug device",
47 | "metrics": {
48 | "a": {
49 | "dataType": "MyTemplate"
50 | },
51 | "b": {
52 | "dataType": "Int32"
53 | }
54 | },
55 | "name" : "TheDevice",
56 | "broker": "b1",
57 | "birthImmediately": false,
58 | },
59 | {
60 | "id": "b1",
61 | "type": "mqtt-sparkplug-broker",
62 | "deviceGroup": "My Devices",
63 | "eonName": "Node-Red",
64 | "broker": "localhost",
65 | "port": "1883",
66 | "clientid": "",
67 | "usetls": false,
68 | "protocolVersion": "4",
69 | "keepalive": "60",
70 | "cleansession": true,
71 | "enableStoreForward": false,
72 | "compressAlgorithm": "",
73 | "aliasMetrics": false,
74 | "templates": [
75 | "{\"name\":\"MyTemplate\",\"type\":\"Template\",\"value\":{\"version\":\"1.0.0\",\"isDefinition\":true,\"metrics\":[{\"name\":\"FirstTag\",\"type\":\"Int32\"},{\"name\":\"SecondTag\",\"type\":\"Int32\"}],\"parameters\":[]}}"
76 | ],
77 | "primaryScada": "",
78 | "credentials": {}
79 | }
80 | ]
81 |
82 | validateTemplate = function(root, template) {
83 |
84 | let shouldBeDefinition = root === "";
85 |
86 | if (!template.hasOwnProperty("name")) {
 87 | 			throw "Template must have a name"; // Can't find requirement for this in the spec, but I guess it is obvious?
88 | }
89 | let name = `${root}$>>{template.name}`;
90 |
91 | let isTemplate = template.hasOwnProperty("type") && template.type === "Template";
92 | if (!(isTemplate)) {
93 | throw `${name} : A Template Definition MUST type and type must be template`;
94 | }
95 |
96 | let hasValue = template.hasOwnProperty("value") && typeof template.value === 'object';
97 | if (!hasValue) {
98 | throw `${name} : A Template Definition MUST have a value`; // Can't find a requirement for this one
99 | }
100 | let v = template.value;
101 |
102 | // [tck-id-payloads-template-definition-is-definition] A Template Definition MUST have is_definition set to true.
103 | if (!v.hasOwnProperty("isDefinition")) {
104 | throw `${name} : A Template Definition MUST have isDefinition`;
105 | }
106 |
107 | if (v.isDefinition !== true && shouldBeDefinition) {
108 | throw `${name} : A Template Definition MUST have isDefinition set to true`;
109 | }
110 |
111 | if (v.isDefinition === true && !shouldBeDefinition) {
112 | throw `${name} : A Template Instance MUST have isDefinition set to false`;
113 | }
114 |
115 | // [tck-id-payloads-template-definition-ref] A Template Definition MUST omit the template_ref field.
116 | if (v.hasOwnProperty("templateRef") && v.isDefinition) {
117 | throw `${name} : A Template Definition MUST omit the templateRef field`;
118 | }
119 |
120 | // Check Template Ref.
121 | v.metrics.forEach(m => {
122 |
123 | if (!m.hasOwnProperty("name")) {
124 | 				throw `${name}: Metric must have a name`; // Can't find requirement for this in the spec, but I guess it is obvious?
125 | }
126 | let mName = `${name}>>${m.name}`;
127 |
128 | if (!m.hasOwnProperty("type")) {
129 | throw `${mName}: metrics must have a type `;
130 | }
131 |
132 | // if metrics is
133 | if (m.type === "Template") {
134 | validateTemplate(name, m);
135 | }else {
136 | // Validate Metric.
137 | }
138 | })
139 | // for each metrics if template then call otherwise validate
140 | }
141 |
142 | getTemplateMetrics = function(templateDef) {
143 | metrics = [];
144 | templateDef.value.metrics.forEach(m => {
145 | if (m.type == Template) {
146 | metrics = metrics.concat(getTemplateMetrics(m));
147 | }
148 | else {
149 | metrics.push(m);
150 | }
151 | })
152 | }
153 |
154 | /**
155 | * Takes a list of t
156 | */
157 |
158 | /*
159 | templateToMetrics = function(templates, root, metrics) {
160 | console.log("Inflating,", metrics);
161 | result = [];
162 | Object.keys(metrics).forEach(m => {
163 |
164 | metric = metrics[m];
165 | if (metric.type === "Template") {
166 | // Get Metrics for
167 | let dt = m.dataType;
168 | let t = templates.find((e) => e.hasOwnProperty("name") && e.name == dt);
169 |
170 | if (t == undefined) {
171 | throw "Template not found";
172 | }
173 |
174 | result = result.concat(getTemplateMetrics(metric));
175 |
176 | }else {
177 | result.push(m);
178 | }
179 |
180 | });
181 | return result;
182 | }
183 |
184 | // [tck-id-topics-nbirth-templates] If Template instances will be published by this Edge Node or any devices, all Template definitions MUST be published in the NBIRTH.
185 | // [tck-id-payloads-template-definition-nbirth-only] Template Definitions MUST only be included in NBIRTH messages.
186 | // [tck-id-payloads-template-definition-members] A Template Definition MUST include all member metrics that will ever be included in corresponding template instances.
187 | // [tck-id-payloads-template-definition-nbirth] A Template Definition MUST be included in the NBIRTH for all Template Instances that are included in the NBIRTH and DBIRTH messages.
188 | // [tck-id-payloads-template-definition-parameters] A Template Definition MUST include all parameters that will be included in the corresponding Template Instances.
189 | // • [tck-id-payloads-template-definition-parameters-default] A Template Definition MAY include values for parameters in the Template Definition parameters.
190 |
191 | /*
192 |
193 | [tck-id-payloads-template-instance-is-definition] A Template Instance MUST have is_definition set to false.
194 | • [tck-id-payloads-template-instance-ref] A Template Instance MUST have template_ref set to the type of template definition it is.
195 | • [tck-id-payloads-template-instance-members] A Template Instance MUST include only members that were included in the corresponding template definition.
196 | • [tck-id-payloads-template-instance-members-birth] A Template Instance in a NBIRTH or DBIRTH message MUST include all members that were included in the corresponding Template Definition.
197 | • [tck-id-payloads-template-instance-members-data] A Template Instance in a NDATA or DDATA message MAY include only a subset of the members that were included in the corresponding template definition.
198 | • [tck-id-payloads-template-instance-parameters] A Template Instance MAY include parameter values for any parameters that were included in the corresponding Template Definition.
199 | */
200 |
201 | // Loop though each template.
202 | //
203 | /*
204 | it('TestTemplate', function (done) {
205 | // Create a list of all templates (to make sure references are correct)
206 | validnbirth.metrics.forEach(x=> {
207 |
208 |
209 | if (x.type === "Template" && x.value.isDefinition === true) {
210 | console.log(`validating ${x.name}`)
211 | validateTemplate("", x);
212 | 			console.log("Complete");
213 | }else if (x.type === "Template" && x.value.isDefinition === false) {
214 | console.log(`instance ${x.name}`)
215 | }else {
216 | console.log(`skipping ${x.name}`)
217 | }
218 | })
219 |
220 | done();
221 | });
222 |
223 | it('TestTemplateInflate', function (done) {
224 | let metrics = templateFlow[0].metrics;
225 | let templates = JSON.parse(templateFlow[1].templates);
226 |
227 | result = templateToMetrics(templates, "", metrics);
228 | console.log(result);
229 | /*console.log(templateFlow);
230 | Object.keys(metrics).forEach(x=> {
231 | xx = metrics[x];
232 | console.log("working", xx);
233 | // templateToMetrics = function(templates, root, metrics) {
234 |
235 | console.log(x);
236 | })ss
237 |
238 | });
239 | /***
240 | *
241 | it('Should send template on NBIRTH', function (done) {
242 |
243 | var n1 = null;
244 | client = mqtt.connect(testBroker);
245 |
246 | client.on('connect', function () {
247 | client.subscribe("spBv1.0/My Devices/#", function (err) {
248 | if (!err) {
249 | helper.load(sparkplugNode, templateFlow, function () {
250 | n1 = helper.getNode("n1");
251 | });
252 | }
253 | });
254 | });
255 |
256 | client.on('message', function (topic, message) {
257 | topic.should.eql("spBv1.0/My Devices/NBIRTH/Node-Red");
258 |
259 | var buffer = Buffer.from(message);
260 | var payload = spPayload.decodePayload(buffer);
261 | payload.metrics.should.deepEqual([
262 | { name: 'MyTemplate', type: 'Template', value: {
263 | "version": "1.0.0",
264 | "isDefinition": true,
265 | "metrics": [
266 | {
267 | "name": "FirstTag",
268 | "type": "Int32",
269 | value : 0
270 | },
271 | {
272 | "name": "SecondTag",
273 | "type": "Int32",
274 | value : 0
275 | }
276 | ],
277 | "parameters": [],
278 | "templateRef": ""
279 | } },
280 | { name: 'Node Control/Rebirth', type: 'Boolean', value: false },
281 | { name: 'bdSeq', type: 'Int8', value: 0 }
282 | ]);
283 | done();
284 | });
285 | });
286 |
287 | it('Should send template instance on DBIRTH (birthImmediately)', function (done) {
288 |
289 | var n1 = null;
290 | client = mqtt.connect(testBroker);
291 |
292 | templateFlow[0].birthImmediately = true;
293 | client.on('connect', function () {
294 | client.subscribe("spBv1.0/My Devices/#", function (err) {
295 | if (!err) {
296 | helper.load(sparkplugNode, templateFlow, function () {
297 | n1 = helper.getNode("n1");
298 | });
299 | }
300 | });
301 | });
302 |
303 | client.on('message', function (topic, message) {
304 | if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red/TheDevice") {
305 | var buffer = Buffer.from(message);
306 | var payload = spPayload.decodePayload(buffer);
307 | done();
308 | }
309 | });
310 | });
311 |
312 |
313 |
314 | it('Should send template instance on DBIRTH)', function (done) {
315 |
316 | var n1 = null;
317 | client = mqtt.connect(testBroker);
318 |
319 | templateFlow[0].birthImmediately = false;
320 | client.on('connect', function () {
321 | client.subscribe("spBv1.0/My Devices/#", function (err) {
322 | if (!err) {
323 | helper.load(sparkplugNode, templateFlow, function () {
324 | n1 = helper.getNode("n1");
325 | n1.receive({
326 | "payload" : {
327 | "metrics": [
328 | {
329 | "name": "b",
330 | "value": 11,
331 | },
332 | {
333 | "name": "a/FirstTag",
334 | "type": "Int32",
335 | value : 2
336 | },
337 | {
338 | "name": "a/SecondTag",
339 | "type": "Int32",
340 | value : 3
341 | }
342 | ]}
343 | }
344 | );
345 | });
346 | }
347 | });
348 | });
349 |
350 | client.on('message', function (topic, message) {
351 | if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red/TheDevice") {
352 | var buffer = Buffer.from(message);
353 | var payload = spPayload.decodePayload(buffer);
354 | console.log(payload);
355 |
356 | "1".should.eql("2");
357 | //done();
358 | }
359 | });
360 | });
361 |
362 | })
363 | */
364 |
365 |
366 |
367 |
--------------------------------------------------------------------------------
/mqtt-sparkplug-plus.html:
--------------------------------------------------------------------------------
1 |
13 |
20 |
21 |
112 |
113 |
352 |
353 |
354 |
397 |
398 |
546 |
547 |
548 |
574 |
575 |
606 |
607 |
608 |
641 |
642 |
--------------------------------------------------------------------------------
/test/sparkplug_device__spec.js:
--------------------------------------------------------------------------------
1 | var helper = require("node-red-node-test-helper");
2 | var sparkplugNode = require("../mqtt-sparkplug-plus.js");
3 | var should = require("should");
4 | var long = require("long");
5 | var mqtt = require("mqtt");
6 | var pako = require('pako');
7 |
8 | var spPayload = require('sparkplug-payload').get("spBv1.0");
9 | helper.init(require.resolve('node-red'));
10 | let testBroker = 'mqtt://localhost';
11 | var client = null;
12 |
13 | describe('mqtt sparkplug device node', function () {
14 |
beforeEach(function (done) {
    // Fresh embedded Node-RED test server per test.
    helper.startServer(done);
});

afterEach(function (done) {
    helper.unload();
    // Fix: close the test MQTT client BEFORE tearing down the server, and
    // null it so a stale client is never re-ended by a later test's teardown.
    if (client) {
        client.end();
        client = null;
    }
    helper.stopServer(done);
});
26 |
// Minimal test flow: one sparkplug device ("n1") plus its broker config ("b1").
var simpleFlow = [
    {
        id: "n1",
        type: "mqtt sparkplug device",
        name: "TEST2",
        metrics: {
            test: { dataType: "Int32" },
            test2: { dataType: "Int32" }
        },
        broker: "b1"
    },
    {
        id: "b1",
        type: "mqtt-sparkplug-broker",
        name: "Local Host",
        deviceGroup: "My Devices",
        eonName: "Node-Red",
        broker: "localhost",
        port: "1883",
        clientid: "",
        usetls: false,
        protocolVersion: "4",
        keepalive: "60",
        cleansession: true,
        enableStoreForward: false,
        primaryScada: "MY SCADA"
    }
];
59 |
it('should be loaded', function (done) {
    // Smoke test: the device node type registers and keeps its configured name.
    const flow = [{ id: "n1", type: "mqtt sparkplug device", name: "device" }];
    helper.load(sparkplugNode, flow, function () {
        helper.getNode("n1").should.have.property('name', 'device');
        done();
    });
});
68 |
	/**
	 * Verify NBIRTH is sent when starting up Node-Red with a Device loaded.
	 */
	it('should send NBirth message', function (done) {
		client = mqtt.connect(testBroker);
		let n1;
		let b1;
		client.on('connect', function () {
			// Subscribe to everything so the NBIRTH can be observed externally
			client.subscribe('#', function (err) {
				if (!err) {
					helper.load(sparkplugNode, simpleFlow, function () {
						try {
							n1 = helper.getNode("n1");
							b1 = n1.brokerConn;
						}catch (e) {
							done(e);
						}
					});
				}
			})
		});

		client.on('message', function (topic, message) {
			// Verify that we sent an NBIRTH message to the broker
			if (topic === "spBv1.0/My Devices/NBIRTH/Node-Red"){
				var buffer = Buffer.from(message);
				var payload = spPayload.decodePayload(buffer);

				payload.should.have.property("timestamp");
				long.isLong(payload.timestamp).should.be.true();

				// NBIRTH must carry the first sequence number (0)
				payload.should.have.property("seq");
				payload.seq.toInt().should.eql(0);

				payload.metrics.should.containDeep([
					{ name: 'Node Control/Rebirth', type: 'Boolean', value: false },
					{ name: 'bdSeq', type: 'Int64' } // We don't check the number here
				]);

				// First connection, so bdSeq starts at 0
				payload.metrics[1].value.toInt().should.eql(0);

				done();
				client.end();
			}
		});

	}); // it end
117 |
	/**
	 * Verify only a single NBIRTH is sent when starting up Node-Red with a Device loaded.
	 */
	it('only sends one NBirth message', function (done) {
		client = mqtt.connect(testBroker);
		let n1;
		let b1;
		let firstMessage = true;

		client.on('connect', function () {
			client.subscribe('#', function (err) {
				if (!err) {
					helper.load(sparkplugNode, simpleFlow, function () {
						try {
							n1 = helper.getNode("n1");
							b1 = n1.brokerConn;
							// Allow 500 ms for a (wrong) duplicate NBIRTH to arrive
							setTimeout(() => {
								client.end();
								done()
							}, 500);

						}catch (e) {
							done(e);
						}
					});
				}
			})
		});

		client.on('message', function (topic, message) {
			// Fail if more than one NBIRTH is observed on the broker
			if (topic === "spBv1.0/My Devices/NBIRTH/Node-Red"){

				firstMessage.should.equal(true);
				firstMessage = false;
			}
		});

	}); // it end
158 |
	// Writing values for all configured metrics must trigger a DBIRTH for the device.
	it('should send DBirth message', function (done) {
		client = mqtt.connect(testBroker);
		let n1;
		let b1;
		client.on('connect', function () {
			client.subscribe('#', function (err) {
				if (!err) {
					helper.load(sparkplugNode, simpleFlow, function () {
						try {
							n1 = helper.getNode("n1");
							b1 = n1.brokerConn;

							// Send all metrics to trigger DBIRTH
							n1.receive({
								"payload" : {
									"metrics": [
										{
											"name": "test",
											"value": 11
										},
										{
											"name": "test2",
											"value": 11
										}
									]}
								}
							);
						}catch (e) {
							done(e);
						}
					});
				}
			})
		});

		client.on('message', function (topic, message) {
			// Verify that we sent a DBirth Message to the broker
			if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red/TEST2"){
				var buffer = Buffer.from(message);
				var payload = spPayload.decodePayload(buffer);

				payload.should.have.property("timestamp");
				payload.timestamp.toInt().should.be.a.Number();
				// Both metrics must be present with their configured types
				payload.metrics.should.containDeep([{
						name: 'test',
						type: 'Int32',
						value: 11,
						//timestamp: 1630716767231
					},
					{
						name: 'test2',
						type: 'Int32',
						value: 11,
						//timestamp: 1630716767232
					}
				]);
				done();
				//client.end();
			}

		});

	}); // it end
221 |
222 | it('should not send DBirth if no metrics', function (done) {
223 |
224 | flow = JSON.parse(JSON.stringify(simpleFlow));
225 | flow[0].metrics = {}; // N
226 |
227 | Object.keys(flow[0].metrics).length.should.eql(0);
228 |
229 | client = mqtt.connect(testBroker);
230 | let n1;
231 | let b1;
232 | client.on('message', function (topic, message) {
233 | topic.should.not.eql("spBv1.0/My Devices/DBIRTH/Node-Red/TEST2")
234 | });
235 | client.on('connect', function () {
236 | client.subscribe('#', function (err) {
237 | if (!err) {
238 | helper.load(sparkplugNode, flow, function () {
239 | try {
240 | n1 = helper.getNode("n1");
241 | b1 = n1.brokerConn;
242 |
243 | n1.receive({
244 | "payload" : {
245 | "metrics": [
246 | /*{
247 | "name": "test",
248 | "value": 11
249 | },
250 | {
251 | "name": "test2",
252 | "value": 11
253 | }*/
254 | ]}
255 | }
256 | );
257 | }catch (e) {
258 | done(e);
259 | }
260 | });
261 | }
262 | })
263 | });
264 | setTimeout(function() {
265 | done();
266 | }, 500);
267 |
268 |
269 | }); // it end
270 |
	it('should not birth when metrics missing and birthImmediately = false', function (done) {
		client = mqtt.connect(testBroker);
		let n1;
		let b1;
		client.on('connect', function () {
			client.subscribe('#', function (err) {
				if (!err) {
					helper.load(sparkplugNode, simpleFlow, function () {
						try {
							n1 = helper.getNode("n1");
							b1 = n1.brokerConn;
							b1.birthImmediately = false;
							// No metric values are written, so give the node 500 ms to
							// (wrongly) publish a DBIRTH before declaring success
							setTimeout(() => done(), 500);
						}catch (e) {
							done(e);
						}
					});
				}
			})
		});

		client.on('message', function (topic, message) {
			// The device must not birth while no metric values have been received
			topic.should.not.equal("spBv1.0/My Devices/DBIRTH/Node-Red/TEST2")

		});

	}); // it
299 |
300 | it('should send DBirth message when birth immediately is set', function (done) {
301 | client = mqtt.connect(testBroker);
302 | let n1;
303 | let b1;
304 |
305 | flow = JSON.parse(JSON.stringify(simpleFlow));
306 | flow[0].birthImmediately = true;
307 |
308 | // Add Extra node to test
309 | extraDevice = JSON.parse(JSON.stringify(simpleFlow[0]));
310 | extraDevice.id = "n3";
311 | extraDevice.name = "TEST2"
312 |
313 | client.on('connect', function () {
314 | client.subscribe('#', function (err) {
315 | if (!err) {
316 | helper.load(sparkplugNode, flow, function () {
317 | try {
318 | n1 = helper.getNode("n1");
319 | b1 = n1.brokerConn;
320 | }catch (e) {
321 | done(e);
322 | }
323 | });
324 | }
325 | })
326 | });
327 |
328 | client.on('message', function (topic, message) {
329 |
330 | // Should only BIRTH device with birthImmi attribute set.
331 | topic.should.not.eql("spBv1.0/My Devices/DBIRTH/Node-Red/TEST3");
332 |
333 | // Verify that we sent a DBirth Message to the broker
334 | if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red/TEST2"){
335 | var buffer = Buffer.from(message);
336 | var payload = spPayload.decodePayload(buffer);
337 | payload.should.have.property("timestamp");
338 | payload.timestamp.toInt().should.be.Number();
339 | done();
340 | //client.end();
341 | }
342 |
343 | });
344 |
345 | }); // it
346 |
	// A dynamic definition with properties must appear in DBIRTH;
	// properties must then be omitted from subsequent DDATA messages.
	it('should send Properties in DBIRTH message', function (done) {
		client = mqtt.connect(testBroker);
		let ts = Date.now();
		let n1;
		let b1;
		client.on('connect', function () {
			client.subscribe('#', function (err) {
				if (!err) {
					helper.load(sparkplugNode, simpleFlow, function () {
						try {
							n1 = helper.getNode("n1");
							b1 = n1.brokerConn;
							b1.client.on('connect',function (connack) {
								// Define one metric with an engUnits property and write value 5
								n1.receive({
									"definition": {
										"TEST/TEST": {
											"dataType": "Int32",
											"properties": {
												"engUnits": {
													"type": "string",
													"value": "inHg"
												}
											} // properties end
										} // Metrics end
									},
									"payload" : {
										"metrics" : [
											{
												"name": "TEST/TEST",
												"value": 5,
												"timestamp" : ts

											}]
									}
								});
							});
						}catch (e) {
							done(e);
						}
					});
				}
			})
		});

		client.on('message', function (topic, message) {
			// Verify that we sent a DBirth Message with properties to the broker

			if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red/TEST2"){
				var buffer = Buffer.from(message);
				var payload = spPayload.decodePayload(buffer);
				payload.should.have.property("timestamp");
				payload.timestamp.toInt().should.be.Number();

				// seq 0 was NBIRTH, so this DBIRTH is seq 1
				payload.should.have.property("seq");
				payload.seq.toInt().should.be.eql(1);

				// Hack: convert the decoded Long timestamp to a plain Number
				payload.metrics[0].timestamp = payload.metrics[0].timestamp.toNumber();

				payload.metrics.should.containDeep([{
						name: 'TEST/TEST',
						type: 'Int32',
						value: 5,
						"properties": {
							"engUnits": {
								"type": "String",
								"value": "inHg"
							}
						}, // properties end
						timestamp: ts
					}
				]);


				// Write a second value to trigger a DDATA
				helper.getNode("n1").receive({
					"payload" : {
						"metrics" : [
							{
								"name": "TEST/TEST",
								"value": 15,
								"timestamp" : ts

							}]
					}
				});

			}
			else if (topic === "spBv1.0/My Devices/DDATA/Node-Red/TEST2") {
				// Verify that DDATA is not sent for the first metric write (value 5)
				// Verify that Properties are not sent on DDATA
				var buffer = Buffer.from(message);
				var payload = spPayload.decodePayload(buffer);
				payload.metrics[0].timestamp = payload.metrics[0].timestamp.toNumber();
				payload.metrics.should.containDeep([{
						name: 'TEST/TEST',
						type: 'Int32',
						value: 15,
						timestamp: ts
					}
				]);
				done();
			}

		});

	}); // it end
453 |
	// Should send an NDEATH message each time the EoN node is deregistered,
	// with bdSeq incremented on every reconnect cycle.
	it('should send NDEATH messages', function (done) {

		client = mqtt.connect(testBroker);
		var initBirthDone = false;
		var deathSend = false;
		let n1;
		let b1;
		client.on('connect', function () {
			client.subscribe('#', function (err) {
				if (!err) {
					helper.load(sparkplugNode, simpleFlow, function () {
						try {
							n1 = helper.getNode("n1");
							b1 = n1.brokerConn;

							// Send all metrics to trigger DBIRTH
							n1.receive({
								"payload" : {
									"metrics": [
										{
											"name": "test",
											"value": 11,
										},
										{
											"name": "test2",
											"value": 11
										}
									]}
								}
							);
						}catch (e) {
							done(e);
						}
					});
				}
			})
		});
		var expectedBd = 0;
		client.on('message', function (topic, message) {

			if (topic === "spBv1.0/My Devices/NBIRTH/Node-Red") {
				var buffer = Buffer.from(message);
				var payload = spPayload.decodePayload(buffer);

				// Check that bdSeq is increased every time we reconnect.
				// This is a Long.js object, per the Sparkplug specification.
				payload.metrics[1].value.toInt().should.eql(expectedBd);


				// Force Disconnect
				b1.deregister(n1);
			} else if (topic === "spBv1.0/My Devices/NDEATH/Node-Red"){
				var buffer = Buffer.from(message);
				var payload = spPayload.decodePayload(buffer);

				// NDEATH must echo the bdSeq of the matching NBIRTH, and nothing else
				payload.metrics[0].value.toInt().should.eql(expectedBd);
				payload.metrics.length.should.eql(1);

				// Stop after six birth/death cycles
				if (expectedBd == 5) {
					done();
				}
				expectedBd++;


				// Force Reconnect
				b1.register(n1);
			}
		});

	}); // it end
526 |
	// A "Node Control/Rebirth" NCMD must trigger NDEATH followed by fresh
	// NBIRTH/DBIRTH messages with the sequence counter restarted.
	it('should send REBIRTH messages', function (done) {
		client = mqtt.connect(testBroker);
		var initBirthDone = false;
		var deathSend = false;
		let n1;
		let b1;
		client.on('connect', function () {
			client.subscribe('#', function (err) {
				if (!err) {
					helper.load(sparkplugNode, simpleFlow, function () {
						try {
							n1 = helper.getNode("n1");
							b1 = n1.brokerConn;

							// Send all metrics to trigger DBIRTH
							n1.receive({
								"payload" : {
									"metrics": [
										{
											"name": "test",
											"value": 11,
										},
										{
											"name": "test2",
											"value": 11
										}
									]}
								}
							);
						}catch (e) {
							done(e);
						}
					});
				}
			})
		});

		client.on('message', function (topic, message) {

			if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red") {
				if (initBirthDone === true) {
					var buffer = Buffer.from(message);
					var payload = spPayload.decodePayload(buffer);
					// The rebirth restarts the sequence: NBIRTH is seq 0,
					// so this birth must be seq 1
					payload.should.have.property("seq");
					payload.seq.toInt().should.be.eql(1);
				}
			} else if (topic === "spBv1.0/My Devices/NDEATH/Node-Red"){
				deathSend = true;
			} else if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red/TEST2"){
				// Ready to issue rebirth
				if (initBirthDone === true) {
					var buffer = Buffer.from(message);
					var payload = spPayload.decodePayload(buffer);
					payload.should.have.property("seq");
					payload.seq.toInt().should.be.eql(1);
					// An NDEATH must have been observed before the re-birth
					deathSend.should.eql(true);
					done();

				} else {
					// Ask the EoN node to rebirth via an NCMD message
					var rebirth = {
						metrics : [
							{
								"name" : "Node Control/Rebirth",
								"type" : "Boolean",
								"value": true
							},
						]
					}
					var payload = spPayload.encodePayload(rebirth);

					client.publish("spBv1.0/My Devices/NCMD/Node-Red",payload);
					initBirthDone = true;
				}
			}
		});

	}); // it end
605 |
	// After DBIRTH, a single metric write must be published as DDATA with
	// only that metric and the next sequence number.
	it('should send valid NData in input', function (done) {
		client = mqtt.connect(testBroker);
		let n1;
		let b1;
		client.on('connect', function () {
			client.subscribe('#', function (err) {
				if (!err) {
					helper.load(sparkplugNode, simpleFlow, function () {
						try {
							n1 = helper.getNode("n1");
							b1 = n1.brokerConn;
							b1.client.on('connect',function (connack) {
								// Send all metrics to trigger DBIRTH
								n1.receive({
									"payload" : {
										"metrics": [
											{
												"name": "test",
												"value": 11,
											},
											{
												"name": "test2",
												"value": 11
											}
										]}
									}
								);
							});
						}catch (e) {
							done(e);
						}
					});
				}
			})
		});

		client.on('message', function (topic, message) {
			// Once the DBIRTH arrives, write a single metric to trigger DDATA
			if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red/TEST2"){
				n1.receive({
					"payload" : {
						"metrics": [
							{
								"name": "test",
								"value": 100,
								//"timestamp": new Date()
							},
						]}
					}
				);
			} else if (topic === "spBv1.0/My Devices/DDATA/Node-Red/TEST2") {
				var buffer = Buffer.from(message);
				var payload = spPayload.decodePayload(buffer);


				payload.metrics[0].should.have.property("name").which.is.eql("test");
				payload.metrics[0].should.have.property("value").which.is.eql(100);
				payload.metrics[0].should.have.property("type").which.is.eql("Int32");
				//payload.metrics[0].should.have.property("timestamp").which.is.a.Number();
				// Only the written metric may be included, with exactly 4 keys
				payload.metrics.length.should.eql(1);
				Object.keys(payload.metrics[0]).length.should.eql(4);

				payload.should.have.property("timestamp");
				long.isLong(payload.timestamp).should.be.true();

				payload.should.have.property("seq");
				payload.seq.toInt().should.eql(2); // 0 is NBIRTH, 1 is DBIRTH

				done();
				//client.end();
			}

		});

	}); // it end
681 |
	// A metric timestamp supplied as a Date object must be converted to an
	// epoch value (decoded as a Long) in the published DDATA.
	it('should convert NData timestamp to EPOC', function (done) {
		client = mqtt.connect(testBroker);
		let n1;
		let b1;
		client.on('connect', function () {
			client.subscribe('#', function (err) {
				if (!err) {
					helper.load(sparkplugNode, simpleFlow, function () {
						try {
							n1 = helper.getNode("n1");
							b1 = n1.brokerConn;
							b1.client.on('connect',function (connack) {
								// Send all metrics to trigger DBIRTH
								n1.receive({
									"payload" : {
										"metrics": [
											{
												"name": "test",
												"value": 11,
											},
											{
												"name": "test2",
												"value": 11
											}
										]}
									}
								);}
							);
						}catch (e) {
							done(e);
						}
					});
				}
			})
		});

		client.on('message', function (topic, message) {
			// Once the DBIRTH arrives, write a metric carrying a Date timestamp
			if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red/TEST2"){
				n1.receive({
					"payload" : {
						"metrics": [
							{
								"name": "test",
								"value": 100,
								"timestamp": new Date()
							},
						]}
					}
				);
			} else if (topic === "spBv1.0/My Devices/DDATA/Node-Red/TEST2") {
				var buffer = Buffer.from(message);
				var payload = spPayload.decodePayload(buffer);
				payload.should.have.property("timestamp");
				long.isLong(payload.timestamp).should.be.true();
				payload.metrics[0].should.have.property("name").which.is.eql("test");
				payload.metrics[0].should.have.property("value").which.is.eql(100);
				payload.metrics[0].should.have.property("type").which.is.eql("Int32");


				// The Date must have been encoded as an epoch (Long) timestamp
				payload.metrics[0].should.have.property("timestamp");
				long.isLong(payload.metrics[0].timestamp).should.be.true();

				payload.should.have.property("seq");
				payload.seq.toInt().should.eql(2); // 0 is NBIRTH, 1 is DBIRTH


				payload.metrics.length.should.eql(1);
				Object.keys(payload.metrics[0]).length.should.eql(4);

				done();
				client.end();
			}

		});

	}); // it end
759 |
760 | it('should warn when passing unknown NData metric', function (done) {
761 | helper.load(sparkplugNode, simpleFlow, function () {
762 |
763 | const n1 = helper.getNode("n1");
764 | n1.on('input', () => {
765 | n1.warn.should.be.calledWithExactly("mqtt-sparkplug-plus.errors.device-unknown-metric");
766 | done();
767 | });
768 | n1.receive({
769 | "payload" : {
770 | "metrics": [
771 | {
772 | "name": "does_not_exits",
773 | "value": 100
774 | },
775 | ]}
776 | }
777 | );
778 |
779 | }); // end helper
780 | }); // it end
781 |
782 | it('should warn when passing NData metric without name', function (done) {
783 | helper.load(sparkplugNode, simpleFlow, function () {
784 |
785 | let n1 = helper.getNode("n1");
786 | n1.on('input', () => {
787 | n1.warn.should.be.calledWithExactly("mqtt-sparkplug-plus.errors.missing-attribute-name");
788 | done();
789 | });
790 | n1.receive({
791 | "payload" : {
792 | "metrics": [
793 | {
794 | "value": 100
795 | },
796 | ]}
797 | }
798 | );
799 |
800 | }); // end helper
801 | }); // it end
802 |
803 | it('should error when passing NData metric that is not array', function (done) {
804 | helper.load(sparkplugNode, simpleFlow, function () {
805 |
806 | let n1 = helper.getNode("n1");
807 | n1.receive({
808 | "payload" : {
809 | "metrics": {"A": "B"} }
810 | }
811 | );
812 |
813 | n1.on('call:error', call => {
814 | // XXX
815 | call.firstArg.should.eql("mqtt-sparkplug-plus.errors.device-no-metrics")
816 | done();
817 | });
818 | }); // end helper
819 | }); // it end
820 |
	// Writing a metric with value null must publish a DDATA where the metric
	// keeps its configured type but carries a null value.
	it('should add null_value on DData without value', function (done) {
		client = mqtt.connect(testBroker);
		let n1;
		let b1;
		client.on('connect', function () {
			client.subscribe('#', function (err) {
				if (!err) {
					helper.load(sparkplugNode, simpleFlow, function () {
						try {
							n1 = helper.getNode("n1");
							b1 = n1.brokerConn;
							b1.client.on('connect',function (connack) {
								// Send all metrics to trigger DBIRTH
								n1.receive({
									"payload" : {
										"metrics": [
											{
												"name": "test",
												"value": 11
											},
											{
												"name": "test2",
												"value": 11
											}
										]}
									}
								);
							});
						}catch (e) {
							done(e);
						}
					});
				}
			})
		});

		client.on('message', function (topic, message) {
			// Once the DBIRTH arrives, write a null value for one metric
			if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red/TEST2"){
				n1.receive({
					"payload" : {
						"metrics": [
							{
								"name": "test",
								"value": null
								//"timestamp": new Date()
							},
						]}
					}
				);
			} else if (topic === "spBv1.0/My Devices/DDATA/Node-Red/TEST2") {
				try {
					var buffer = Buffer.from(message);
					var payload = spPayload.decodePayload(buffer);

					payload.should.have.property("timestamp");
					long.isLong(payload.timestamp).should.be.true();

					payload.metrics[0].should.have.property("name").which.is.eql("test");
					payload.metrics[0].should.have.property("value").which.is.eql(null);
					payload.metrics[0].should.have.property("type").which.is.eql("Int32");
					done();
				} catch (e) {
					done(e);
				}

			}

		});

	}); // it end */
892 |
	// With compressAlgorithm = DEFLATE the published payload is a wrapper
	// whose body must inflate back to a valid Sparkplug payload.
	it('should send valid DEFLATE NData in input2', function (done) {
		client = mqtt.connect(testBroker);
		simpleFlow[1].compressAlgorithm = "DEFLATE";
		let n1;
		let b1;
		client.on('connect', function () {
			client.subscribe('#', function (err) {
				if (!err) {
					helper.load(sparkplugNode, simpleFlow, function () {
						try {
							n1 = helper.getNode("n1");
							b1 = n1.brokerConn;
							b1.client.on('connect',function (connack) {
								// Send all metrics to trigger DBIRTH
								n1.receive({
									"payload" : {
										"metrics": [
											{
												"name": "test",
												"value": 11,
											},
											{
												"name": "test2",
												"value": 11
											}
										]}
									}
								);
							});
						}catch (e) {
							done(e);
						}
					});
				}
			})
		});

		client.on('message', function (topic, message) {
			// Once the DBIRTH arrives, write one metric to trigger DDATA
			if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red/TEST2"){
				n1.receive({
					"payload" : {
						"metrics": [
							{
								"name": "test",
								"value": 100,
								//"timestamp": new Date()
							},
						]}
					}
				);
			} else if (topic === "spBv1.0/My Devices/DDATA/Node-Red/TEST2") {
				// Decode the wrapper, inflate the body, then decode the inner payload
				var buffer = Buffer.from(message);
				var payload = spPayload.decodePayload(buffer);
				payload = pako.inflate(payload.body);
				buffer = Buffer.from(payload);
				payload = spPayload.decodePayload(buffer);

				payload.should.have.property("timestamp");
				long.isLong(payload.timestamp).should.be.true();

				payload.should.have.property("seq");
				payload.seq.toInt().should.eql(2); // 0 is NBIRTH, 1 is DBIRTH


				payload.metrics[0].should.have.property("name").which.is.eql("test");
				payload.metrics[0].should.have.property("value").which.is.eql(100);
				payload.metrics[0].should.have.property("type").which.is.eql("Int32");
				//payload.metrics[0].should.have.property("timestamp").which.is.a.Number();
				payload.metrics.length.should.eql(1);
				Object.keys(payload.metrics[0]).length.should.eql(4);

				// Restore the shared flow for the following tests
				simpleFlow[1].compressAlgorithm = undefined;
				done();
				//client.end();
			}

		});

	}); // it end
973 |
	// With compressAlgorithm = GZIP the published payload is a wrapper whose
	// body must inflate (pako auto-detects gzip) back to a valid Sparkplug payload.
	it('should send valid GZIP NData in input2', function (done) {
		client = mqtt.connect(testBroker);
		simpleFlow[1].compressAlgorithm = "GZIP";
		let n1;
		let b1;
		client.on('connect', function () {
			client.subscribe('#', function (err) {
				if (!err) {
					helper.load(sparkplugNode, simpleFlow, function () {
						try {
							n1 = helper.getNode("n1");
							b1 = n1.brokerConn;
							b1.client.on('connect',function (connack) {
								// Send all metrics to trigger DBIRTH
								n1.receive({
									"payload" : {
										"metrics": [
											{
												"name": "test",
												"value": 11,
											},
											{
												"name": "test2",
												"value": 11
											}
										]}
									}
								);
							});
						}catch (e) {
							done(e);
						}
					});
				}
			})
		});

		client.on('message', function (topic, message) {
			// Once the DBIRTH arrives, write one metric to trigger DDATA
			if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red/TEST2"){
				n1.receive({
					"payload" : {
						"metrics": [
							{
								"name": "test",
								"value": 100,
								//"timestamp": new Date()
							},
						]}
					}
				);
			} else if (topic === "spBv1.0/My Devices/DDATA/Node-Red/TEST2") {
				// Decode the wrapper, inflate the body, then decode the inner payload
				var buffer = Buffer.from(message);
				var payload = spPayload.decodePayload(buffer);
				payload = pako.inflate(payload.body);
				buffer = Buffer.from(payload);
				payload = spPayload.decodePayload(buffer);

				payload.should.have.property("timestamp");
				long.isLong(payload.timestamp).should.be.true();

				payload.should.have.property("seq");
				payload.seq.toInt().should.eql(2); // 0 is NBIRTH, 1 is DBIRTH


				payload.metrics[0].should.have.property("name").which.is.eql("test");
				payload.metrics[0].should.have.property("value").which.is.eql(100);
				payload.metrics[0].should.have.property("type").which.is.eql("Int32");
				//payload.metrics[0].should.have.property("timestamp").which.is.a.Number();
				payload.metrics.length.should.eql(1);
				Object.keys(payload.metrics[0]).length.should.eql(4);


				// Restore the shared flow for the following tests
				simpleFlow[1].compressAlgorithm = undefined;
				done();
				//client.end();
			}

		});

	}); // it end
1055 |
	// An unknown compressAlgorithm must produce a warning and fall back to
	// publishing the payload uncompressed.
	it('should warn and send uncompressed on unknown Compression Algorithm', function (done) {
		client = mqtt.connect(testBroker);
		simpleFlow[1].compressAlgorithm = "WINZUP";
		let n1;
		let b1;
		client.on('connect', function () {
			client.subscribe('#', function (err) {
				if (!err) {
					helper.load(sparkplugNode, simpleFlow, function () {
						try {
							n1 = helper.getNode("n1");
							b1 = n1.brokerConn;
							b1.client.on('connect',function (connack) {
								n1.on('call:warn', call => {
									call.should.be.calledWithExactly('mqtt-sparkplug-plus.errors.unable-to-encode-message');
									done();
								});

								n1.on('input', () => {

									// FIXME: warn should be called, but its not! (works in node-red)
									// need to fix test
									//n1.warn.should.be.calledWithExactly('mqtt-sparkplug-plus.errors.unable-to-encode-message');

								});
								// Send all metrics to trigger DBIRTH
								n1.receive({
									"payload" : {
										"metrics": [
											{
												"name": "test",
												"value": 11,
											},
											{
												"name": "test2",
												"value": 11
											}
										]}
									}
								);
							});
						}catch (e) {


							done(e);
						}
					});
				}
			})
		});

		client.on('message', function (topic, message) {
			// Once the DBIRTH arrives, write one metric to trigger DDATA
			if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red/TEST2"){
				n1.receive({
					"payload" : {
						"metrics": [
							{
								"name": "test",
								"value": 100,
								//"timestamp": new Date()
							},
						]}
					}
				);
			} else if (topic === "spBv1.0/My Devices/DDATA/Node-Red/TEST2") {
				// The payload must decode directly, i.e. it was NOT compressed
				var buffer = Buffer.from(message);
				var payload = spPayload.decodePayload(buffer);


				payload.should.have.property("timestamp");
				long.isLong(payload.timestamp).should.be.true();
				payload.should.have.property("seq");
				payload.seq.toInt().should.eql(2); // 0 is NBIRTH, 1 is DBIRTH

				payload.metrics[0].should.have.property("name").which.is.eql("test");
				payload.metrics[0].should.have.property("value").which.is.eql(100);
				payload.metrics[0].should.have.property("type").which.is.eql("Int32");
				//payload.metrics[0].should.have.property("timestamp").which.is.a.Number();
				payload.metrics.length.should.eql(1);
				Object.keys(payload.metrics[0]).length.should.eql(4);

				// Restore the shared flow for the following tests
				simpleFlow[1].compressAlgorithm = undefined;
				done();
				//client.end();
			}

		});

	}); // it end
1146 |
1147 | // Dynamic definition verification:
1148 | it('should output DBIRTH and Metric when definition is passed in message', function (done) {
1149 |
1150 | client = mqtt.connect(testBroker);
1151 |
1152 | client.on('connect', function () {
1153 | client.subscribe('#', function (err) {
1154 | if (!err) {
1155 | helper.load(sparkplugNode, simpleFlow, function () {
1156 | try {
1157 |
1158 | n1 = helper.getNode("n1");
1159 | n1.on('call:error', call => {
1160 | // XXX
1161 | call.firstArg.should.eql("mqtt-sparkplug-plus.errors.payload-type-object")
1162 | done();
1163 | });
1164 | n1.receive({
1165 | "definition" : {
1166 | "TEST/TEST" : {
1167 | "dataType" : "Int32"
1168 | }
1169 | },
1170 | "payload" : {
1171 | "metrics" : [
1172 | {
1173 | "name" : "TEST/TEST",
1174 | "value" : 5
1175 | }]
1176 | }});
1177 | }catch (e) {
1178 | done(e);
1179 | }
1180 | });
1181 |
1182 | }
1183 | })
1184 | });
1185 |
1186 | client.on('message', function (topic, message) {
1187 | // Verify that we sent a DBirth Message to the broker
1188 | if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red/TEST2"){
1189 | var buffer = Buffer.from(message);
1190 | var payload = spPayload.decodePayload(buffer);
1191 |
1192 |
1193 | payload.should.have.property("timestamp");
1194 | long.isLong(payload.timestamp).should.be.true();
1195 |
1196 | payload.metrics.should.containDeep([
1197 | { name: 'TEST/TEST', type: 'Int32', value: 5 },
1198 | ]);
1199 |
1200 | done();
1201 | }
1202 | });
1203 |
1204 |
1205 |
1206 | }); // it end
1207 |
1208 | it('should error when definition has invalid dataType', function (done) {
1209 |
1210 | client = mqtt.connect(testBroker);
1211 |
1212 | client.on('connect', function () {
1213 | client.subscribe('#', function (err) {
1214 | if (!err) {
1215 | helper.load(sparkplugNode, simpleFlow, function () {
1216 | try {
1217 | n1 = helper.getNode("n1");
1218 | b1 = n1.brokerConn;
1219 |
1220 | n1.on('call:error', call => {
1221 | call.should.be.calledWithExactly('mqtt-sparkplug-plus.errors.invalid-metric-definition');
1222 | done();
1223 | });
1224 |
1225 |
1226 | n1.receive({
1227 | "definition" : {
1228 | "TEST/TEST" : {
1229 | "dataType" : "fooBar"
1230 | }
1231 | }
1232 | });
1233 | }catch (e) {
1234 | done(e);
1235 | }
1236 | });
1237 | }
1238 | })
1239 | });
1240 |
1241 |
1242 | }); // it end
1243 |
1244 | it('should error when definition does not have a dataType', function (done) {
1245 |
1246 | client = mqtt.connect(testBroker);
1247 |
1248 | client.on('connect', function () {
1249 | client.subscribe('#', function (err) {
1250 | if (!err) {
1251 | helper.load(sparkplugNode, simpleFlow, function () {
1252 | try {
1253 | n1 = helper.getNode("n1");
1254 | b1 = n1.brokerConn;
1255 |
1256 | n1.on('call:error', call => {
1257 | call.should.be.calledWithExactly('mqtt-sparkplug-plus.errors.invalid-metric-definition');
1258 | done();
1259 | });
1260 |
1261 |
1262 | n1.receive({
1263 | "definition" : {
1264 | "TEST/TEST" : {
1265 | "foo" : "bar"
1266 | }
1267 | }
1268 | });
1269 | }catch (e) {
1270 | done(e);
1271 | }
1272 | });
1273 | }
1274 | })
1275 | });
1276 |
1277 |
1278 | }); // it end
1279 |
	// Receiving a new metric definition after the initial birth must force a
	// DDEATH followed by a fresh DBIRTH containing the new metric.
	it('should send REBIRTH messages on updated definition', function (done) {
		client = mqtt.connect(testBroker);
		var initBirthDone = false;
		var deathDone = false;
		let n1;
		let b1;
		client.on('connect', function () {
			client.subscribe('#', function (err) {
				if (!err) {
					helper.load(sparkplugNode, simpleFlow, function () {
						try {
							n1 = helper.getNode("n1");
							b1 = n1.brokerConn;

							// Send all metrics to trigger DBIRTH
							n1.receive({
								"payload" : {
									"metrics": [
										{
											"name": "test",
											"value": 11,
										},
										{
											"name": "test2",
											"value": 11
										}
									]}
								}
							);
						}catch (e) {
							done(e);
						}
					});
				}
			})
		});

		client.on('message', function (topic, message) {

			if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red") {
				if (initBirthDone === true) {
					var buffer = Buffer.from(message);
					var payload = spPayload.decodePayload(buffer);
					// After the rebirth the sequence restarts; this birth is seq 1
					payload.should.have.property("seq").which.is.eql(1);
				}
			} else if (topic === "spBv1.0/My Devices/DDEATH/Node-Red/TEST2"){
				deathDone = true;
			} else if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red/TEST2"){
				// Ready to issue rebirth
				if (initBirthDone === true) {
					var buffer = Buffer.from(message);
					var payload = spPayload.decodePayload(buffer);
					// A DDEATH must have preceded the re-birth
					deathDone.should.eql(true);
					payload.metrics.should.containDeep([
						{ name: 'TEST/TEST', type: 'Int32', value: 10 },
					]);
					done();

				} else {
					// Send an updated definition to force the rebirth
					n1.receive({
						"definition" : {
							"TEST/TEST" : {
								"dataType" : "Int32"
							}
						},
						"payload" : {
							"metrics" : [
								{
									"name" : "TEST/TEST",
									"value" : 10
								}]
						}});
					initBirthDone = true;
				}
			}
		});
	}); // it end
1359 |
// Rebirth triggered by an updated definition: metrics that are NOT included
// in the rebirth payload should still appear in the new DBIRTH (presumably
// filled in from the node's metric cache — this is the "cache sync" case).
it('should send REBIRTH messages on updated definition w cache sync.', function (done) {
    client = mqtt.connect(testBroker);
    var initBirthDone = false;
    var deathDone = false;
    let n1;
    let b1;
    client.on('connect', function () {
        client.subscribe('#', function (err) {
            if (!err) {
                helper.load(sparkplugNode, simpleFlow, function () {
                    try {
                        n1 = helper.getNode("n1");
                        b1 = n1.brokerConn;

                        // Send all metrics to trigger DBIRTH
                        n1.receive({
                            "payload" : {
                                "metrics": [
                                    {
                                        "name": "test",
                                        "value": 11,
                                    },
                                    {
                                        "name": "test2",
                                        "value": 11
                                    }
                                ]}
                            }
                        );
                    }catch (e) {
                        done(e);
                    }
                });
            }
        })
    });

    client.on('message', function (topic, message) {

        if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red") {
            if (initBirthDone === true) {
                var buffer = Buffer.from(message);
                var payload = spPayload.decodePayload(buffer);
                // The seq counter restarts on rebirth: the NBIRTH takes seq 0,
                // so this birth message is expected to carry seq 1.
                payload.should.have.property("seq").which.is.eql(1);
            }
        } else if (topic === "spBv1.0/My Devices/DDEATH/Node-Red/TEST2"){
            // The device must publish a DDEATH before it can be re-born.
            deathDone = true;
        } else if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red/TEST2"){
            // Ready to issue rebirth
            if (initBirthDone === true) {
                var buffer = Buffer.from(message);
                var payload = spPayload.decodePayload(buffer);
                deathDone.should.eql(true);
                // 'test2' was not part of the rebirth message below; its value
                // (11, reported before the rebirth) is expected to come from
                // the node's cache — confirming cache sync on rebirth.
                payload.metrics.should.containDeep([
                    { name: 'TEST/TEST', type: 'Int32', value: 10 },
                    { name: 'test2', type: 'Int32', value: 11 },
                ]);
                done();

            } else {
                // First DBIRTH seen: update the definition (now including
                // test2) to force a rebirth, but only report TEST/TEST.
                n1.receive({
                    "definition" : {
                        "TEST/TEST" : {
                            "dataType" : "Int32"
                        },
                        "test2" : {
                            "dataType" : "Int32"
                        },
                    },
                    "payload" : {
                        "metrics" : [
                            {
                                "name" : "TEST/TEST",
                                "value" : 10
                            }]
                    }});
                initBirthDone = true;
            }
        }
    });
}); // it end
1443 |
// A 'rebirth' device command should make the device publish a DDEATH
// followed by a fresh DBIRTH.
it('should send REBIRTH on REBIRTH Command', function (done) {
    client = mqtt.connect(testBroker);
    let birthSeen = false;
    let deathSeen = false;
    let n1;
    let b1;

    client.on('connect', () => {
        client.subscribe('#', (err) => {
            if (err) {
                return;
            }
            helper.load(sparkplugNode, simpleFlow, () => {
                try {
                    n1 = helper.getNode("n1");
                    b1 = n1.brokerConn;

                    // Report the full metric set so the node publishes its DBIRTH.
                    n1.receive({
                        "payload": {
                            "metrics": [
                                { "name": "test", "value": 11 },
                                { "name": "test2", "value": 11 }
                            ]
                        }
                    });
                } catch (e) {
                    done(e);
                }
            });
        });
    });

    client.on('message', (topic, message) => {
        if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red") {
            if (birthSeen) {
                const decoded = spPayload.decodePayload(Buffer.from(message));
                // seq restarts after the rebirth: NBIRTH takes 0, this birth is 1.
                decoded.should.have.property("seq").which.is.eql(1);
            }
        } else if (topic === "spBv1.0/My Devices/DDEATH/Node-Red/TEST2") {
            deathSeen = true;
        } else if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red/TEST2") {
            if (birthSeen) {
                // Decoded only to make sure the message parses; the content
                // itself is not asserted here.
                spPayload.decodePayload(Buffer.from(message));
                // The rebirth DBIRTH must have been preceded by a DDEATH.
                deathSeen.should.eql(true);
                done();
            } else {
                // First DBIRTH observed: request a rebirth via device command.
                n1.receive({
                    "command": {
                        "device": {
                            "rebirth": true
                        }
                    }
                });
                birthSeen = true;
            }
        }
    });
}); // it end
1517 |
// Check DEATH command will send DDEATH
it('should send DDEATH on DEATH Command', function (done) {
    client = mqtt.connect(testBroker);
    var initBirthDone = false;
    // NOTE: the unused `deathDone` flag from the sibling rebirth tests was
    // removed here; this test completes directly on the DDEATH message.
    let n1;
    let b1;

    client.on('connect', function () {
        client.subscribe('#', function (err) {
            if (!err) {
                helper.load(sparkplugNode, simpleFlow, function () {
                    try {
                        n1 = helper.getNode("n1");
                        b1 = n1.brokerConn;

                        // Send all metrics to trigger DBIRTH
                        n1.receive({
                            "payload" : {
                                "metrics": [
                                    {
                                        "name": "test",
                                        "value": 11,
                                    },
                                    {
                                        "name": "test2",
                                        "value": 11
                                    }
                                ]}
                            }
                        );
                    } catch (e) {
                        done(e);
                    }
                });
            }
        })
    });

    client.on('message', function (topic, message) {

        if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red") {
            if (initBirthDone === true) {
                var buffer = Buffer.from(message);
                var payload = spPayload.decodePayload(buffer);
                // seq restarts after a rebirth: NBIRTH takes seq 0, this birth is 1.
                payload.should.have.property("seq").which.is.eql(1);
            }
        } else if (topic === "spBv1.0/My Devices/DDEATH/Node-Red/TEST2"){
            // The death command produced a DDEATH for the device - success.
            done();
        } else if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red/TEST2"){
            // Device is born; send the death command and expect a DDEATH.
            n1.receive({
                "command" : {
                    "device" : {
                        "death" : true
                    }
                }
            });
            initBirthDone = true;
        }
    });
}); // it end
1583 |
// When aliasing is enabled, metric names should be replaced by numeric
// aliases after the birth message: DBIRTH carries name + alias, subsequent
// DDATA carries the alias only. Node control metrics are never aliased.
it('should alias metrics if enabled', function (done) {
    client = mqtt.connect(testBroker);
    let n1;
    let b1;
    client.on('connect', function () {
        client.subscribe('#', function (err) {
            if (!err) {
                // Enable aliasing on the device node before loading the flow.
                simpleFlow[1].aliasMetrics = true;
                helper.load(sparkplugNode, simpleFlow, function () {
                    try {
                        n1 = helper.getNode("n1");
                        b1 = n1.brokerConn;
                        b1.client.on('connect',function (connack) {
                            // Send all metrics to trigger DBIRTH
                            n1.receive({
                                "payload" : {
                                    "metrics": [
                                        {
                                            "name": "test",
                                            "value": 11,
                                        },
                                        {
                                            "name": "test2",
                                            "value": 11
                                        }
                                    ]}
                                }
                            );
                        });
                    } catch (e) {
                        done(e);
                    }
                });
            }
        })
    });

    client.on('message', function (topic, message) {
        if (topic === "spBv1.0/My Devices/NBIRTH/Node-Red"){
            var buffer = Buffer.from(message);
            var payload = spPayload.decodePayload(buffer);
            // Node-level control metrics must keep their names and get no alias.
            payload.metrics[0].should.have.property("name").which.is.eql("Node Control/Rebirth");
            payload.metrics[0].should.not.have.property("alias");
            payload.metrics[0].should.have.property("value").which.is.eql(false);

            payload.metrics[1].should.have.property("name").which.is.eql("bdSeq");
            payload.metrics[1].should.not.have.property("alias");
        }
        else if (topic === "spBv1.0/My Devices/DBIRTH/Node-Red/TEST2"){
            var buffer = Buffer.from(message);
            var payload = spPayload.decodePayload(buffer);

            payload.should.have.property("timestamp");
            long.isLong(payload.timestamp).should.be.true();

            // DBIRTH establishes the name <-> alias mapping: both are present.
            payload.metrics[0].should.have.property("name").which.is.eql("test");
            payload.metrics[0].should.have.property("value");
            payload.metrics[0].should.have.property("type").which.is.eql("Int32");
            // alias is decoded as a Long; declare locally (was an implicit global).
            const birthAlias = payload.metrics[0].alias.toNumber();
            birthAlias.should.eql(1);
            // Publish new data; the resulting DDATA should use the alias only.
            n1.receive({
                "payload" : {
                    "metrics": [
                        {
                            "name": "test",
                            "value": 100,
                        },
                    ]}
                }
            );
        } else if (topic === "spBv1.0/My Devices/DDATA/Node-Red/TEST2") {
            var buffer = Buffer.from(message);
            var payload = spPayload.decodePayload(buffer);

            payload.should.have.property("timestamp");
            long.isLong(payload.timestamp).should.be.true();

            // DDATA must reference the metric by alias, not by name.
            payload.metrics[0].should.have.property("value");
            payload.metrics[0].should.have.property("type").which.is.eql("Int32");
            payload.metrics[0].should.have.property("alias");
            payload.metrics[0].should.not.have.property("name");
            const dataAlias = payload.metrics[0].alias.toNumber();
            dataAlias.should.eql(1);
            payload.metrics.length.should.eql(1);
            // Exactly four properties on the metric (presumably value, type,
            // alias and timestamp — confirm against the encoder if this changes).
            Object.keys(payload.metrics[0]).length.should.eql(4);

            payload.should.have.property("seq");
            payload.seq.toInt().should.eql(2); // 0 is NBIRTH, 1 is DBIRTH
            done();
        }
    });

}); // it end
1686 |
// TODO: incomplete note — remaining coverage ideas are listed below.
1688 |
1689 |
1690 | // TODO:
1691 | // Test unknown metric data type
1692 | // Test NDEATH
1693 | // Test Invalid DCMD
1694 |
1695 | });
1696 |
1697 |
--------------------------------------------------------------------------------