├── .github
│   ├── dependabot.yml
│   └── workflows
│       ├── ci.yml
│       ├── commands.yml
│       └── npm-publish.yml
├── .gitignore
├── .gitmodules
├── .gitpod.yml
├── .npmignore
├── .npmrc
├── LICENSE
├── README.md
├── benchmark
│   ├── benchmark_all_types.js
│   ├── benchmark_by_kind.js
│   ├── benchmark_by_subtype.js
│   ├── benchmark_by_test.js
│   ├── benchmark_by_type.js
│   └── benchmark_unified.js
├── doc
│   ├── api.md
│   ├── compiler.md
│   ├── history.md
│   └── newDatatypes.md
├── example.js
├── examples
│   ├── compiled.js
│   ├── error_handling.js
│   ├── example_protocol.json
│   ├── full_protocol.js
│   ├── full_protocol_example.json
│   └── variable.js
├── index.d.ts
├── index.js
├── package.json
├── src
│   ├── compiler.js
│   ├── datatypes
│   │   ├── compiler-conditional.js
│   │   ├── compiler-structures.js
│   │   ├── compiler-utils.js
│   │   ├── conditional.js
│   │   ├── numeric.js
│   │   ├── structures.js
│   │   ├── utils.js
│   │   └── varint.js
│   ├── index.js
│   ├── protodef.js
│   ├── serializer.js
│   └── utils.js
└── test
    ├── dataTypes
    │   ├── datatypes.js
    │   └── prepareTests.js
    └── misc.js
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 |
3 | updates:
4 | - package-ecosystem: "npm"
5 | directory: "/"
6 | schedule:
7 | interval: weekly
8 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | on:
4 | push:
5 | branches: [ master ]
6 | pull_request:
7 | branches: [ master ]
8 |
9 | jobs:
10 | build:
11 |
12 | runs-on: ubuntu-latest
13 |
14 | strategy:
15 | matrix:
16 | node-version: [14.x]
17 |
18 | steps:
19 | - uses: actions/checkout@v2
20 | with:
21 | submodules: true
22 | - name: Use Node.js ${{ matrix.node-version }}
23 | uses: actions/setup-node@v1
24 | with:
25 | node-version: ${{ matrix.node-version }}
26 | - run: npm install
27 | - run: npm test
28 | - run: npm run benchmark
--------------------------------------------------------------------------------
/.github/workflows/commands.yml:
--------------------------------------------------------------------------------
1 | name: Repo Commands
2 |
3 | on:
4 | issue_comment: # Handle comment commands
5 | types: [created]
6 | pull_request: # Handle renamed PRs
7 | types: [edited]
8 |
9 | jobs:
10 | comment-trigger:
11 | runs-on: ubuntu-latest
12 | steps:
13 | - name: Check out repository
14 | uses: actions/checkout@v3
15 | - name: Run command handlers
16 | uses: PrismarineJS/prismarine-repo-actions@master
17 | with:
18 | # NOTE: You must specify a Personal Access Token (PAT) with repo access here. While you can use the default GITHUB_TOKEN, actions taken with it will not trigger other actions, so if you have a CI workflow, commits created by this action will not trigger it.
19 | token: ${{ secrets.GITHUB_TOKEN }}
20 | # See `Options` section below for more info on these options
21 | install-command: npm install
22 | /fixlint.fix-command: npm run fix
23 |
--------------------------------------------------------------------------------
/.github/workflows/npm-publish.yml:
--------------------------------------------------------------------------------
1 | name: npm-publish
2 | on:
3 | push:
4 | branches:
5 | - master # Change this to your default branch
6 | jobs:
7 | npm-publish:
8 | name: npm-publish
9 | runs-on: ubuntu-latest
10 | steps:
11 | - name: Checkout repository
12 | uses: actions/checkout@master
13 | with:
14 | submodules: true
15 | - name: Set up Node.js
16 | uses: actions/setup-node@master
17 | with:
18 | node-version: 14.0.0
19 | - id: publish
20 | uses: JS-DevTools/npm-publish@v1
21 | with:
22 | token: ${{ secrets.NPM_AUTH_TOKEN }}
23 | - name: Create Release
24 | if: steps.publish.outputs.type != 'none'
25 | id: create_release
26 | uses: actions/create-release@v1
27 | env:
28 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
29 | with:
30 | tag_name: ${{ steps.publish.outputs.version }}
31 | release_name: Release ${{ steps.publish.outputs.version }}
32 | body: ${{ steps.publish.outputs.version }}
33 | draft: false
34 | prerelease: false
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules/
2 | npm-debug.log
3 | dist/
4 | package-lock.json
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "ProtoDef"]
2 | path = ProtoDef
3 | url = https://github.com/ProtoDef-io/ProtoDef.git
4 |
--------------------------------------------------------------------------------
/.gitpod.yml:
--------------------------------------------------------------------------------
1 | tasks:
2 | - command: npm install
3 |
--------------------------------------------------------------------------------
/.npmignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | npm-debug.log
--------------------------------------------------------------------------------
/.npmrc:
--------------------------------------------------------------------------------
1 | package-lock=false
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018 ProtoDef-io
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # ProtoDef
2 | [](http://npmjs.com/package/protodef)
3 | [](https://discord.gg/GsEFRM8)
4 | [](https://gitter.im/PrismarineJS/general)
5 | [](https://irc.gitter.im/)
6 | [](https://github.com/ProtoDef-io/node-protodef/actions?query=workflow%3A%22CI%22)
7 | [](https://tonicdev.com/npm/protodef)
8 | [](https://gitpod.io/#https://github.com/ProtoDef-io/node-protodef)
9 |
10 | This is a node.js module to simplify defining, reading and writing binary blobs,
11 | whether they be internet protocols or files.
12 |
13 | ## Installing
14 |
15 | ```
16 | npm install protodef
17 | ```
18 |
19 |
20 | ## Usage
21 |
22 | See [example](example.js)
23 |
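A minimal sketch of the idea (the complete, runnable version is [example.js](example.js)):

```js
const { ProtoDef } = require('protodef')

// Define a type in terms of the default datatypes, then round-trip a packet
const proto = new ProtoDef()
proto.addTypes({
  entity_look: ['container', [
    { name: 'entityId', type: 'varint' },
    { name: 'yaw', type: 'i8' },
    { name: 'pitch', type: 'i8' }
  ]]
})

const buffer = proto.createPacketBuffer('entity_look', { entityId: 1, yaw: 1, pitch: 6 })
const { data } = proto.parsePacketBuffer('entity_look', buffer)
```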
24 | ## Documentation
25 |
26 | See the language independent [ProtoDef](https://github.com/ProtoDef-io/ProtoDef) specification.
27 |
28 | * [api.md](doc/api.md) documents the exposed functions and classes
29 | * [compiler.md](doc/compiler.md) documents the ProtoDef Compiler
30 | * [datatypes.md](https://github.com/ProtoDef-io/ProtoDef/blob/master/doc/datatypes.md) documents the default datatypes provided by ProtoDef.
31 | * [newDatatypes.md](doc/newDatatypes.md) explains how to create new datatypes for ProtoDef
32 | * [history.md](doc/history.md) is the release history
33 |
34 | ## Projects Using ProtoDef
35 |
36 | * [minecraft-protocol](https://github.com/PrismarineJS/node-minecraft-protocol) defines a protocol.json by minecraft version and uses ProtoDef to serialize and parse packets
37 | * the protocol.json files are stored in [minecraft-data](https://github.com/PrismarineJS/minecraft-data/blob/master/data/pc/1.8/protocol.json)
38 | * and they can be visualized automatically in this [doc](http://prismarinejs.github.io/minecraft-data/?d=protocol)
39 | * [prismarine-nbt](https://github.com/PrismarineJS/prismarine-nbt) defines an nbt.json to parse and serialize the NBT format
40 | * [mineflayer](https://github.com/PrismarineJS/mineflayer/blob/master/lib/plugins/command_block.js) uses ProtoDef to parse plugin messages
41 | * [minecraft-protocol-forge](https://github.com/PrismarineJS/node-minecraft-protocol-forge) parses and serializes forge plugin messages
42 | * [node-raknet](https://github.com/mhsjlw/node-raknet) describes the raknet protocol in a protocol.json and uses ProtoDef to read it
43 | * [minecraft-classic-protocol](https://github.com/mhsjlw/minecraft-classic-protocol) defines the classic minecraft protocol with ProtoDef
44 | * [pocket-minecraft-protocol](https://github.com/mhsjlw/pocket-minecraft-protocol) defines the minecraft pocket edition protocol
45 | * [diablo2-protocol](https://github.com/MephisTools/diablo2-protocol) Diablo 2 network protocol
46 | * [dofus-protocol](https://github.com/AstrubTools/dofus-protocol) network protocol for Dofus: create clients and servers for Dofus 1.30
47 |
48 |
--------------------------------------------------------------------------------
/benchmark/benchmark_all_types.js:
--------------------------------------------------------------------------------
1 | /* eslint-env mocha */
2 |
3 | const { testData, proto, compiledProto } = require('../test/dataTypes/prepareTests')
4 | const Benchmark = require('benchmark')
5 |
6 | it('reads', function () {
7 | this.timeout(1000 * 60 * 10)
8 | const readSuite = new Benchmark.Suite()
9 | readSuite.add('read', function () {
10 | testData.forEach(tests => {
11 | tests.data.forEach(test => {
12 | test.subtypes.forEach(subType => {
13 | subType.values.forEach((value) => {
14 | proto.parsePacketBuffer(subType.type, value.buffer)
15 | })
16 | })
17 | })
18 | })
19 | })
20 | .on('cycle', function (event) {
21 | console.log(String(event.target))
22 | })
23 | .run({ async: false })
24 | })
25 |
26 | it('writes', function () {
27 | this.timeout(1000 * 60 * 10)
28 | const writeSuite = new Benchmark.Suite()
29 | writeSuite.add('write', function () {
30 | testData.forEach(tests => {
31 | tests.data.forEach(test => {
32 | test.subtypes.forEach(subType => {
33 | subType.values.forEach((value) => {
34 | proto.createPacketBuffer(subType.type, value.value)
35 | })
36 | })
37 | })
38 | })
39 | })
40 | .on('cycle', function (event) {
41 | console.log(String(event.target))
42 | })
43 | .run({ async: false })
44 | })
45 |
46 | it('reads (compiled)', function () {
47 | this.timeout(1000 * 60 * 10)
48 | const readSuite = new Benchmark.Suite()
49 | readSuite.add('read (compiled)', function () {
50 | testData.forEach(tests => {
51 | tests.data.forEach(test => {
52 | test.subtypes.forEach(subType => {
53 | subType.values.forEach((value) => {
54 | compiledProto.parsePacketBuffer(subType.type, value.buffer)
55 | })
56 | })
57 | })
58 | })
59 | })
60 | .on('cycle', function (event) {
61 | console.log(String(event.target))
62 | })
63 | .run({ async: false })
64 | })
65 |
66 | it('writes (compiled)', function () {
67 | this.timeout(1000 * 60 * 10)
68 | const writeSuite = new Benchmark.Suite()
69 | writeSuite.add('write (compiled)', function () {
70 | testData.forEach(tests => {
71 | tests.data.forEach(test => {
72 | test.subtypes.forEach(subType => {
73 | subType.values.forEach((value) => {
74 | compiledProto.createPacketBuffer(subType.type, value.value)
75 | })
76 | })
77 | })
78 | })
79 | })
80 | .on('cycle', function (event) {
81 | console.log(String(event.target))
82 | })
83 | .run({ async: false })
84 | })
85 |
--------------------------------------------------------------------------------
/benchmark/benchmark_by_kind.js:
--------------------------------------------------------------------------------
1 | /* eslint-env mocha */
2 |
3 | const { testData, proto, compiledProto } = require('../test/dataTypes/prepareTests')
4 | const Benchmark = require('benchmark')
5 |
6 | testData.forEach(tests => {
7 | describe(tests.kind, function () {
8 | this.timeout(1000 * 60 * 10)
9 |
10 | it('reads', function () {
11 | const readSuite = new Benchmark.Suite()
12 | readSuite.add('read', function () {
13 | tests.data.forEach(test => {
14 | test.subtypes.forEach(subType => {
15 | subType.values.forEach((value) => {
16 | proto.parsePacketBuffer(subType.type, value.buffer)
17 | })
18 | })
19 | })
20 | })
21 | .on('cycle', function (event) {
22 | console.log(String(event.target))
23 | })
24 | .run({ async: false })
25 | })
26 |
27 | it('writes', function () {
28 | const writeSuite = new Benchmark.Suite()
29 | writeSuite.add('write', function () {
30 | tests.data.forEach(test => {
31 | test.subtypes.forEach(subType => {
32 | subType.values.forEach((value) => {
33 | proto.createPacketBuffer(subType.type, value.value)
34 | })
35 | })
36 | })
37 | })
38 | .on('cycle', function (event) {
39 | console.log(String(event.target))
40 | })
41 | .run({ async: false })
42 | })
43 |
44 | it('reads (compiled)', function () {
45 | const readSuite = new Benchmark.Suite()
46 | readSuite.add('read (compiled)', function () {
47 | tests.data.forEach(test => {
48 | test.subtypes.forEach(subType => {
49 | subType.values.forEach((value) => {
50 | compiledProto.parsePacketBuffer(subType.type, value.buffer)
51 | })
52 | })
53 | })
54 | })
55 | .on('cycle', function (event) {
56 | console.log(String(event.target))
57 | })
58 | .run({ async: false })
59 | })
60 |
61 | it('writes (compiled)', function () {
62 | const writeSuite = new Benchmark.Suite()
63 | writeSuite.add('write (compiled)', function () {
64 | tests.data.forEach(test => {
65 | test.subtypes.forEach(subType => {
66 | subType.values.forEach((value) => {
67 | compiledProto.createPacketBuffer(subType.type, value.value)
68 | })
69 | })
70 | })
71 | })
72 | .on('cycle', function (event) {
73 | console.log(String(event.target))
74 | })
75 | .run({ async: false })
76 | })
77 | })
78 | })
79 |
--------------------------------------------------------------------------------
/benchmark/benchmark_by_subtype.js:
--------------------------------------------------------------------------------
1 | /* eslint-env mocha */
2 |
3 | const { testData, proto, compiledProto } = require('../test/dataTypes/prepareTests')
4 | const Benchmark = require('benchmark')
5 |
6 | function testType (type, values) {
7 | it('reads', function () {
8 | const readSuite = new Benchmark.Suite()
9 | readSuite.add('read', function () {
10 | values.forEach((value) => {
11 | proto.parsePacketBuffer(type, value.buffer)
12 | })
13 | })
14 | .on('cycle', function (event) {
15 | console.log(String(event.target))
16 | })
17 | .run({ async: false })
18 | })
19 |
20 | it('writes', function () {
21 | const writeSuite = new Benchmark.Suite()
22 | writeSuite.add('write', function () {
23 | values.forEach((value) => {
24 | proto.createPacketBuffer(type, value.value)
25 | })
26 | })
27 | .on('cycle', function (event) {
28 | console.log(String(event.target))
29 | })
30 | .run({ async: false })
31 | })
32 |
33 | it('reads (compiled)', function () {
34 | const readSuite = new Benchmark.Suite()
35 | readSuite.add('read (compiled)', function () {
36 | values.forEach((value) => {
37 | compiledProto.parsePacketBuffer(type, value.buffer)
38 | })
39 | })
40 | .on('cycle', function (event) {
41 | console.log(String(event.target))
42 | })
43 | .run({ async: false })
44 | })
45 |
46 | it('writes (compiled)', function () {
47 | const writeSuite = new Benchmark.Suite()
48 | writeSuite.add('write (compiled)', function () {
49 | values.forEach((value) => {
50 | compiledProto.createPacketBuffer(type, value.value)
51 | })
52 | })
53 | .on('cycle', function (event) {
54 | console.log(String(event.target))
55 | })
56 | .run({ async: false })
57 | })
58 | }
59 |
60 | testData.forEach(tests => {
61 | describe(tests.kind, function () {
62 | this.timeout(1000 * 60 * 10)
63 |
64 | tests.data.forEach(test => {
65 | describe(test.type, () => {
66 | test.subtypes.forEach((subtype) => {
67 | if (subtype.description) {
68 | describe(subtype.description, () => {
69 | testType(subtype.type, subtype.values)
70 | })
71 | } else { testType(subtype.type, subtype.values) }
72 | })
73 | })
74 | })
75 | })
76 | })
77 |
--------------------------------------------------------------------------------
/benchmark/benchmark_by_test.js:
--------------------------------------------------------------------------------
1 | /* eslint-env mocha */
2 |
3 | const { testData, proto, compiledProto } = require('../test/dataTypes/prepareTests')
4 | const Benchmark = require('benchmark')
5 |
6 | function testValue (type, value, buffer) {
7 | it('writes', function () {
8 | const suite = new Benchmark.Suite()
9 | suite.add('writes', function () {
10 | proto.createPacketBuffer(type, value)
11 | })
12 | .on('cycle', function (event) {
13 | console.log(String(event.target))
14 | })
15 | .run({ async: false })
16 | })
17 | it('reads', function () {
18 | const suite = new Benchmark.Suite()
19 | suite.add('read', function () {
20 | proto.parsePacketBuffer(type, buffer)
21 | })
22 | .on('cycle', function (event) {
23 | console.log(String(event.target))
24 | })
25 | .run({ async: false })
26 | })
27 |
28 | it('writes (compiled)', function () {
29 | const suite = new Benchmark.Suite()
30 | suite.add('writes (compiled)', function () {
31 | compiledProto.createPacketBuffer(type, value)
32 | })
33 | .on('cycle', function (event) {
34 | console.log(String(event.target))
35 | })
36 | .run({ async: false })
37 | })
38 | it('reads (compiled)', function () {
39 | const suite = new Benchmark.Suite()
40 | suite.add('read (compiled)', function () {
41 | compiledProto.parsePacketBuffer(type, buffer)
42 | })
43 | .on('cycle', function (event) {
44 | console.log(String(event.target))
45 | })
46 | .run({ async: false })
47 | })
48 | }
49 |
50 | function testType (type, values) {
51 | values.forEach((value) => {
52 | if (value.description) {
53 | describe(value.description, () => {
54 | testValue(type, value.value, value.buffer)
55 | })
56 | } else { testValue(type, value.value, value.buffer) }
57 | })
58 | }
59 |
60 | testData.forEach(tests => {
61 | describe(tests.kind, function () {
62 | this.timeout(1000 * 60 * 10)
63 |
64 | tests.data.forEach(test => {
65 | describe(test.type, () => {
66 | test.subtypes.forEach((subtype) => {
67 | if (subtype.description) {
68 | describe(subtype.description, () => {
69 | testType(subtype.type, subtype.values)
70 | })
71 | } else { testType(subtype.type, subtype.values) }
72 | })
73 | })
74 | })
75 | })
76 | })
77 |
--------------------------------------------------------------------------------
/benchmark/benchmark_by_type.js:
--------------------------------------------------------------------------------
1 | /* eslint-env mocha */
2 |
3 | const { testData, proto, compiledProto } = require('../test/dataTypes/prepareTests')
4 | const Benchmark = require('benchmark')
5 |
6 | testData.forEach(tests => {
7 | describe(tests.kind, function () {
8 | this.timeout(1000 * 60 * 10)
9 |
10 | tests.data.forEach(test => {
11 | describe(test.type, () => {
12 | it('reads', function () {
13 | const readSuite = new Benchmark.Suite()
14 | readSuite.add('read', function () {
15 | test.subtypes.forEach(subType => {
16 | subType.values.forEach((value) => {
17 | proto.parsePacketBuffer(subType.type, value.buffer)
18 | })
19 | })
20 | })
21 | .on('cycle', function (event) {
22 | console.log(String(event.target))
23 | })
24 | .run({ async: false })
25 | })
26 |
27 | it('writes', function () {
28 | const writeSuite = new Benchmark.Suite()
29 | writeSuite.add('write', function () {
30 | test.subtypes.forEach(subType => {
31 | subType.values.forEach((value) => {
32 | proto.createPacketBuffer(subType.type, value.value)
33 | })
34 | })
35 | })
36 | .on('cycle', function (event) {
37 | console.log(String(event.target))
38 | })
39 | .run({ async: false })
40 | })
41 |
42 | it('reads (compiled)', function () {
43 | const readSuite = new Benchmark.Suite()
44 | readSuite.add('read (compiled)', function () {
45 | test.subtypes.forEach(subType => {
46 | subType.values.forEach((value) => {
47 | compiledProto.parsePacketBuffer(subType.type, value.buffer)
48 | })
49 | })
50 | })
51 | .on('cycle', function (event) {
52 | console.log(String(event.target))
53 | })
54 | .run({ async: false })
55 | })
56 |
57 | it('writes (compiled)', function () {
58 | const writeSuite = new Benchmark.Suite()
59 | writeSuite.add('write (compiled)', function () {
60 | test.subtypes.forEach(subType => {
61 | subType.values.forEach((value) => {
62 | compiledProto.createPacketBuffer(subType.type, value.value)
63 | })
64 | })
65 | })
66 | .on('cycle', function (event) {
67 | console.log(String(event.target))
68 | })
69 | .run({ async: false })
70 | })
71 | })
72 | })
73 | })
74 | })
75 |
--------------------------------------------------------------------------------
/benchmark/benchmark_unified.js:
--------------------------------------------------------------------------------
1 | /* eslint-env mocha */
2 |
3 | const { testData, proto, compiledProto } = require('../test/dataTypes/prepareTests')
4 | const Benchmark = require('benchmark')
5 |
6 | it('read/write', function () {
7 | this.timeout(1000 * 60 * 10)
8 | const suite = new Benchmark.Suite()
9 | suite.add('read/write', function () {
10 | testData.forEach(tests => {
11 | tests.data.forEach(test => {
12 | test.subtypes.forEach(subType => {
13 | subType.values.forEach((value) => {
14 | proto.parsePacketBuffer(subType.type, value.buffer)
15 | proto.createPacketBuffer(subType.type, value.value)
16 | })
17 | })
18 | })
19 | })
20 | })
21 | .on('cycle', function (event) {
22 | console.log(String(event.target))
23 | })
24 | .run({ async: false })
25 | })
26 |
27 | it('read/write (compiled)', function () {
28 | this.timeout(1000 * 60 * 10)
29 | const suite = new Benchmark.Suite()
30 | suite.add('read/write (compiled)', function () {
31 | testData.forEach(tests => {
32 | tests.data.forEach(test => {
33 | test.subtypes.forEach(subType => {
34 | subType.values.forEach((value) => {
35 | compiledProto.parsePacketBuffer(subType.type, value.buffer)
36 | compiledProto.createPacketBuffer(subType.type, value.value)
37 | })
38 | })
39 | })
40 | })
41 | })
42 | .on('cycle', function (event) {
43 | console.log(String(event.target))
44 | })
45 | .run({ async: false })
46 | })
47 |
--------------------------------------------------------------------------------
/doc/api.md:
--------------------------------------------------------------------------------
1 | # API
2 |
3 | ## ProtoDef(validation=true)
4 |
5 | ### ProtoDef.addType(name,functions,validate=true)
6 |
7 | Add the type `name` with the data `functions` which can be either:
8 | * "native" : that type is already implemented by ProtoDef
9 | * a js object defining a type based on other already defined types
10 | * `[read,write,sizeOf[,schema]]` functions
11 | * a validate boolean : to check the validity of the type against its schema or not
12 |
13 | See [newDatatypes.md](newDatatypes.md) for more details.
14 |
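For instance, a sketch of the three forms (`point` and `u24` are made-up names; the `u24` functions follow the contracts described in [newDatatypes.md](newDatatypes.md)):

```js
const { ProtoDef } = require('protodef')
const proto = new ProtoDef()

// "native": the type is already implemented by ProtoDef
proto.addType('varint', 'native')

// a js object defining a type based on other already defined types
proto.addType('point', ['container', [
  { name: 'x', type: 'i8' },
  { name: 'y', type: 'i8' }
]])

// [read, write, sizeOf] functions; sizeOf may be a constant for fixed-size types
proto.addType('u24', [
  (buffer, offset) => ({ value: buffer.readUIntBE(offset, 3), size: 3 }),
  (value, buffer, offset) => { buffer.writeUIntBE(value, offset, 3); return offset + 3 },
  3
])
```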
15 | ### ProtoDef.addTypes(types)
16 |
17 | Add `types` which is an object with keys the name of the types and values the type definitions.
18 |
19 | ### ProtoDef.addProtocol(protocol,path)
20 |
21 | Add types in `protocol` recursively. The protocol object is an object with keys `types` and namespace keys.
22 | * The value of the `types` key is an object of type name to type definition.
23 | * The value of the namespace key is a protocol object.
24 |
25 | The `path` is an array of namespace keys which select a path of namespaces to be added to the protodef object.
26 |
27 | See full_protocol.js for an example of usage.
28 |
29 | ### ProtoDef.setVariable(name, value)
30 |
31 | Sets a primitive variable type for the specified `name`, which can be dynamically updated. Can be referenced in switch statements with the "/" prefix.
32 |
33 | ### ProtoDef.read(buffer, cursor, _fieldInfo, rootNodes)
34 |
35 | Read the packet defined by `_fieldInfo` in `buffer` starting from `cursor` using the context `rootNodes`.
36 |
37 | ### ProtoDef.write(value, buffer, offset, _fieldInfo, rootNode)
38 |
39 | Write the packet defined by `_fieldInfo` in `buffer` starting from `offset` with the value `value` and context `rootNode`
40 |
41 | ### ProtoDef.sizeOf(value, _fieldInfo, rootNode)
42 |
43 | Size of the packet `value` defined by `_fieldInfo` with context `rootNode`
44 |
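A sketch of the three low-level calls, reusing the `proto` instance above:

```js
const { value, size } = proto.read(Buffer.from([0x05]), 0, 'varint', {}) // value: 5, size: 1
const buf = Buffer.alloc(proto.sizeOf(5, 'varint', {}))
const end = proto.write(5, buf, 0, 'varint', {}) // returns the new offset: 1
```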
45 | ### ProtoDef.createPacketBuffer(type,packet)
46 |
47 | Returns a buffer of the `packet` for `type`.
48 |
49 | ### ProtoDef.parsePacketBuffer(type,buffer,offset = 0)
50 |
51 | Returns a parsed packet of `buffer` for `type` starting at `offset`.
52 |
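For example, with the `point` type registered above:

```js
const buffer = proto.createPacketBuffer('point', { x: 1, y: 2 })
const parsed = proto.parsePacketBuffer('point', buffer)
// parsed.data -> { x: 1, y: 2 }, parsed.metadata.size -> 2
```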
53 | ## Serializer(proto,mainType)
54 |
55 | Create a serializer of `mainType` defined in `proto`. This is a Transform stream.
56 |
57 | ### Serializer.createPacketBuffer(packet)
58 |
59 | Returns a buffer of the `packet`.
60 |
61 | ## Parser(proto,mainType)
62 |
63 | Create a parser of `mainType` defined in `proto`. This is a Transform stream.
64 |
65 | ### Parser.parsePacketBuffer(buffer)
66 |
67 | Returns a parsed packet of `buffer`.
68 |
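A minimal stream sketch (`greeting` is a made-up type; see [example.js](../example.js) for a complete version):

```js
const { ProtoDef, Serializer, Parser } = require('protodef')

const proto = new ProtoDef()
proto.addType('greeting', ['container', [
  { name: 'text', type: ['pstring', { countType: 'varint' }] }
]])

const serializer = new Serializer(proto, 'greeting')
const parser = new Parser(proto, 'greeting')
serializer.pipe(parser)

parser.on('data', (chunk) => console.log(chunk.data)) // { text: 'hi' }
serializer.write({ text: 'hi' })
```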
69 | ## types
70 |
71 | An object mapping the default type names to the corresponding `[read,write,sizeOf]` functions.
72 |
73 | ## ProtoDefCompiler
74 |
75 | ### ProtoDefCompiler.addTypes(types)
76 |
77 | Add `types` which is an object with keys the name of the types and values the type definitions.
78 |
79 | ### ProtoDefCompiler.addProtocol(protocol,path)
80 |
81 | Add types in `protocol` recursively. The protocol object is an object with keys `types` and namespace keys.
82 | * The value of the `types` key is an object of type name to type definition.
83 | * The value of the namespace key is a protocol object.
84 |
85 | The `path` is an array of namespace keys which select a path of namespaces to be added to the protodef object.
86 |
87 | ### ProtoDefCompiler.addVariable(name, value)
88 |
89 | Adds a primitive variable type for the specified `name`, which can be dynamically updated. Can be referenced in switch statements with the "/" prefix.
90 |
91 | ### ProtoDefCompiler.compileProtoDefSync(options = { printCode: false })
92 |
93 | Compile and return a `ProtoDef` object, optionally printing the generated JavaScript code.
94 |
95 | ## CompiledProtodef
96 |
97 | The class of which an instance is returned by compileProtoDefSync.
98 |
99 | It follows the same interface as ProtoDef: read, write, sizeOf, createPacketBuffer, parsePacketBuffer.
100 | Its constructor is CompiledProtodef(sizeOfCtx, writeCtx, readCtx).
101 | sizeOfCtx, writeCtx and readCtx are the compiled versions of sizeOf, write and read. They are produced by Compiler.compile.
102 |
103 | It can be used directly, for easier debugging or to reuse already compiled JS.
104 |
105 | ### CompiledProtodef.setVariable(name, value)
106 |
107 | Sets a primitive variable type for the specified `name`, which can be dynamically updated. Can be referenced in switch statements with the "/" prefix.
108 |
109 |
110 | ## utils
111 |
112 | Some functions that can be useful to build new datatypes reader and writer.
113 |
114 | ### utils.getField(countField, context)
115 |
116 | Get `countField` given `context`. Example: "../field" will get "field" one level above.
117 |
118 | ### utils.getFieldInfo(fieldInfo)
119 |
120 | Takes `fieldInfo` as :
121 | * `"type"`
122 | * `["type",typeArgs]`
123 | * `{ type: "type", typeArgs: typeArgs }`
124 |
125 | Returns `{ type: "type", typeArgs: typeArgs }`
126 |
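For instance, all three forms normalize to the same shape:

```js
const { utils } = require('protodef')

utils.getFieldInfo('varint') // -> { type: 'varint' }
utils.getFieldInfo(['pstring', { countType: 'varint' }]) // -> { type: 'pstring', typeArgs: { countType: 'varint' } }
```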
127 | ### utils.addErrorField(e, field)
128 |
129 | Add `field` to error `e` and throw e.
130 |
131 | ### utils.tryCatch(tryfn, catchfn)
132 |
133 | A simple tryCatch function, useful for optimization.
134 | Returns what `tryfn` returns.
135 |
136 | ### utils.tryDoc(tryfn, field)
137 |
138 | Try `tryfn`; if it fails, use addErrorField with `field`.
139 |
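A sketch of how a custom reader might use it (`readPoint` is a made-up example):

```js
const { tryDoc } = require('protodef').utils

function readPoint (buffer, offset) {
  // On failure, the error is rethrown with the field name attached
  const x = tryDoc(() => this.read(buffer, offset, 'varint', {}), 'x')
  const y = tryDoc(() => this.read(buffer, offset + x.size, 'varint', {}), 'y')
  return { value: { x: x.value, y: y.value }, size: x.size + y.size }
}
```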
--------------------------------------------------------------------------------
/doc/compiler.md:
--------------------------------------------------------------------------------
1 | # Using the ProtoDef compiler
2 |
3 | The ProtoDef compiler can convert your protocol JSON into JavaScript code that reads and writes buffers directly instead of going through the ProtoDef interpreter. Depending on the types, the expected speedups are in the range of 10x to 100x.
4 |
5 | ## Simple usage
6 |
7 | Let's take a simple ProtoDef definition and convert it to use the ProtoDef compiler:
8 |
9 | ProtoDef:
10 | ```javascript
11 | const ProtoDef = require('protodef').ProtoDef
12 |
13 | // Create a ProtoDef instance
14 | const proto = new ProtoDef()
15 | proto.addTypes(require('./protocol.json'))
16 |
17 | // Encode and decode a message
18 | const buffer = proto.createPacketBuffer('mainType', packet) // 'packet' is a value matching 'mainType'
19 | const result = proto.parsePacketBuffer('mainType', buffer)
20 | ```
21 |
22 | ProtoDef Compiler:
23 | ```javascript
24 | const { ProtoDefCompiler } = require('protodef').Compiler
25 |
26 | // Create a ProtoDefCompiler instance
27 | const compiler = new ProtoDefCompiler()
28 | compiler.addTypesToCompile(require('./protocol.json'))
29 |
30 | // Compile a ProtoDef instance
31 | const compiledProto = await compiler.compileProtoDef()
32 |
33 | // Use it as if it were a normal ProtoDef
34 | const buffer = compiledProto.createPacketBuffer('mainType', packet)
35 | const result = compiledProto.parsePacketBuffer('mainType', buffer)
36 | ```
37 |
38 | ## New datatypes
39 |
40 | Like the ProtoDef interpreter, the ProtoDef compiler can be extended with custom datatypes. To register a custom type, use the `addTypes(types)` method of the ProtoDef compiler. The `types` parameter is an object with the following structure:
41 |
42 | ```javascript
43 | {
44 | Read: {
45 | 'type1': ['native', /* implementation */],
46 | 'type2': ['context', /* implementation */],
47 | 'type3': ['parametrizable', /* implementation */],
48 | /* ... */
49 | },
50 |
51 | Write: {
52 | 'type1': ['native', /* implementation */],
53 | 'type2': ['context', /* implementation */],
54 | 'type3': ['parametrizable', /* implementation */],
55 | /* ... */
56 | },
57 |
58 | SizeOf: {
59 | 'type1': ['native', /* implementation */],
60 | 'type2': ['context', /* implementation */],
61 | 'type3': ['parametrizable', /* implementation */],
62 | /* ... */
63 | }
64 | }
65 | ```
66 |
67 | The types can be divided into 3 categories:
68 |
69 | ### Native Type
70 |
71 | A native type is a type read or written by a function that will be called in its original context. Use this when you need access to external definitions.
72 |
73 | Example:
74 | ```javascript
75 | const UUID = require('uuid-1345')
76 |
77 | {
78 | Read: {
79 | 'UUID': ['native', (buffer, offset) => {
80 | return {
81 | value: UUID.stringify(buffer.slice(offset, 16 + offset)), // A native type can access all captured definitions
82 | size: 16
83 | }
84 | }]
85 | },
86 | Write: {
87 | 'UUID': ['native', (value, buffer, offset) => {
88 | const buf = UUID.parse(value)
89 | buf.copy(buffer, offset)
90 | return offset + 16
91 | }]
92 | },
93 | SizeOf: {
94 | 'UUID': ['native', 16] // For SizeOf, a native type can be a function or directly an integer
95 | }
96 | }
97 | ```
98 |
99 | The native type implementations are compatible with the native functions of the ProtoDef interpreter, and can reuse them.
100 |
101 | ### Context Type
102 |
103 | A context type is a type that will be called in the protocol's context. It can refer to registered native types using `native.{type}()` or context types (provided and generated) using `ctx.{type}()`, but cannot access its original context.
104 |
105 | Example:
106 | ```javascript
107 | const originalContextDefinition = require('something')
108 | /* global ctx */
109 | {
110 | Read: {
111 | 'compound': ['context', (buffer, offset) => {
112 | // originalContextDefinition.something() // BAD: originalContextDefinition cannot be accessed in a context type
113 | const results = {
114 | value: {},
115 | size: 0
116 | }
117 | while (true) {
118 | const typ = ctx.i8(buffer, offset) // Access to a native type (that was copied in the context)
119 | if (typ.value === 0) {
120 | results.size += typ.size
121 | break
122 | }
123 |
124 | const readResults = ctx.nbt(buffer, offset) // Access to a type that was compiled and placed in the context
125 | offset += readResults.size
126 | results.size += readResults.size
127 | results.value[readResults.value.name] = {
128 | type: readResults.value.type,
129 | value: readResults.value.value
130 | }
131 | }
132 | return results
133 | }]
134 | },
135 |
136 | Write: {
137 | 'compound': ['context', (value, buffer, offset) => {
138 | for (const key in value) {
139 | offset = ctx.nbt({
140 | name: key,
141 | type: value[key].type,
142 | value: value[key].value
143 | }, buffer, offset)
144 | }
145 | offset = ctx.i8(0, buffer, offset)
146 | return offset
147 | }]
148 | },
149 |
150 | SizeOf: {
151 | 'compound': ['context', (value) => {
152 | let size = 1
153 | for (const key in value) {
154 | size += ctx.nbt({
155 | name: key,
156 | type: value[key].type,
157 | value: value[key].value
158 | })
159 | }
160 | return size
161 | }]
162 | }
163 | }
164 | ```
165 |
166 | ### Parametrizable Type
167 |
168 | A parametrizable type is a function that will be generated at compile time using the provided maker function.
169 |
170 | Example:
171 | ```javascript
172 | {
173 | Read: {
174 | 'option': ['parametrizable', (compiler, type) => {
175 | let code = 'const {value} = ctx.bool(buffer, offset)\n'
176 | code += 'if (value) {\n'
177 | code += ' const { value, size } = ' + compiler.callType(type) + '\n'
178 | code += ' return { value, size: size + 1 }\n'
179 | code += '}\n'
180 | code += 'return { value: undefined, size: 1}'
181 | return compiler.wrapCode(code)
182 | }]
183 | },
184 |
185 | Write: {
186 | 'option': ['parametrizable', (compiler, type) => {
187 | let code = 'if (value !== null) {\n'
188 | code += ' offset = ctx.bool(1, buffer, offset)\n'
189 | code += ' offset = ' + compiler.callType('value', type) + '\n'
190 | code += '} else {\n'
191 | code += ' offset = ctx.bool(0, buffer, offset)\n'
192 | code += '}\n'
193 | code += 'return offset'
194 | return compiler.wrapCode(code)
195 | }]
196 | },
197 |
198 | SizeOf: {
199 | 'option': ['parametrizable', (compiler, type) => {
200 | let code = 'if (value !== null) {\n'
201 | code += ' return 1 + ' + compiler.callType('value', type) + '\n'
202 | code += '}'
203 | code += 'return 0'
204 | return compiler.wrapCode(code)
205 | }]
206 | }
207 | }
208 | ```
209 |
210 | ### Skip Checks (optional)
211 |
212 | The ProtoDef compiler allows an optional `noArraySizeCheck` variable to be set. By default this value is `false`.
213 |
214 | If set to `true`, the compiler will skip the array checks that apply safety limits to avoid out-of-memory crashes. Sometimes these checks can be too restrictive, and the `noArraySizeCheck` variable allows you to disable them.
215 |
216 | ```javascript
217 | const { ProtoDefCompiler } = require('protodef').Compiler
218 |
219 | // Create a ProtoDefCompiler instance
220 | const compiler = new ProtoDefCompiler()
221 | compiler.addTypesToCompile(require('./protocol.json'))
222 |
223 | // Compile a ProtoDef instance
224 | const compiledProto = await compiler.compileProtoDef()
225 |
226 | // Set the `noArraySizeCheck` variable to skip array checks.
227 | compiledProto.setVariable('noArraySizeCheck', true)
228 |
229 | // Use it as if it were a normal ProtoDef
230 | const buffer = compiledProto.createPacketBuffer('mainType', packet)
231 | const result = compiledProto.parsePacketBuffer('mainType', buffer)
232 | ```
--------------------------------------------------------------------------------
/doc/history.md:
--------------------------------------------------------------------------------
1 | # History
2 |
3 | ## 1.19.0
4 | * [Update protodef.js to remove lodash.get (#167)](https://github.com/ProtoDef-io/node-protodef/commit/98e64f8b940378f791df8b4d6fccdfc873acd3b2) (thanks @rom1504)
5 |
6 | ## 1.18.0
7 | * [Update commands.yml to use GITHUB_TOKEN (#164)](https://github.com/ProtoDef-io/node-protodef/commit/ed625528ef109c7443b56528447f2cd84194e23a) (thanks @extremeheat)
8 | * [Add additional varint types and bitflags (#163)](https://github.com/ProtoDef-io/node-protodef/commit/89c2588fdd26f6100c406f57e8ca42c11c57af84) (thanks @extremeheat)
9 | * [Create commands.yml](https://github.com/ProtoDef-io/node-protodef/commit/67b411aacbf9ad2ccf0aaeac0bb4d9d568b568b3) (thanks @rom1504)
10 | * [Add Optional `skipChecks` to Disable Array Size Check (#154)](https://github.com/ProtoDef-io/node-protodef/commit/1173604de64d4c32f3ff9a2a789230885115870c) (thanks @bdkopen)
11 | * [add benchmark to ci](https://github.com/ProtoDef-io/node-protodef/commit/55e6c631cd14ccbb4d684e9f4e9c50e6fbbf809f) (thanks @rom1504)
12 |
13 | ## 1.17.0
14 |
15 | * Fix anon containers
16 |
17 | ## 1.16.0
18 |
19 | * update protodef definitions
20 | * Replace varint assert with exception
21 |
22 | ## 1.15.0
23 |
24 | * Fix 64-bit BigInt sign handling (@extremeheat)
25 | * Add string encoding option (@extremeheat)
26 | * Show BigInt instead of BigIntExtended when using console.log (@extremeheat)
27 |
28 | ## 1.14.0
29 |
30 | * Allow JSON stringified buffers to be converted to Buffer when serializing (@extremeheat)
31 | * Add primitive variables (@extremeheat)
32 |
33 | ## 1.13.0
34 |
35 | * add fullBuffer to parsePacketBuffer return
36 |
37 | ## 1.12.1
38 |
39 | * update to protodef 1.0.0
40 |
41 | ## 1.12.0
42 |
43 | * Fix issue with anon bitfield in compiler
44 |
45 | ## 1.11.0
46 |
47 | * compiler: add some more reserved keywords (@extremeheat)
48 |
49 | ## 1.10.0
50 |
51 | * exposed CompiledProtodef
52 |
53 | ## 1.9.0
54 |
55 | * compiler: Don't read abnormally large arrays (@extremeheat)
56 | * BigInt writing support, and support reading from buffer offset (@extremeheat)
57 |
58 | ## 1.8.3
59 |
60 | * in full packet parser don't error out in case of missing bytes
61 |
62 | ## 1.8.2
63 |
64 | * fixed aliases in compiler for write and size (thanks @Karang)
65 |
66 | ## 1.8.1
67 |
68 | * fixed to aliases in compiler (thanks @Karang)
69 |
70 | ## 1.8.0
71 |
72 | * add option not to log partial packets in full chunk parser
73 |
74 | ## 1.7.2
75 |
76 | * remove closure compiler
77 |
78 | ## 1.7.1
79 |
80 | * fix option in compiler
81 |
82 | ## 1.7.0
83 |
84 | * Add js compiler protodef implementation, that is 10x faster (thanks @Karang for this huge improvement!)
85 |
86 | ## 1.6.10
87 |
88 | * include .json files with the suffix
89 |
90 | ## 1.6.9
91 |
92 | * use standard style
93 |
94 | ## 1.6.8
95 |
96 | * update deps
97 |
98 | ## 1.6.7
99 |
100 | * stringify packet data before displaying it for an error of wrong length for fullpacketserializer
101 |
102 | ## 1.6.6
103 |
104 | * fix release
105 |
106 | ## 1.6.5
107 |
108 | * fix fullpacketparser error emission in case of partial packet
109 |
110 | ## 1.6.4
111 |
112 | * improve fullpacketparser error
113 |
114 | ## 1.6.3
115 |
116 | * fix fullpacketparser error
117 |
118 | ## 1.6.2
119 |
120 | * improve fullpacketparser error
121 |
122 | ## 1.6.1
123 |
124 | * fix FullPacketParser hiding errors
125 |
126 | ## 1.6.0
127 |
128 | * add full packet parser
129 |
130 | ## 1.5.1
131 |
132 | * fix optional validation
133 |
134 | ## 1.5.0
135 |
136 | * validation is now optional (opt-out)
137 |
138 | ## 1.4.0
139 |
140 | * implement aliases
141 |
142 | ## 1.3.1
143 |
144 | * fix countType : now behave as an ordinary type, remove undocumented countTypeArgs
145 |
146 | ## 1.3.0
147 |
148 | * validate types against type schemas using the protodef validator
149 |
150 | ## 1.2.3
151 |
152 | * fix sendCount : write return the offset, not the size, add a test for this
153 |
154 | ## 1.2.2
155 |
156 | * stop swallowing errors in parser and serializer
157 |
158 | ## 1.2.1
159 |
160 | * add li8, lu8 and u64, lu64 for consistency
161 |
162 | ## 1.2.0
163 |
164 | * all datatypes are tested
165 | * fix cstring
166 | * fix PartialReadError in i64
167 | * remove special count
168 | * use protodef spec
169 | * add little endian numerical types
170 |
171 | ## 1.1.2
172 |
173 | * allow hex values in mappings
174 |
175 | ## 1.1.1
176 |
177 | * update some more dependencies
178 |
179 | ## 1.1.0
180 |
181 | * update to babel6, remove some dependencies
182 |
183 | ## 1.0.3
184 |
185 | * fix slice the buffer in parsePacketBuffer
186 |
187 | ## 1.0.2
188 |
189 | * slice the buffer in parsePacketBuffer
190 |
191 | ## 1.0.1
192 |
193 | * let the parser error out without crashing on errors
194 |
195 | ## 1.0.0
196 |
197 | * change the name of numerical types
198 | * add doc
199 |
200 |
201 | ## 0.3.0
202 |
203 | * add partial packet support
204 |
205 | ## 0.2.6
206 |
207 | * add compareToValue (optional) option to switch
208 |
209 | ## 0.2.5
210 |
211 | * fix small error in switch
212 |
213 | ## 0.2.4
214 |
215 | * get back the example file as one file for simplicity and for tonic
216 |
217 | ## 0.2.3
218 |
219 | * fix a small mistake in mapping error
220 | * improve internal code
221 | * improve example
222 | * integrate with tonicdev
223 |
224 | ## 0.2.2
225 |
226 | * Fix writeOption : the offset wasn't properly updated
227 |
228 | ## 0.2.1
229 |
230 | * Anon fields may now be null/undefined.
231 |
232 | ## 0.2.0
233 |
234 | * add createPacketBuffer and parsePacketBuffer to ProtoDef class
235 | * expose utils functions
236 | * add mapper and pstring datatypes
237 |
238 | ## 0.1.0
239 |
240 | * add the serializer and parser
241 | * expose the default datatypes
242 | * add an example
243 |
244 | ## 0.0.1
245 |
246 | * basic version, mostly contain the ProtoDef class and the datatype
247 |
--------------------------------------------------------------------------------
/doc/newDatatypes.md:
--------------------------------------------------------------------------------
1 | # New datatypes
2 |
3 | You can create new datatypes for ProtoDef.
4 |
5 | Three functions need to be created to define a datatype.
6 |
7 | An optional `schema` property can be added to validate whether the type is used properly in new types.
8 | It must be defined as a json schema. See the [ProtoDef](https://github.com/ProtoDef-io/ProtoDef) repo for schema examples of existing types.
9 |
10 | ## read
11 |
12 | read takes 4 arguments:
13 |
14 | * buffer : the buffer from which to read the data
15 | * offset : where to start reading the buffer
16 | * typeArgs : (optional) the arguments passed to the type
17 | * context : (optional) an object to get values previously read in the containing type
18 |
19 | It must return an object with two values:
20 | * value : the read value
21 | * size : the number of bytes read from the buffer
22 |
23 | Example:
24 |
25 | This read function has 2 typeArgs: type and endVal.
26 | It advances a cursor in order to eventually return the size.
27 | A PartialReadError needs to be thrown if the buffer doesn't contain enough bytes.
28 |
29 | ```js
30 | function readEntityMetadata(buffer, offset, {type,endVal}) {
31 | let cursor = offset;
32 | const metadata = [];
33 | let item;
34 | while(true) {
35 | if(cursor+1>buffer.length)
36 | throw new PartialReadError();
37 | item = buffer.readUInt8(cursor);
38 | if(item === endVal) {
39 | return {
40 | value: metadata,
41 | size: cursor + 1 - offset
42 | };
43 | }
44 | const results = this.read(buffer, cursor, type, {});
45 | metadata.push(results.value);
46 | cursor += results.size;
47 | }
48 | }
49 | ```
50 |
51 | Useful function: this.read which takes 5 arguments:
52 |
53 | * buffer : the buffer from which to read the data
54 | * offset : where to start reading the buffer
55 | * type : the type to read
56 | * typeArgs : (optional) the arguments passed to the type
57 | * context : (optional) an object to get values previously read in the containing type
58 |
59 | It can be used to read an already existing datatype.
60 |
61 | ## write
62 |
63 | write takes 5 arguments:
64 |
65 | * value : the value to be written
66 | * buffer : the buffer to write in
67 | * offset : the offset at which to write in the buffer
68 | * typeArgs : (optional) the arguments passed to the type
69 | * context : (optional) an object to get values previously read in the containing type
70 |
71 | It must return the offset increased with the number of bytes written.
72 |
73 | Example:
74 |
75 | This write function updates the offset at each this.write call.
76 | ```js
77 | function writeEntityMetadata(value, buffer, offset, {type,endVal}) {
78 | const self = this;
79 | value.forEach(function(item) {
80 | offset = self.write(item, buffer, offset, type, {});
81 | });
82 | buffer.writeUInt8(endVal, offset);
83 | return offset + 1;
84 | }
85 | ```
86 |
87 | Useful function: this.write which takes 6 arguments:
88 |
89 | * value : the value to be written
90 | * buffer : the buffer to write in
91 | * offset : the offset at which to write in the buffer
92 | * type : the type to write
93 | * typeArgs : (optional) the arguments passed to the type
94 | * context : (optional) an object to get values previously written in the containing type
95 |
96 | ## sizeOf
97 |
98 | sizeOf takes 3 arguments:
99 |
100 | * value : the value for which to compute the buffer size
101 | * typeArgs : (optional) the arguments passed to the type
102 | * context : (optional) an object to get values previously read in the containing type
103 |
104 | It must return the size of the buffer needed to write the given value.
105 |
106 | Example:
107 |
108 | This sizeOf function calls this.sizeOf to get the size of each value and returns the sum.
109 | ```js
110 | function sizeOfEntityMetadata(value, {type}) {
111 | let size = 1;
112 | for(let i = 0; i < value.length; ++i) {
113 | size += this.sizeOf(value[i], type, {});
114 | }
115 | return size;
116 | }
117 | ```
118 |
119 | Useful function : this.sizeOf which takes 4 arguments:
120 |
121 | * value : the value for which to compute the buffer size
122 | * type : the type to get the size of
123 | * typeArgs : (optional) the arguments passed to the type
124 | * context : (optional) an object to get values previously written in the containing type
125 |
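Once the three functions exist, registering the datatype is a single `addType` call (a sketch; `entityMetadataLoop` is a made-up name):

```js
const { ProtoDef } = require('protodef')

const proto = new ProtoDef()
// Register the functions defined above as a new datatype
proto.addType('entityMetadataLoop', [readEntityMetadata, writeEntityMetadata, sizeOfEntityMetadata])

// Other types can now reference it, passing typeArgs
proto.addTypes({
  metadata: ['entityMetadataLoop', { type: 'i8', endVal: 127 }]
})
```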
--------------------------------------------------------------------------------
/example.js:
--------------------------------------------------------------------------------
1 | const assert = require('assert')
2 | const ProtoDef = require('protodef').ProtoDef
3 | const Serializer = require('protodef').Serializer
4 | const Parser = require('protodef').Parser
5 |
6 | BigInt.prototype.toJSON = function () { // eslint-disable-line -- Allow serializing BigIntegers
7 | return this.toString()
8 | }
9 |
10 | // the protocol can be in a separate json file
11 | const exampleProtocol = {
12 | container: 'native',
13 | varint: 'native',
14 | byte: 'native',
15 | bool: 'native',
16 | switch: 'native',
17 | bitflags: 'native',
18 | entity_look: [
19 | 'container',
20 | [
21 | {
22 | name: 'entityId',
23 | type: 'varint'
24 | },
25 | {
26 | name: 'yaw',
27 | type: 'i8'
28 | },
29 | {
30 | name: 'pitch',
31 | type: 'i8'
32 | },
33 | { name: 'flags', type: ['bitflags', { type: 'u8', flags: ['onGround'] }] },
34 | { name: 'longId', type: 'varint64' },
35 | { name: 'longerId', type: 'varint128' },
36 | { name: 'zigzagId', type: 'zigzag32' },
37 | { name: 'zigzagBig', type: 'zigzag64' }
38 | ]
39 | ],
40 | packet: [
41 | 'container',
42 | [
43 | {
44 | name: 'name',
45 | type: [
46 | 'mapper',
47 | {
48 | type: 'varint',
49 | mappings: {
50 | 22: 'entity_look'
51 | }
52 | }
53 | ]
54 | },
55 | {
56 | name: 'params',
57 | type: [
58 | 'switch',
59 | {
60 | compareTo: 'name',
61 | fields: {
62 | entity_look: 'entity_look'
63 | }
64 | }
65 | ]
66 | }
67 | ]
68 | ]
69 | }
70 |
71 | const proto = new ProtoDef()
72 | proto.addTypes(exampleProtocol)
73 | const parser = new Parser(proto, 'packet')
74 | const serializer = new Serializer(proto, 'packet')
75 |
76 | serializer.write({
77 | name: 'entity_look',
78 | params: {
79 | entityId: 1,
80 | yaw: 1,
81 | pitch: 6,
82 | flags: {
83 | onGround: true
84 | },
85 | longId: 13n,
86 | longerId: 2n ** 68n, // 9 bytes integer, 10 over wire
87 | zigzagId: -3,
88 | zigzagBig: 4294967296n
89 | }
90 | })
91 | serializer.pipe(parser)
92 |
93 | parser.on('data', function (chunk) {
94 | console.dir(chunk, { depth: null })
95 | assert.deepEqual([...chunk.buffer], [22, 1, 1, 6, 1, 13, 128, 128, 128, 128, 128, 128, 128, 128, 128, 32, 5, 128, 128, 128, 128, 32])
96 | })
97 |
--------------------------------------------------------------------------------
/examples/compiled.js:
--------------------------------------------------------------------------------
1 | const ProtoDef = require('protodef').ProtoDef
2 | const { performance } = require('perf_hooks')
3 | const { ProtoDefCompiler } = require('protodef').Compiler
4 |
5 | const exampleProtocol = require('./example_protocol.json')
6 | const mainType = 'packet'
7 | const packetData = {
8 | name: 'entity_look',
9 | params: {
10 | entityId: 1,
11 | yaw: 1,
12 | pitch: 1,
13 | onGround: true,
14 | position: {
15 | x: 42,
16 | y: 255,
17 | z: -1337
18 | }
19 | }
20 | };
21 |
22 | (async () => {
23 | const proto = new ProtoDef()
24 | proto.addTypes(exampleProtocol)
25 |
26 | const compiler = new ProtoDefCompiler()
27 | compiler.addTypesToCompile(exampleProtocol)
28 | const compiledProto = await compiler.compileProtoDef()
29 |
30 | const buffer = proto.createPacketBuffer(mainType, packetData)
31 | const result = compiledProto.parsePacketBuffer(mainType, buffer).data
32 | console.log(JSON.stringify(result, null, 2))
33 | const buffer2 = compiledProto.createPacketBuffer(mainType, packetData)
34 | const result2 = proto.parsePacketBuffer(mainType, buffer2).data
35 | console.log(JSON.stringify(result2, null, 2))
36 |
37 | const nbTests = 10000000
38 | console.log('Running ' + nbTests + ' tests')
39 | let start, time, ps
40 |
41 | start = performance.now()
42 | for (let i = 0; i < nbTests; i++) {
43 | const result = compiledProto.parsePacketBuffer(mainType, buffer).data
44 | compiledProto.createPacketBuffer(mainType, result)
45 | }
46 | time = performance.now() - start
47 | ps = nbTests / time
48 | console.log('read / write compiled: ' + time.toFixed(2) + ' ms (' + ps.toFixed(2) + 'k packet/s)')
49 |
50 | start = performance.now()
51 | for (let i = 0; i < nbTests / 10; i++) { // less tests otherwise too long
52 | const result = proto.parsePacketBuffer(mainType, buffer).data
53 | proto.createPacketBuffer(mainType, result)
54 | }
55 | time = performance.now() - start
56 | ps = nbTests / 10 / time
57 | console.log('read / write parser: ' + time.toFixed(2) + ' ms (' + ps.toFixed(2) + 'k packet/s)')
58 | })()
59 |
--------------------------------------------------------------------------------
/examples/error_handling.js:
--------------------------------------------------------------------------------
1 | const ProtoDef = require('protodef').ProtoDef
2 | const Serializer = require('protodef').Serializer
3 | const Parser = require('protodef').Parser
4 |
5 | const exampleProtocol = require('./example_protocol.json')
6 |
7 | const proto = new ProtoDef()
8 | proto.addTypes(exampleProtocol)
9 | const parser = new Parser(proto, 'packet')
10 | const serializer = new Serializer(proto, 'packet')
11 |
12 | serializer.write({
13 | name: 'entity_look',
14 | params: {
15 | entityId: 1,
16 | yaw: 1,
17 | pitch: 1,
18 | onGround: true
19 | }
20 | })
21 |
22 | parser.on('error', function (err) {
23 | console.log(err.stack)
24 | console.log(err.buffer)
25 | })
26 |
27 | parser.write(Buffer.from([0x17, 0x01, 0x01, 0x01, 0x01]))
28 |
29 | serializer.pipe(parser)
30 |
31 | parser.on('data', function (chunk) {
32 | console.log(JSON.stringify(chunk.data, null, 2))
33 | })
34 |
--------------------------------------------------------------------------------
/examples/example_protocol.json:
--------------------------------------------------------------------------------
1 | {
2 | "container": "native",
3 | "varint": "native",
4 | "byte": "native",
5 | "bool": "native",
6 | "switch": "native",
7 | "entity_look": [
8 | "container",
9 | [
10 | {
11 | "name": "entityId",
12 | "type": "varint"
13 | },
14 | {
15 | "name": "yaw",
16 | "type": "i8"
17 | },
18 | {
19 | "name": "pitch",
20 | "type": "i8"
21 | },
22 | {
23 | "name": "onGround",
24 | "type": "bool"
25 | },
26 | {
27 | "name": "position",
28 | "type":
29 | ["bitfield", [
30 | { "name": "x", "size": 26, "signed": true },
31 | { "name": "y", "size": 12, "signed": true },
32 | { "name": "z", "size": 26, "signed": true }
33 | ]]
34 | }
35 | ]
36 | ],
37 | "packet": [
38 | "container",
39 | [
40 | {
41 | "name": "name",
42 | "type": [
43 | "mapper",
44 | {
45 | "type": "varint",
46 | "mappings": {
47 | "22": "entity_look"
48 | }
49 | }
50 | ]
51 | },
52 | {
53 | "name": "params",
54 | "type": [
55 | "switch",
56 | {
57 | "compareTo": "name",
58 | "fields": {
59 | "entity_look": "entity_look"
60 | }
61 | }
62 | ]
63 | }
64 | ]
65 | ]
66 | }
--------------------------------------------------------------------------------
/examples/full_protocol.js:
--------------------------------------------------------------------------------
1 | const ProtoDef = require('protodef').ProtoDef
2 | const Serializer = require('protodef').Serializer
3 | const Parser = require('protodef').Parser
4 |
5 | const exampleProtocol = require('./full_protocol_example.json')
6 |
7 | const proto = new ProtoDef()
8 | proto.addProtocol(exampleProtocol, ['login', 'toClient'])
9 | const parser = new Parser(proto, 'packet')
10 | const serializer = new Serializer(proto, 'packet')
11 |
12 | serializer.write({
13 | name: 'success',
14 | params: {
15 | uuid: 'some uuid',
16 | username: 'some name'
17 | }
18 | })
19 |
20 | parser.on('error', function (err) {
21 | console.log(err.stack)
22 | console.log(err.buffer)
23 | })
24 |
25 | serializer.pipe(parser)
26 |
27 | parser.on('data', function (chunk) {
28 | console.log(JSON.stringify(chunk.data, null, 2))
29 | })
30 |
--------------------------------------------------------------------------------
/examples/full_protocol_example.json:
--------------------------------------------------------------------------------
1 | {
2 | "types": {
3 | "varint": "native",
4 | "pstring": "native",
5 | "u16": "native",
6 | "u8": "native",
7 | "i64": "native",
8 | "buffer": "native",
9 | "i32": "native",
10 | "i8": "native",
11 | "bool": "native",
12 | "i16": "native",
13 | "f32": "native",
14 | "f64": "native",
15 | "option": "native",
16 | "bitfield": "native",
17 | "container": "native",
18 | "switch": "native",
19 | "void": "native",
20 | "array": "native",
21 | "string": [
22 | "pstring",
23 | {
24 | "countType": "varint"
25 | }
26 | ]
27 | },
28 | "status": {
29 | "toClient": {
30 | "types": {
31 | "packet_server_info": [
32 | "container",
33 | [
34 | {
35 | "name": "response",
36 | "type": "string"
37 | }
38 | ]
39 | ],
40 | "packet_ping": [
41 | "container",
42 | [
43 | {
44 | "name": "time",
45 | "type": "i64"
46 | }
47 | ]
48 | ],
49 | "packet": [
50 | "container",
51 | [
52 | {
53 | "name": "name",
54 | "type": [
55 | "mapper",
56 | {
57 | "type": "varint",
58 | "mappings": {
59 | "0x00": "server_info",
60 | "0x01": "ping"
61 | }
62 | }
63 | ]
64 | },
65 | {
66 | "name": "params",
67 | "type": [
68 | "switch",
69 | {
70 | "compareTo": "name",
71 | "fields": {
72 | "server_info": "packet_server_info",
73 | "ping": "packet_ping"
74 | }
75 | }
76 | ]
77 | }
78 | ]
79 | ]
80 | }
81 | },
82 | "toServer": {
83 | "types": {
84 | "packet_ping_start": [
85 | "container",
86 | []
87 | ],
88 | "packet_ping": [
89 | "container",
90 | [
91 | {
92 | "name": "time",
93 | "type": "i64"
94 | }
95 | ]
96 | ],
97 | "packet": [
98 | "container",
99 | [
100 | {
101 | "name": "name",
102 | "type": [
103 | "mapper",
104 | {
105 | "type": "varint",
106 | "mappings": {
107 | "0x00": "ping_start",
108 | "0x01": "ping"
109 | }
110 | }
111 | ]
112 | },
113 | {
114 | "name": "params",
115 | "type": [
116 | "switch",
117 | {
118 | "compareTo": "name",
119 | "fields": {
120 | "ping_start": "packet_ping_start",
121 | "ping": "packet_ping"
122 | }
123 | }
124 | ]
125 | }
126 | ]
127 | ]
128 | }
129 | }
130 | },
131 | "login": {
132 | "toClient": {
133 | "types": {
134 | "packet_success": [
135 | "container",
136 | [
137 | {
138 | "name": "uuid",
139 | "type": "string"
140 | },
141 | {
142 | "name": "username",
143 | "type": "string"
144 | }
145 | ]
146 | ],
147 | "packet": [
148 | "container",
149 | [
150 | {
151 | "name": "name",
152 | "type": [
153 | "mapper",
154 | {
155 | "type": "varint",
156 | "mappings": {
157 | "0x02": "success"
158 | }
159 | }
160 | ]
161 | },
162 | {
163 | "name": "params",
164 | "type": [
165 | "switch",
166 | {
167 | "compareTo": "name",
168 | "fields": {
169 | "success": "packet_success"
170 | }
171 | }
172 | ]
173 | }
174 | ]
175 | ]
176 | }
177 | }
178 | }
179 | }
180 |
--------------------------------------------------------------------------------
/examples/variable.js:
--------------------------------------------------------------------------------
1 | const { ProtoDef } = require('protodef')
2 | const assert = require('assert')
3 |
4 | // Create a protocol where DrawText is sent with an "opacity" field at the end only if the color isn't transparent.
5 | const protocol = {
6 | string: ['pstring', { countType: 'varint' }],
7 | ColorPalette: ['container', [{ name: 'palette', type: ['array', { countType: 'i32', type: 'string' }] }]],
8 | DrawText: ['container', [{ name: 'color', type: 'u8' }, { name: 'opacity', type: ['switch', { compareTo: 'color', fields: { '/color_transparent': 'void' }, default: 'u8' }] }]]
9 | }
10 |
11 | function test () {
12 | // A "palette" here refers to a array of values, identified with their index in the array
13 | const palette = ['red', 'green', 'blue', 'transparent']
14 | const proto = new ProtoDef()
15 | proto.addTypes(protocol)
16 | // A "variable" is similar to a type, it's a primitive value that can be used in switch comparisons.
17 | proto.setVariable('color_transparent', palette.indexOf('transparent'))
18 | // An example usage is sending paletted IDs, with field serialization based on those IDs
19 | proto.createPacketBuffer('ColorPalette', { palette })
20 | // Here "opacity" (0x4) is written *only* if the color isn't transparent. In this case it is, so 0x4 isn't written.
21 | // The buffer therefore holds just 0x3, the index of the "transparent" color.
22 | const s = proto.createPacketBuffer('DrawText', { color: palette.indexOf('transparent'), opacity: 4 })
23 | assert(s.equals(Buffer.from([3])))
24 | console.log(s)
25 |
26 | // Here 4 should be written at the end
27 | const t = proto.createPacketBuffer('DrawText', { color: palette.indexOf('blue'), opacity: 4 })
28 | assert(t.equals(Buffer.from([2, 4])))
29 | }
30 |
31 | test()
32 |
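33 | // The compiled ProtoDef supports the same pattern; a minimal sketch, assuming the
34 | // ProtoDefCompiler API exposed under require('protodef').Compiler:
35 | function testCompiled () {
36 | const { ProtoDefCompiler } = require('protodef').Compiler
37 | const palette = ['red', 'green', 'blue', 'transparent']
38 | const compiler = new ProtoDefCompiler()
39 | compiler.addTypesToCompile(protocol)
40 | compiler.addVariable('color_transparent', palette.indexOf('transparent'))
41 | const proto = compiler.compileProtoDefSync()
42 | // Same expectations as the interpreted version above
43 | assert(proto.createPacketBuffer('DrawText', { color: palette.indexOf('transparent'), opacity: 4 }).equals(Buffer.from([3])))
44 | assert(proto.createPacketBuffer('DrawText', { color: palette.indexOf('blue'), opacity: 4 }).equals(Buffer.from([2, 4])))
45 | }
46 |
47 | testCompiled()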
--------------------------------------------------------------------------------
/index.d.ts:
--------------------------------------------------------------------------------
1 | /// <reference types="node" />
2 |
3 | type ReadFn = (buffer: Buffer, cursor: number, _fieldInfo: any, rootNodes: any) => any
4 | type WriteFn = (value: any, buffer: Buffer, offset: number, _fieldInfo: any, rootNode: any) => number
5 | type SizeOfFn = (value: any, _fieldInfo: any, rootNode: any) => number
6 |
7 | type FieldInfo = string | { type: string, typeArgs: any }
8 | type TypeFunc = [ReadFn, WriteFn, number | SizeOfFn, any?]
9 | type TypeParams = any
10 | declare interface TypeParamsCounted { count: number | FieldInfo, countType: TypeDef }
11 | type TypeDef = 'native' | TypeFunc | [string, any]
12 | type TypesDef = { [field: string]: TypeDef }
13 | type Protocol = {
14 | types: TypesDef
15 | [field: string]: TypesDef | TypeDef | Protocol
16 | }
17 | type Results = {
18 | value: any,
19 | size: number
20 | }
21 | type ExtendedResults = {
22 | data: any,
23 | metadata: {
24 | size: number
25 | },
26 | buffer: Buffer,
27 | fullBuffer: Buffer
28 | }
29 |
30 | declare abstract class TransformSerialization extends Transform {
31 | private proto: ProtoDef
32 | private mainType: string
33 | constructor(proto: ProtoDef, mainType: string)
34 | private _transform(chunk: any, enc: BufferEncoding, cb: CallableFunction): never
35 | }
36 |
37 | declare class ProtodefValidator {
38 | constructor(typesSchemas: unknown)
39 | createAjvInstance(typesSchemas: unknown): void
40 | addDefaultTypes(): void
41 | addTypes(schemas: unknown): void
42 | typeToSchemaName(name: string): string
43 | addType(name: string, schema: unknown): void
44 | validateType(type: unknown): void
45 | validateTypeGoingInside(type: unknown): void
46 | validateProtocol(protocol: unknown): void
47 | }
48 | declare type TypeDefKind = 'native' | 'context' | 'parametrizable'
49 |
50 | declare abstract class ProtodefBaseCompiler {
51 | primitiveTypes: { [key: string]: string }
52 | native: { [key: string]: Function }
53 | context: { [key: string]: string }
54 | types: TypesDef
55 | scopeStack: object[]
56 | parameterizableTypes: { [key: string]: Function }
57 | addNativeType(type: string, fn: CallableFunction): void
58 | addContextType(type: string, fn: CallableFunction): void
59 | addParametrizableType(type: string, maker: CallableFunction): void
60 | addTypes(types: { [key: string]: [TypeDefKind, CallableFunction] }): void
61 | addProtocol(protocolData: Protocol, path: string[]): void
62 | protected addTypesToCompile(types: any): void
63 | protected indent(code: string, indent: string): string
64 | protected getField(name: string): any
65 | generate(): string
66 | compile(code: string): Function
67 | protected wrapCode (code: string, args: string[]): string
68 | protected compileType(type: string | any[]): string
69 | }
70 |
71 | declare class ProtodefReadCompiler extends ProtodefBaseCompiler {
72 | private callType(value: string, type: string | any[], offsetExpr: string, args: string[]): string
73 | }
74 |
75 | declare class ProtodefWriteCompiler extends ProtodefBaseCompiler {
76 | private callType(value: string, type: string | any[], offsetExpr: string, args: string[]): string
77 | }
78 |
79 | declare class ProtodefSizeOfCompiler extends ProtodefBaseCompiler {
80 | private callType(value: string, type: string | any[], args: string[]): string
81 | }
82 |
83 | declare class ProtodefCompiler {
84 | readCompiler: ProtodefReadCompiler
85 | writeCompiler: ProtodefWriteCompiler
86 | sizeOfCompiler: ProtodefSizeOfCompiler
87 | addTypes(types: { [key: string]: [TypeDefKind, CallableFunction] }): void
88 | addProtocol(protocolData: Protocol, path: string[]): void
89 | protected addTypesToCompile(types: any): void
90 | addVariable(key: string, val: any): void
91 | compileProtoDefSync(options?: { printCode?: boolean }): CompiledProtoDef
92 | }
93 |
94 | declare abstract class AbstractProtoDefInterface {
95 | read: ReadFn
96 | write: WriteFn
97 | sizeOf: SizeOfFn
98 | createPacketBuffer(type: string, packet: any): Buffer
99 | parsePacketBuffer(type: string, buffer: Buffer, offset?: number): ExtendedResults
100 | }
101 |
102 | declare class CompiledProtoDef extends AbstractProtoDefInterface {
103 | private sizeOfCtx: SizeOfFn
104 | private writeCtx: WriteFn
105 | private readCtx: ReadFn
106 | constructor(sizeOfCtx: SizeOfFn, writeCtx: WriteFn, readCtx: ReadFn)
107 | setVariable(key: string, val: any): void
108 | }
109 |
110 | declare class ProtodefPartialError extends Error {
111 | partialReadError: true
112 | constructor(message?: string)
113 | }
114 |
115 | declare module 'protodef' {
116 | export class ProtoDef extends AbstractProtoDefInterface {
117 | private types: TypesDef
118 | constructor(validation?: boolean)
119 | private addDefaultTypes(): void
120 | addType(name: string, functions: TypeDef, validate?: boolean): void
121 | addTypes(types: TypesDef): void
122 | addProtocol(protocolData: Protocol, path: string[]): void
123 | setVariable(key: string, val: any): void
124 | }
125 | export class Serializer extends TransformSerialization {
126 | private queue: Buffer
127 | createPacketBuffer(packet: any): Buffer
128 | }
129 | export class Parser extends TransformSerialization {
130 | private queue: Buffer
131 | parsePacketBuffer(packet: any): Buffer
132 | }
133 | export class FullPacketParser extends TransformSerialization {
134 | noErrorLogging: boolean
135 | constructor(proto: ProtoDef, mainType: string, noErrorLogging?: boolean)
136 | parsePacketBuffer(packet: any): Buffer
137 | }
138 | export const Compiler: {
139 | ReadCompiler: typeof ProtodefReadCompiler
140 | WriteCompiler: typeof ProtodefWriteCompiler
141 | SizeOfCompiler: typeof ProtodefSizeOfCompiler
142 | ProtoDefCompiler: typeof ProtodefCompiler
143 | CompiledProtodef: typeof CompiledProtoDef
144 | }
145 | export const utils: {
146 | getField(countField: string, context: object): any | undefined
147 | getFieldInfo(fieldInfo: FieldInfo): FieldInfo
148 | addErrorField(e: Error & { field: string }, field: string): Error & { field: string }
149 | getCount(buffer: Buffer, offset: number, options: TypeParamsCounted, rootNode: any): { count: number, size: number }
150 | sendCount(len: number, buffer: Buffer, offset: number, options: TypeParamsCounted, rootNode: any): number
151 | calcCount(len: number, options: TypeParamsCounted, rootNode: any): number
152 | tryCatch(tryfn: CallableFunction, catchfn: CallableFunction): any
153 | PartialReadError: typeof ProtodefPartialError
154 | }
155 | }
156 |
--------------------------------------------------------------------------------
/index.js:
--------------------------------------------------------------------------------
1 | module.exports = require('./src/index.js')
2 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "protodef",
3 | "version": "1.19.0",
4 | "description": "A simple yet powerful way to define binary protocols",
5 | "main": "index.js",
6 | "types": "index.d.ts",
7 | "author": "roblabla ",
8 | "scripts": {
9 | "lint": "standard",
10 | "fix": "standard --fix",
11 | "unit-test": "mocha --recursive --reporter spec",
12 | "test": "npm run lint && npm run unit-test",
13 | "benchmark": "mocha benchmark/benchmark_unified.js"
14 | },
15 | "tonicExampleFilename": "example.js",
16 | "license": "MIT",
17 | "dependencies": {
18 | "lodash.reduce": "^4.6.0",
19 | "protodef-validator": "^1.3.0",
20 | "readable-stream": "^4.4.0"
21 | },
22 | "engines": {
23 | "node": ">=14"
24 | },
25 | "bugs": {
26 | "url": "https://github.com/ProtoDef-io/node-protodef/issues"
27 | },
28 | "homepage": "https://github.com/ProtoDef-io/node-protodef",
29 | "repository": {
30 | "type": "git",
31 | "url": "https://github.com/ProtoDef-io/node-protodef.git"
32 | },
33 | "devDependencies": {
34 | "benchmark": "^2.1.4",
35 | "chai": "^4.1.2",
36 | "jsonschema": "^1.2.4",
37 | "mocha": "^10.2.0",
38 | "protodef": "file:.",
39 | "standard": "^17.0.0"
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
/src/compiler.js:
--------------------------------------------------------------------------------
1 | const numeric = require('./datatypes/numeric')
2 | const utils = require('./datatypes/utils')
3 |
4 | const conditionalDatatypes = require('./datatypes/compiler-conditional')
5 | const structuresDatatypes = require('./datatypes/compiler-structures')
6 | const utilsDatatypes = require('./datatypes/compiler-utils')
7 |
8 | const { tryCatch } = require('./utils')
9 |
10 | class ProtoDefCompiler {
11 | constructor () {
12 | this.readCompiler = new ReadCompiler()
13 | this.writeCompiler = new WriteCompiler()
14 | this.sizeOfCompiler = new SizeOfCompiler()
15 | }
16 |
17 | addTypes (types) {
18 | this.readCompiler.addTypes(types.Read)
19 | this.writeCompiler.addTypes(types.Write)
20 | this.sizeOfCompiler.addTypes(types.SizeOf)
21 | }
22 |
23 | addTypesToCompile (types) {
24 | this.readCompiler.addTypesToCompile(types)
25 | this.writeCompiler.addTypesToCompile(types)
26 | this.sizeOfCompiler.addTypesToCompile(types)
27 | }
28 |
29 | addProtocol (protocolData, path) {
30 | this.readCompiler.addProtocol(protocolData, path)
31 | this.writeCompiler.addProtocol(protocolData, path)
32 | this.sizeOfCompiler.addProtocol(protocolData, path)
33 | }
34 |
35 | addVariable (key, val) {
36 | this.readCompiler.addContextType(key, val)
37 | this.writeCompiler.addContextType(key, val)
38 | this.sizeOfCompiler.addContextType(key, val)
39 | }
40 |
41 | compileProtoDefSync (options = { printCode: false }) {
42 | const sizeOfCode = this.sizeOfCompiler.generate()
43 | const writeCode = this.writeCompiler.generate()
44 | const readCode = this.readCompiler.generate()
45 | if (options.printCode) {
46 | console.log('// SizeOf:')
47 | console.log(sizeOfCode)
48 | console.log('// Write:')
49 | console.log(writeCode)
50 | console.log('// Read:')
51 | console.log(readCode)
52 | }
53 | const sizeOfCtx = this.sizeOfCompiler.compile(sizeOfCode)
54 | const writeCtx = this.writeCompiler.compile(writeCode)
55 | const readCtx = this.readCompiler.compile(readCode)
56 | return new CompiledProtodef(sizeOfCtx, writeCtx, readCtx)
57 | }
58 | }
59 |
60 | class CompiledProtodef {
61 | constructor (sizeOfCtx, writeCtx, readCtx) {
62 | this.sizeOfCtx = sizeOfCtx
63 | this.writeCtx = writeCtx
64 | this.readCtx = readCtx
65 | }
66 |
67 | read (buffer, cursor, type) {
68 | const readFn = this.readCtx[type]
69 | if (!readFn) { throw new Error('missing data type: ' + type) }
70 | return readFn(buffer, cursor)
71 | }
72 |
73 | write (value, buffer, cursor, type) {
74 | const writeFn = this.writeCtx[type]
75 | if (!writeFn) { throw new Error('missing data type: ' + type) }
76 | return writeFn(value, buffer, cursor)
77 | }
78 |
79 | setVariable (key, val) {
80 | this.sizeOfCtx[key] = val
81 | this.readCtx[key] = val
82 | this.writeCtx[key] = val
83 | }
84 |
85 | sizeOf (value, type) {
86 | const sizeFn = this.sizeOfCtx[type]
87 | if (!sizeFn) { throw new Error('missing data type: ' + type) }
88 | if (typeof sizeFn === 'function') {
89 | return sizeFn(value)
90 | } else {
91 | return sizeFn
92 | }
93 | }
94 |
95 | createPacketBuffer (type, packet) {
96 | const length = tryCatch(() => this.sizeOf(packet, type),
97 | (e) => {
98 | e.message = `SizeOf error for ${e.field} : ${e.message}`
99 | throw e
100 | })
101 | const buffer = Buffer.allocUnsafe(length)
102 | tryCatch(() => this.write(packet, buffer, 0, type),
103 | (e) => {
104 | e.message = `Write error for ${e.field} : ${e.message}`
105 | throw e
106 | })
107 | return buffer
108 | }
109 |
110 | parsePacketBuffer (type, buffer, offset = 0) {
111 | const { value, size } = tryCatch(() => this.read(buffer, offset, type),
112 | (e) => {
113 | e.message = `Read error for ${e.field} : ${e.message}`
114 | throw e
115 | })
116 | return {
117 | data: value,
118 | metadata: { size },
119 | buffer: buffer.slice(0, size),
120 | fullBuffer: buffer
121 | }
122 | }
123 | }
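// Typical usage, as a sketch ('./protocol.json' stands in for a hypothetical protocol file):
//   const compiler = new ProtoDefCompiler()
//   compiler.addTypesToCompile(require('./protocol.json').types)
//   const proto = compiler.compileProtoDefSync()
//   const buf = proto.createPacketBuffer('packet', { name: 'ping', params: { time: 1n } })
//   const { data } = proto.parsePacketBuffer('packet', buf)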
124 |
125 | class Compiler {
126 | constructor () {
127 | this.primitiveTypes = {}
128 | this.native = {}
129 | this.context = {}
130 | this.types = {}
131 | this.scopeStack = []
132 | this.parameterizableTypes = {}
133 | }
134 |
135 | /**
136 | * A native type is a type read or written by a function that will be called in its
137 | * original context.
138 | * @param {*} type
139 | * @param {*} fn
140 | */
141 | addNativeType (type, fn) {
142 | this.primitiveTypes[type] = `native.${type}`
143 | this.native[type] = fn
144 | this.types[type] = 'native'
145 | }
146 |
147 | /**
148 | * A context type is a type that will be called in the protocol's context. It can refer to
149 | * registered native types using native.{type}() or context types (provided and generated)
150 | * using ctx.{type}(), but cannot access its original context.
151 | * @param {*} type
152 | * @param {*} fn
153 | */
154 | addContextType (type, fn) {
155 | this.primitiveTypes[type] = `ctx.${type}`
156 | this.context[type] = fn.toString()
157 | }
158 |
159 | /**
160 | * A parametrizable type is a function that will be generated at compile time using the
161 | * provided maker function
162 | * @param {*} type
163 | * @param {*} maker
164 | */
165 | addParametrizableType (type, maker) {
166 | this.parameterizableTypes[type] = maker
167 | }
168 |
169 | addTypes (types) {
170 | for (const [type, [kind, fn]] of Object.entries(types)) {
171 | if (kind === 'native') this.addNativeType(type, fn)
172 | else if (kind === 'context') this.addContextType(type, fn)
173 | else if (kind === 'parametrizable') this.addParametrizableType(type, fn)
174 | }
175 | }
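// A sketch of registering one type of each kind on a ReadCompiler ('double_u8' and 'alias'
// are illustrative names, not library types):
//   compiler.addTypes({
//     u8: ['native', (buffer, offset) => ({ value: buffer.readUInt8(offset), size: 1 })],
//     double_u8: ['context', (buffer, offset) => { const r = native.u8(buffer, offset); return { value: 2 * r.value, size: r.size } }],
//     alias: ['parametrizable', (compiler, opts) => compiler.wrapCode('return ' + compiler.callType(opts.type))]
//   })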
176 |
177 | addTypesToCompile (types) {
178 | for (const [type, json] of Object.entries(types)) {
179 | // Replace native types; otherwise the first definition wins
180 | if (!this.types[type] || this.types[type] === 'native') this.types[type] = json
181 | }
182 | }
183 |
184 | addProtocol (protocolData, path) {
185 | const self = this
186 | function recursiveAddTypes (protocolData, path) {
187 | if (protocolData === undefined) { return }
188 | if (protocolData.types) { self.addTypesToCompile(protocolData.types) }
189 | recursiveAddTypes(protocolData[path.shift()], path)
190 | }
191 | recursiveAddTypes(protocolData, path.slice(0))
192 | }
193 |
194 | indent (code, indent = ' ') {
195 | return code.split('\n').map((line) => indent + line).join('\n')
196 | }
197 |
198 | getField (name, noAssign) {
199 | const path = name.split('/')
200 | let i = this.scopeStack.length - 1
201 | const reserved = ['value', 'enum', 'default', 'size', 'offset']
202 | while (path.length) {
203 | const scope = this.scopeStack[i]
204 | const field = path.shift()
205 | if (field === '..') {
206 | i--
207 | continue
208 | }
209 | // We are at the right level
210 | if (scope[field]) return scope[field] + (path.length ? ('.' + path.join('.')) : '')
211 | if (path.length !== 0) {
212 | throw new Error('Cannot access properties of undefined field')
213 | }
214 | // Count how many naming collisions occurred in enclosing scopes
215 | let count = 0
216 | if (reserved.includes(field)) count++
217 | for (let j = 0; j < i; j++) {
218 | if (this.scopeStack[j][field]) count++
219 | }
220 | if (noAssign) { // referencing a variable, inherit from parent scope
221 | scope[field] = field
222 | } else { // creating a new variable in this scope
223 | scope[field] = field + (count || '') // If the name is already used, add a number
224 | }
225 | return scope[field]
226 | }
227 | throw new Error('Unknown field ' + name)
228 | }
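// Illustrative: getField('../time') resolves 'time' one container level up, while
// getField('position/x') resolves 'position' in the visible scopes and then appends '.x'.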
229 |
230 | generate () {
231 | this.scopeStack = [{}]
232 | const functions = []
233 | for (const type in this.context) {
234 | functions[type] = this.context[type]
235 | }
236 | for (const type in this.types) {
237 | if (!functions[type]) {
238 | if (this.types[type] !== 'native') {
239 | functions[type] = this.compileType(this.types[type])
240 | if (functions[type].startsWith('ctx')) {
241 | functions[type] = 'function () { return ' + functions[type] + '(...arguments) }'
242 | }
243 | if (!isNaN(functions[type])) { functions[type] = this.wrapCode(' return ' + functions[type]) }
244 | } else {
245 | functions[type] = `native.${type}`
246 | }
247 | }
248 | }
249 | return '() => {\n' + this.indent('const ctx = {\n' + this.indent(Object.keys(functions).map((type) => {
250 | return type + ': ' + functions[type]
251 | }).join(',\n')) + '\n}\nreturn ctx') + '\n}'
252 | }
253 |
254 | /**
255 | * Compile the given js code, providing native.{type} to the context, return the compiled types
256 | * @param {*} code
257 | */
258 | compile (code) {
259 | // Local variable to provide some context to eval()
260 | const native = this.native // eslint-disable-line
261 | const { PartialReadError } = require('./utils') // eslint-disable-line
262 | return eval(code)() // eslint-disable-line
263 | }
264 | }
265 |
266 | class ReadCompiler extends Compiler {
267 | constructor () {
268 | super()
269 |
270 | this.addTypes(conditionalDatatypes.Read)
271 | this.addTypes(structuresDatatypes.Read)
272 | this.addTypes(utilsDatatypes.Read)
273 |
274 | // Add default types
275 | for (const key in numeric) {
276 | this.addNativeType(key, numeric[key][0])
277 | }
278 | for (const key in utils) {
279 | this.addNativeType(key, utils[key][0])
280 | }
281 | }
282 |
283 | compileType (type) {
284 | if (type instanceof Array) {
285 | if (this.parameterizableTypes[type[0]]) { return this.parameterizableTypes[type[0]](this, type[1]) }
286 | if (this.types[type[0]] && this.types[type[0]] !== 'native') {
287 | return this.wrapCode('return ' + this.callType(type[0], 'offset', Object.values(type[1])))
288 | }
289 | throw new Error('Unknown parametrizable type: ' + JSON.stringify(type[0]))
290 | } else { // Primitive type
291 | if (type === 'native') return 'null'
292 | if (this.types[type]) { return 'ctx.' + type }
293 | return this.primitiveTypes[type]
294 | }
295 | }
296 |
297 | wrapCode (code, args = []) {
298 | if (args.length > 0) return '(buffer, offset, ' + args.join(', ') + ') => {\n' + this.indent(code) + '\n}'
299 | return '(buffer, offset) => {\n' + this.indent(code) + '\n}'
300 | }
301 |
302 | callType (type, offsetExpr = 'offset', args = []) {
303 | if (type instanceof Array) {
304 | if (this.types[type[0]] && this.types[type[0]] !== 'native') {
305 | return this.callType(type[0], offsetExpr, Object.values(type[1]))
306 | }
307 | }
308 | if (type instanceof Array && type[0] === 'container') this.scopeStack.push({})
309 | const code = this.compileType(type)
310 | if (type instanceof Array && type[0] === 'container') this.scopeStack.pop()
311 | if (args.length > 0) return '(' + code + `)(buffer, ${offsetExpr}, ` + args.map(name => this.getField(name)).join(', ') + ')'
312 | return '(' + code + `)(buffer, ${offsetExpr})`
313 | }
314 | }
315 |
316 | class WriteCompiler extends Compiler {
317 | constructor () {
318 | super()
319 |
320 | this.addTypes(conditionalDatatypes.Write)
321 | this.addTypes(structuresDatatypes.Write)
322 | this.addTypes(utilsDatatypes.Write)
323 |
324 | // Add default types
325 | for (const key in numeric) {
326 | this.addNativeType(key, numeric[key][1])
327 | }
328 | for (const key in utils) {
329 | this.addNativeType(key, utils[key][1])
330 | }
331 | }
332 |
333 | compileType (type) {
334 | if (type instanceof Array) {
335 | if (this.parameterizableTypes[type[0]]) { return this.parameterizableTypes[type[0]](this, type[1]) }
336 | if (this.types[type[0]] && this.types[type[0]] !== 'native') {
337 | return this.wrapCode('return ' + this.callType('value', type[0], 'offset', Object.values(type[1])))
338 | }
339 | throw new Error('Unknown parametrizable type: ' + type[0])
340 | } else { // Primitive type
341 | if (type === 'native') return 'null'
342 | if (this.types[type]) { return 'ctx.' + type }
343 | return this.primitiveTypes[type]
344 | }
345 | }
346 |
347 | wrapCode (code, args = []) {
348 | if (args.length > 0) return '(value, buffer, offset, ' + args.join(', ') + ') => {\n' + this.indent(code) + '\n}'
349 | return '(value, buffer, offset) => {\n' + this.indent(code) + '\n}'
350 | }
351 |
352 | callType (value, type, offsetExpr = 'offset', args = []) {
353 | if (type instanceof Array) {
354 | if (this.types[type[0]] && this.types[type[0]] !== 'native') {
355 | return this.callType(value, type[0], offsetExpr, Object.values(type[1]))
356 | }
357 | }
358 | if (type instanceof Array && type[0] === 'container') this.scopeStack.push({})
359 | const code = this.compileType(type)
360 | if (type instanceof Array && type[0] === 'container') this.scopeStack.pop()
361 | if (args.length > 0) return '(' + code + `)(${value}, buffer, ${offsetExpr}, ` + args.map(name => this.getField(name)).join(', ') + ')'
362 | return '(' + code + `)(${value}, buffer, ${offsetExpr})`
363 | }
364 | }
365 |
366 | class SizeOfCompiler extends Compiler {
367 | constructor () {
368 | super()
369 |
370 | this.addTypes(conditionalDatatypes.SizeOf)
371 | this.addTypes(structuresDatatypes.SizeOf)
372 | this.addTypes(utilsDatatypes.SizeOf)
373 |
374 | // Add default types
375 | for (const key in numeric) {
376 | this.addNativeType(key, numeric[key][2])
377 | }
378 | for (const key in utils) {
379 | this.addNativeType(key, utils[key][2])
380 | }
381 | }
382 |
383 | /**
384 | * A native type is a type read or written by a function that will be called in its
385 | * original context.
386 | * @param {*} type
387 | * @param {*} fn
388 | */
389 | addNativeType (type, fn) {
390 | this.primitiveTypes[type] = `native.${type}`
391 | if (!isNaN(fn)) {
392 | this.native[type] = (value) => { return fn }
393 | } else {
394 | this.native[type] = fn
395 | }
396 | this.types[type] = 'native'
397 | }
398 |
399 | compileType (type) {
400 | if (type instanceof Array) {
401 | if (this.parameterizableTypes[type[0]]) { return this.parameterizableTypes[type[0]](this, type[1]) }
402 | if (this.types[type[0]] && this.types[type[0]] !== 'native') {
403 | return this.wrapCode('return ' + this.callType('value', type[0], Object.values(type[1])))
404 | }
405 | throw new Error('Unknown parametrizable type: ' + type[0])
406 | } else { // Primitive type
407 | if (type === 'native') return 'null'
408 | if (!isNaN(this.primitiveTypes[type])) return this.primitiveTypes[type]
409 | if (this.types[type]) { return 'ctx.' + type }
410 | return this.primitiveTypes[type]
411 | }
412 | }
413 |
414 | wrapCode (code, args = []) {
415 | if (args.length > 0) return '(value, ' + args.join(', ') + ') => {\n' + this.indent(code) + '\n}'
416 | return '(value) => {\n' + this.indent(code) + '\n}'
417 | }
418 |
419 | callType (value, type, args = []) {
420 | if (type instanceof Array) {
421 | if (this.types[type[0]] && this.types[type[0]] !== 'native') {
422 | return this.callType(value, type[0], Object.values(type[1]))
423 | }
424 | }
425 | if (type instanceof Array && type[0] === 'container') this.scopeStack.push({})
426 | const code = this.compileType(type)
427 | if (type instanceof Array && type[0] === 'container') this.scopeStack.pop()
428 | if (!isNaN(code)) return code
429 | if (args.length > 0) return '(' + code + `)(${value}, ` + args.map(name => this.getField(name)).join(', ') + ')'
430 | return '(' + code + `)(${value})`
431 | }
432 | }
433 |
434 | module.exports = {
435 | ReadCompiler,
436 | WriteCompiler,
437 | SizeOfCompiler,
438 | ProtoDefCompiler,
439 | CompiledProtodef
440 | }
441 |
--------------------------------------------------------------------------------
/src/datatypes/compiler-conditional.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | Read: {
3 | switch: ['parametrizable', (compiler, struct) => {
4 | let compare = struct.compareTo ? struct.compareTo : struct.compareToValue
5 | const args = []
6 | if (compare.startsWith('$')) args.push(compare)
7 | else if (struct.compareTo) {
8 | compare = compiler.getField(compare, true)
9 | }
10 | let code = `switch (${compare}) {\n`
11 | for (const key in struct.fields) {
12 | let val = key
13 | if (val.startsWith('/')) val = 'ctx.' + val.slice(1) // Root context variable
14 | else if (isNaN(val) && val !== 'true' && val !== 'false') val = `"${val}"`
15 | code += compiler.indent(`case ${val}: return ` + compiler.callType(struct.fields[key])) + '\n'
16 | }
17 | code += compiler.indent('default: return ' + compiler.callType(struct.default ? struct.default : 'void')) + '\n'
18 | code += '}'
19 | return compiler.wrapCode(code, args)
20 | }],
21 | option: ['parametrizable', (compiler, type) => {
22 | let code = 'const {value} = ctx.bool(buffer, offset)\n'
23 | code += 'if (value) {\n'
24 | code += ' const { value, size } = ' + compiler.callType(type, 'offset + 1') + '\n'
25 | code += ' return { value, size: size + 1 }\n'
26 | code += '}\n'
27 | code += 'return { value: undefined, size: 1}'
28 | return compiler.wrapCode(code)
29 | }]
30 | },
31 |
32 | Write: {
33 | switch: ['parametrizable', (compiler, struct) => {
34 | let compare = struct.compareTo ? struct.compareTo : struct.compareToValue
35 | const args = []
36 | if (compare.startsWith('$')) args.push(compare)
37 | else if (struct.compareTo) {
38 | compare = compiler.getField(compare, true)
39 | }
40 | let code = `switch (${compare}) {\n`
41 | for (const key in struct.fields) {
42 | let val = key
43 | if (val.startsWith('/')) val = 'ctx.' + val.slice(1) // Root context variable
44 | else if (isNaN(val) && val !== 'true' && val !== 'false') val = `"${val}"`
45 | code += compiler.indent(`case ${val}: return ` + compiler.callType('value', struct.fields[key])) + '\n'
46 | }
47 | code += compiler.indent('default: return ' + compiler.callType('value', struct.default ? struct.default : 'void')) + '\n'
48 | code += '}'
49 | return compiler.wrapCode(code, args)
50 | }],
51 | option: ['parametrizable', (compiler, type) => {
52 | let code = 'if (value != null) {\n'
53 | code += ' offset = ctx.bool(1, buffer, offset)\n'
54 | code += ' offset = ' + compiler.callType('value', type) + '\n'
55 | code += '} else {\n'
56 | code += ' offset = ctx.bool(0, buffer, offset)\n'
57 | code += '}\n'
58 | code += 'return offset'
59 | return compiler.wrapCode(code)
60 | }]
61 | },
62 |
63 | SizeOf: {
64 | switch: ['parametrizable', (compiler, struct) => {
65 | let compare = struct.compareTo ? struct.compareTo : struct.compareToValue
66 | const args = []
67 | if (compare.startsWith('$')) args.push(compare)
68 | else if (struct.compareTo) {
69 | compare = compiler.getField(compare, true)
70 | }
71 | let code = `switch (${compare}) {\n`
72 | for (const key in struct.fields) {
73 | let val = key
74 | if (val.startsWith('/')) val = 'ctx.' + val.slice(1) // Root context variable
75 | else if (isNaN(val) && val !== 'true' && val !== 'false') val = `"${val}"`
76 | code += compiler.indent(`case ${val}: return ` + compiler.callType('value', struct.fields[key])) + '\n'
77 | }
78 | code += compiler.indent('default: return ' + compiler.callType('value', struct.default ? struct.default : 'void')) + '\n'
79 | code += '}'
80 | return compiler.wrapCode(code, args)
81 | }],
82 | option: ['parametrizable', (compiler, type) => {
83 | let code = 'if (value != null) {\n'
84 | code += ' return 1 + ' + compiler.callType('value', type) + '\n'
85 | code += '}\n'
86 | code += 'return 1'
87 | return compiler.wrapCode(code)
88 | }]
89 | }
90 | }
91 |
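92 | // For illustration, the Read 'option' maker above compiles, for some compiled inner type T,
93 | // into code shaped like:
94 | //   (buffer, offset) => {
95 | //     const {value} = ctx.bool(buffer, offset)
96 | //     if (value) {
97 | //       const { value, size } = (ctx.T)(buffer, offset + 1)
98 | //       return { value, size: size + 1 }
99 | //     }
100 | //     return { value: undefined, size: 1 }
101 | //   }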
--------------------------------------------------------------------------------
/src/datatypes/compiler-structures.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | Read: {
3 | array: ['parametrizable', (compiler, array) => {
4 | let code = ''
5 | if (array.countType) {
6 | code += 'const { value: count, size: countSize } = ' + compiler.callType(array.countType) + '\n'
7 | } else if (array.count) {
8 | code += 'const count = ' + array.count + '\n'
9 | code += 'const countSize = 0\n'
10 | } else {
11 | throw new Error('Array must contain either count or countType')
12 | }
13 | code += 'if (count > 0xffffff && !ctx.noArraySizeCheck) throw new Error("array size is abnormally large, not reading: " + count)\n'
14 | code += 'const data = []\n'
15 | code += 'let size = countSize\n'
16 | code += 'for (let i = 0; i < count; i++) {\n'
17 | code += ' const elem = ' + compiler.callType(array.type, 'offset + size') + '\n'
18 | code += ' data.push(elem.value)\n'
19 | code += ' size += elem.size\n'
20 | code += '}\n'
21 | code += 'return { value: data, size }'
22 | return compiler.wrapCode(code)
23 | }],
24 | count: ['parametrizable', (compiler, type) => {
25 | const code = 'return ' + compiler.callType(type.type)
26 | return compiler.wrapCode(code)
27 | }],
28 | container: ['parametrizable', (compiler, values) => {
29 | values = containerInlining(values)
30 |
31 | let code = ''
32 | let offsetExpr = 'offset'
33 | const names = []
34 | for (const i in values) {
35 | const { type, name, anon, _shouldBeInlined } = values[i]
36 | let trueName
37 | let sizeName
38 | if (type instanceof Array && type[0] === 'bitfield' && anon) {
39 | const subnames = []
40 | for (const { name } of type[1]) {
41 | const trueName = compiler.getField(name)
42 | if (name === trueName) {
43 | names.push(name)
44 | subnames.push(name)
45 | } else {
46 | names.push(`${name}: ${trueName}`)
47 | subnames.push(`${name}: ${trueName}`)
48 | }
49 | }
50 | trueName = '{' + subnames.join(', ') + '}'
51 | sizeName = `anon${i}Size`
52 | } else {
53 | trueName = compiler.getField(name)
54 | sizeName = `${trueName}Size`
55 | if (_shouldBeInlined) names.push('...' + name)
56 | else if (name === trueName) names.push(name)
57 | else names.push(`${name}: ${trueName}`)
58 | }
59 | code += `let { value: ${trueName}, size: ${sizeName} } = ` + compiler.callType(type, offsetExpr) + '\n'
60 | offsetExpr += ` + ${sizeName}`
61 | }
62 | const sizes = offsetExpr.split(' + ')
63 | sizes.shift()
64 | if (sizes.length === 0) sizes.push('0')
65 | code += 'return { value: { ' + names.join(', ') + ' }, size: ' + sizes.join(' + ') + '}'
66 | return compiler.wrapCode(code)
67 | }]
68 | },
69 |
70 | Write: {
71 | array: ['parametrizable', (compiler, array) => {
72 | let code = ''
73 | if (array.countType) {
74 | code += 'offset = ' + compiler.callType('value.length', array.countType) + '\n'
75 | } else if (array.count === undefined) {
76 | throw new Error('Array must contain either count or countType')
77 | }
78 | code += 'for (let i = 0; i < value.length; i++) {\n'
79 | code += ' offset = ' + compiler.callType('value[i]', array.type) + '\n'
80 | code += '}\n'
81 | code += 'return offset'
82 | return compiler.wrapCode(code)
83 | }],
84 | count: ['parametrizable', (compiler, type) => {
85 | const code = 'return ' + compiler.callType('value', type.type)
86 | return compiler.wrapCode(code)
87 | }],
88 | container: ['parametrizable', (compiler, values) => {
89 | values = containerInlining(values)
90 | let code = ''
91 | for (const i in values) {
92 | const { type, name, anon, _shouldBeInlined } = values[i]
93 | let trueName
94 | if (type instanceof Array && type[0] === 'bitfield' && anon) {
95 | const names = []
96 | for (const { name } of type[1]) {
97 | const trueName = compiler.getField(name)
98 | code += `const ${trueName} = value.${name}\n`
99 | if (name === trueName) names.push(name)
100 | else names.push(`${name}: ${trueName}`)
101 | }
102 | trueName = '{' + names.join(', ') + '}'
103 | } else {
104 | trueName = compiler.getField(name)
105 | if (_shouldBeInlined) code += `let ${name} = value\n`
106 | else code += `let ${trueName} = value.${name}\n`
107 | }
108 | code += 'offset = ' + compiler.callType(trueName, type) + '\n'
109 | }
110 | code += 'return offset'
111 | return compiler.wrapCode(code)
112 | }]
113 | },
114 |
115 | SizeOf: {
116 | array: ['parametrizable', (compiler, array) => {
117 | let code = ''
118 | if (array.countType) {
119 | code += 'let size = ' + compiler.callType('value.length', array.countType) + '\n'
120 | } else if (array.count) {
121 | code += 'let size = 0\n'
122 | } else {
123 | throw new Error('Array must contain either count or countType')
124 | }
125 | if (!isNaN(compiler.callType('value[i]', array.type))) {
126 | code += 'size += value.length * ' + compiler.callType('value[i]', array.type) + '\n'
127 | } else {
128 | code += 'for (let i = 0; i < value.length; i++) {\n'
129 | code += ' size += ' + compiler.callType('value[i]', array.type) + '\n'
130 | code += '}\n'
131 | }
132 | code += 'return size'
133 | return compiler.wrapCode(code)
134 | }],
135 | count: ['parametrizable', (compiler, type) => {
136 | const code = 'return ' + compiler.callType('value', type.type)
137 | return compiler.wrapCode(code)
138 | }],
139 | container: ['parametrizable', (compiler, values) => {
140 | values = containerInlining(values)
141 | let code = 'let size = 0\n'
142 | for (const i in values) {
143 | const { type, name, anon, _shouldBeInlined } = values[i]
144 | let trueName
145 | if (type instanceof Array && type[0] === 'bitfield' && anon) {
146 | const names = []
147 | for (const { name } of type[1]) {
148 | const trueName = compiler.getField(name)
149 | code += `const ${trueName} = value.${name}\n`
150 | if (name === trueName) names.push(name)
151 | else names.push(`${name}: ${trueName}`)
152 | }
153 | trueName = '{' + names.join(', ') + '}'
154 | } else {
155 | trueName = compiler.getField(name)
156 | if (_shouldBeInlined) code += `let ${name} = value\n`
157 | else code += `let ${trueName} = value.${name}\n`
158 | }
159 | code += 'size += ' + compiler.callType(trueName, type) + '\n'
160 | }
161 | code += 'return size'
162 | return compiler.wrapCode(code)
163 | }]
164 | }
165 | }
166 |
167 | function uniqueId () {
168 | return '_' + Math.random().toString(36).slice(2, 11)
169 | }
170 |
171 | function containerInlining (values) {
172 | // Inlining (only one level is supported)
173 | const newValues = []
174 | for (const i in values) {
175 | const { type, anon } = values[i]
176 | if (anon && !(type instanceof Array && type[0] === 'bitfield')) {
177 | if (type instanceof Array && type[0] === 'container') {
178 | for (const j in type[1]) newValues.push(type[1][j])
179 | } else if (type instanceof Array && type[0] === 'switch') {
180 | newValues.push({
181 | name: uniqueId(),
182 | _shouldBeInlined: true,
183 | type
184 | })
185 | } else {
186 | throw new Error('Cannot inline anonymous type: ' + type)
187 | }
188 | } else {
189 | newValues.push(values[i])
190 | }
191 | }
192 | return newValues
193 | }
194 |
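195 | // Illustrative: an anonymous field { anon: true, type: ['container', [{ name: 'x', type: 'u8' }]] }
196 | // is replaced by its inner fields, so 'x' becomes a direct sibling in the parent container, while an
197 | // anonymous switch gets a generated placeholder name and is spread via the _shouldBeInlined flag above.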
--------------------------------------------------------------------------------
/src/datatypes/compiler-utils.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | Read: {
3 | pstring: ['parametrizable', (compiler, string) => {
4 | let code = ''
5 | if (string.countType) {
6 | code += 'const { value: count, size: countSize } = ' + compiler.callType(string.countType) + '\n'
7 | } else if (string.count) {
8 | code += 'const count = ' + string.count + '\n'
9 | code += 'const countSize = 0\n'
10 | } else {
11 | throw new Error('pstring must contain either count or countType')
12 | }
13 | code += 'offset += countSize\n'
14 | code += 'if (offset + count > buffer.length) {\n'
15 | code += ' throw new PartialReadError("Missing characters in string, found size is " + buffer.length + " expected size was " + (offset + count))\n'
16 | code += '}\n'
17 | code += `return { value: buffer.toString("${string.encoding || 'utf8'}", offset, offset + count), size: count + countSize }`
18 | return compiler.wrapCode(code)
19 | }],
20 | buffer: ['parametrizable', (compiler, buffer) => {
21 | let code = ''
22 | if (buffer.countType) {
23 | code += 'const { value: count, size: countSize } = ' + compiler.callType(buffer.countType) + '\n'
24 | } else if (buffer.count) {
25 | code += 'const count = ' + buffer.count + '\n'
26 | code += 'const countSize = 0\n'
27 | } else {
28 | throw new Error('buffer must contain either count or countType')
29 | }
30 | code += 'offset += countSize\n'
31 | code += 'if (offset + count > buffer.length) {\n'
32 | code += ' throw new PartialReadError()\n'
33 | code += '}\n'
34 | code += 'return { value: buffer.slice(offset, offset + count), size: count + countSize }'
35 | return compiler.wrapCode(code)
36 | }],
37 | bitfield: ['parametrizable', (compiler, values) => {
38 | let code = ''
39 | const totalBytes = Math.ceil(values.reduce((acc, { size }) => acc + size, 0) / 8)
40 | code += `if ( offset + ${totalBytes} > buffer.length) { throw new PartialReadError() }\n`
41 |
42 | const names = []
43 | let totalSize = 8
44 | code += 'let bits = buffer[offset++]\n'
45 | for (const i in values) {
46 | const { name, size, signed } = values[i]
47 | const trueName = compiler.getField(name)
48 | while (totalSize < size) {
49 | totalSize += 8
50 | code += 'bits = (bits << 8) | buffer[offset++]\n'
51 | }
52 | code += `let ${trueName} = (bits >> ` + (totalSize - size) + ') & 0x' + ((1 << size) - 1).toString(16) + '\n'
53 | if (signed) code += `${trueName} -= (${trueName} & 0x` + (1 << (size - 1)).toString(16) + ') << 1\n'
54 | totalSize -= size
55 | if (name === trueName) names.push(name)
56 | else names.push(`${name}: ${trueName}`)
57 | }
58 | code += 'return { value: { ' + names.join(', ') + ` }, size: ${totalBytes} }`
59 | return compiler.wrapCode(code)
60 | }],
61 | bitflags: ['parametrizable', (compiler, { type, flags, shift, big }) => {
62 | let fstr = JSON.stringify(flags)
63 | if (Array.isArray(flags)) {
64 | fstr = '{'
65 | for (const [k, v] of Object.entries(flags)) fstr += `"${v}": ${big ? (1n << BigInt(k)) : (1 << k)}` + (big ? 'n,' : ',')
66 | fstr += '}'
67 | } else if (shift) {
68 | fstr = '{'
69 | for (const key in flags) fstr += `"${key}": ${1 << flags[key]}${big ? 'n,' : ','}`
70 | fstr += '}'
71 | }
72 | return compiler.wrapCode(`
73 | const { value: _value, size } = ${compiler.callType(type, 'offset')}
74 | const value = { _value }
75 | const flags = ${fstr}
76 | for (const key in flags) {
77 | value[key] = (_value & flags[key]) == flags[key]
78 | }
79 | return { value, size }
80 | `.trim())
81 | }],
82 | mapper: ['parametrizable', (compiler, mapper) => {
83 | let code = 'const { value, size } = ' + compiler.callType(mapper.type) + '\n'
84 | code += 'return { value: ' + JSON.stringify(sanitizeMappings(mapper.mappings)) + '[value] || value, size }'
85 | return compiler.wrapCode(code)
86 | }]
87 | },
88 |
89 | Write: {
90 | pstring: ['parametrizable', (compiler, string) => {
91 | let code = `const length = Buffer.byteLength(value, "${string.encoding || 'utf8'}")\n`
92 | if (string.countType) {
93 | code += 'offset = ' + compiler.callType('length', string.countType) + '\n'
94 | } else if (string.count === undefined) {
95 | throw new Error('pstring must contain either count or countType')
96 | }
97 | code += `buffer.write(value, offset, length, "${string.encoding || 'utf8'}")\n`
98 | code += 'return offset + length'
99 | return compiler.wrapCode(code)
100 | }],
101 | buffer: ['parametrizable', (compiler, buffer) => {
102 | let code = 'if (!(value instanceof Buffer)) value = Buffer.from(value)\n'
103 | if (buffer.countType) {
104 | code += 'offset = ' + compiler.callType('value.length', buffer.countType) + '\n'
105 | } else if (buffer.count === undefined) {
106 | throw new Error('buffer must contain either count or countType')
107 | }
108 | code += 'value.copy(buffer, offset)\n'
109 | code += 'return offset + value.length'
110 | return compiler.wrapCode(code)
111 | }],
112 | bitfield: ['parametrizable', (compiler, values) => {
113 | let toWrite = ''
114 | let bits = 0
115 | let code = ''
116 | for (const i in values) {
117 | let { name, size } = values[i]
118 | const trueName = compiler.getField(name)
119 | code += `let ${trueName} = value.${name}\n`
120 | while (size > 0) {
121 | const writeBits = Math.min(8 - bits, size)
122 | const mask = ((1 << writeBits) - 1)
123 | if (toWrite !== '') toWrite = `((${toWrite}) << ${writeBits}) | `
124 | toWrite += `((${trueName} >> ` + (size - writeBits) + ') & 0x' + mask.toString(16) + ')'
125 | size -= writeBits
126 | bits += writeBits
127 | if (bits === 8) {
128 | code += 'buffer[offset++] = ' + toWrite + '\n'
129 | bits = 0
130 | toWrite = ''
131 | }
132 | }
133 | }
134 | if (bits !== 0) {
135 | code += 'buffer[offset++] = (' + toWrite + ') << ' + (8 - bits) + '\n'
136 | }
137 | code += 'return offset'
138 | return compiler.wrapCode(code)
139 | }],
140 | bitflags: ['parametrizable', (compiler, { type, flags, shift, big }) => {
141 | let fstr = JSON.stringify(flags)
142 | if (Array.isArray(flags)) {
143 | fstr = '{'
144 | for (const [k, v] of Object.entries(flags)) fstr += `"${v}": ${big ? (1n << BigInt(k)) : (1 << k)}` + (big ? 'n,' : ',')
145 | fstr += '}'
146 | } else if (shift) {
147 | fstr = '{'
148 | for (const key in flags) fstr += `"${key}": ${1 << flags[key]}${big ? 'n,' : ','}`
149 | fstr += '}'
150 | }
151 | return compiler.wrapCode(`
152 | const flags = ${fstr}
153 | let val = value._value ${big ? '|| 0n' : ''}
154 | for (const key in flags) {
155 | if (value[key]) val |= flags[key]
156 | }
157 | return (ctx.${type})(val, buffer, offset)
158 | `.trim())
159 | }],
160 | mapper: ['parametrizable', (compiler, mapper) => {
161 | const mappings = JSON.stringify(swapMappings(mapper.mappings))
162 | const code = 'return ' + compiler.callType(`${mappings}[value] || value`, mapper.type)
163 | return compiler.wrapCode(code)
164 | }]
165 | },
166 |
167 | SizeOf: {
168 | pstring: ['parametrizable', (compiler, string) => {
169 | let code = `let size = Buffer.byteLength(value, "${string.encoding || 'utf8'}")\n`
170 | if (string.countType) {
171 | code += 'size += ' + compiler.callType('size', string.countType) + '\n'
172 | } else if (string.count === undefined) {
173 | throw new Error('pstring must contain either count or countType')
174 | }
175 | code += 'return size'
176 | return compiler.wrapCode(code)
177 | }],
178 | buffer: ['parametrizable', (compiler, buffer) => {
179 | let code = 'let size = value instanceof Buffer ? value.length : Buffer.from(value).length\n'
180 | if (buffer.countType) {
181 | code += 'size += ' + compiler.callType('size', buffer.countType) + '\n'
182 | } else if (buffer.count === undefined) {
183 | throw new Error('buffer must contain either count or countType')
184 | }
185 | code += 'return size'
186 | return compiler.wrapCode(code)
187 | }],
188 | bitfield: ['parametrizable', (compiler, values) => {
189 | const totalBytes = Math.ceil(values.reduce((acc, { size }) => acc + size, 0) / 8)
190 | return `${totalBytes}`
191 | }],
192 | bitflags: ['parametrizable', (compiler, { type, flags, shift, big }) => {
193 | let fstr = JSON.stringify(flags)
194 | if (Array.isArray(flags)) {
195 | fstr = '{'
196 | for (const [k, v] of Object.entries(flags)) fstr += `"${v}": ${big ? (1n << BigInt(k)) : (1 << k)}` + (big ? 'n,' : ',')
197 | fstr += '}'
198 | } else if (shift) {
199 | fstr = '{'
200 | for (const key in flags) fstr += `"${key}": ${1 << flags[key]}${big ? 'n,' : ','}`
201 | fstr += '}'
202 | }
203 | return compiler.wrapCode(`
204 | const flags = ${fstr}
205 | let val = value._value ${big ? '|| 0n' : ''}
206 | for (const key in flags) {
207 | if (value[key]) val |= flags[key]
208 | }
209 | return (ctx.${type})(val)
210 | `.trim())
211 | }],
212 | mapper: ['parametrizable', (compiler, mapper) => {
213 | const mappings = JSON.stringify(swapMappings(mapper.mappings))
214 | const code = 'return ' + compiler.callType(`${mappings}[value] || value`, mapper.type)
215 | return compiler.wrapCode(code)
216 | }]
217 | }
218 | }
219 |
220 | // Convert hexadecimal keys to decimal
221 | function sanitizeMappings (json) {
222 | const ret = {}
223 | for (let key in json) {
224 | let val = json[key]
225 | key = hex2dec(key)
226 | if (!isNaN(val)) val = Number(val)
227 | if (val === 'true') val = true
228 | if (val === 'false') val = false
229 | ret[key] = val
230 | }
231 | return ret
232 | }
233 |
234 | function swapMappings (json) {
235 | const ret = {}
236 | for (let key in json) {
237 | const val = json[key]
238 | key = hex2dec(key)
239 | ret[val] = (isNaN(key)) ? key : parseInt(key, 10)
240 | }
241 | return ret
242 | }
243 |
244 | function hex2dec (num) {
245 | if ((num.match(/^0x[0-9a-f]+$/i))) { return parseInt(num.substring(2), 16) }
246 | return num
247 | }
248 |
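249 | // Illustrative: sanitizeMappings({ '0x02': 'success' }) yields { 2: 'success' } (read direction),
250 | // while swapMappings({ '0x02': 'success' }) yields { success: 2 } (write/sizeOf direction).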
--------------------------------------------------------------------------------
/src/datatypes/conditional.js:
--------------------------------------------------------------------------------
1 | const { getField, getFieldInfo, tryDoc, PartialReadError } = require('../utils')
2 |
3 | module.exports = {
4 | switch: [readSwitch, writeSwitch, sizeOfSwitch, require('../../ProtoDef/schemas/conditional.json').switch],
5 | option: [readOption, writeOption, sizeOfOption, require('../../ProtoDef/schemas/conditional.json').option]
6 | }
7 |
8 | function readSwitch (buffer, offset, { compareTo, fields, compareToValue, default: defVal }, rootNode) {
9 | compareTo = compareToValue !== undefined ? compareToValue : getField(compareTo, rootNode)
10 | if (typeof fields[compareTo] === 'undefined' && typeof defVal === 'undefined') { throw new Error(compareTo + ' has no associated fieldInfo in switch') }
11 | for (const field in fields) {
12 | if (field.startsWith('/')) {
13 | fields[this.types[field.slice(1)]] = fields[field]
14 | delete fields[field]
15 | }
16 | }
17 | const caseDefault = typeof fields[compareTo] === 'undefined'
18 | const resultingType = caseDefault ? defVal : fields[compareTo]
19 | const fieldInfo = getFieldInfo(resultingType)
20 | return tryDoc(() => this.read(buffer, offset, fieldInfo, rootNode), caseDefault ? 'default' : compareTo)
21 | }
22 |
23 | function writeSwitch (value, buffer, offset, { compareTo, fields, compareToValue, default: defVal }, rootNode) {
24 | compareTo = compareToValue !== undefined ? compareToValue : getField(compareTo, rootNode)
25 | if (typeof fields[compareTo] === 'undefined' && typeof defVal === 'undefined') { throw new Error(compareTo + ' has no associated fieldInfo in switch') }
26 | for (const field in fields) {
27 | if (field.startsWith('/')) {
28 | fields[this.types[field.slice(1)]] = fields[field]
29 | delete fields[field]
30 | }
31 | }
32 | const caseDefault = typeof fields[compareTo] === 'undefined'
33 | const fieldInfo = getFieldInfo(caseDefault ? defVal : fields[compareTo])
34 | return tryDoc(() => this.write(value, buffer, offset, fieldInfo, rootNode), caseDefault ? 'default' : compareTo)
35 | }
36 |
37 | function sizeOfSwitch (value, { compareTo, fields, compareToValue, default: defVal }, rootNode) {
38 | compareTo = compareToValue !== undefined ? compareToValue : getField(compareTo, rootNode)
39 | if (typeof fields[compareTo] === 'undefined' && typeof defVal === 'undefined') { throw new Error(compareTo + ' has no associated fieldInfo in switch') }
40 | for (const field in fields) {
41 | if (field.startsWith('/')) {
42 | fields[this.types[field.slice(1)]] = fields[field]
43 | delete fields[field]
44 | }
45 | }
46 | const caseDefault = typeof fields[compareTo] === 'undefined'
47 | const fieldInfo = getFieldInfo(caseDefault ? defVal : fields[compareTo])
48 | return tryDoc(() => this.sizeOf(value, fieldInfo, rootNode), caseDefault ? 'default' : compareTo)
49 | }
50 |
51 | function readOption (buffer, offset, typeArgs, context) {
52 | if (buffer.length < offset + 1) { throw new PartialReadError() }
53 | const val = buffer.readUInt8(offset++)
54 | if (val !== 0) {
55 | const retval = this.read(buffer, offset, typeArgs, context)
56 | retval.size++
57 | return retval
58 | } else { return { size: 1 } }
59 | }
60 |
61 | function writeOption (value, buffer, offset, typeArgs, context) {
62 | if (value != null) {
63 | buffer.writeUInt8(1, offset++)
64 | offset = this.write(value, buffer, offset, typeArgs, context)
65 | } else { buffer.writeUInt8(0, offset++) }
66 | return offset
67 | }
68 |
69 | function sizeOfOption (value, typeArgs, context) {
70 | return value == null ? 1 : this.sizeOf(value, typeArgs, context) + 1
71 | }
72 |
--------------------------------------------------------------------------------
/src/datatypes/numeric.js:
--------------------------------------------------------------------------------
1 | const { PartialReadError } = require('../utils')
2 |
3 | class SignedBigInt extends Array {
4 | valueOf () { return BigInt.asIntN(64, BigInt(this[0]) << 32n) | BigInt.asUintN(32, BigInt(this[1])) }
5 | toString () { return this.valueOf().toString() }
6 | [Symbol.for('nodejs.util.inspect.custom')] () { return this.valueOf() }
7 | }
8 |
9 | class UnsignedBigInt extends Array {
10 | valueOf () { return BigInt.asUintN(64, BigInt(this[0]) << 32n) | BigInt.asUintN(32, BigInt(this[1])) }
11 | toString () { return this.valueOf().toString() }
12 | [Symbol.for('nodejs.util.inspect.custom')] () { return this.valueOf() }
13 | }
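// These wrappers keep the two raw 32-bit words while still coercing to a real BigInt on demand.
// Illustrative: new SignedBigInt(-1, -1).valueOf() === -1n, while
// new UnsignedBigInt(-1, -1).valueOf() === 18446744073709551615n (2n ** 64n - 1n).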
14 |
15 | function readI64 (buffer, offset) {
16 | if (offset + 8 > buffer.length) { throw new PartialReadError() }
17 | return {
18 | value: new SignedBigInt(buffer.readInt32BE(offset), buffer.readInt32BE(offset + 4)),
19 | size: 8
20 | }
21 | }
22 |
23 | function writeI64 (value, buffer, offset) {
24 | if (typeof value === 'bigint') {
25 | buffer.writeBigInt64BE(value, offset)
26 | } else {
27 | buffer.writeInt32BE(value[0], offset)
28 | buffer.writeInt32BE(value[1], offset + 4)
29 | }
30 | return offset + 8
31 | }
32 |
33 | function readLI64 (buffer, offset) {
34 | if (offset + 8 > buffer.length) { throw new PartialReadError() }
35 | return {
36 | value: new SignedBigInt(buffer.readInt32LE(offset + 4), buffer.readInt32LE(offset)),
37 | size: 8
38 | }
39 | }
40 |
41 | function writeLI64 (value, buffer, offset) {
42 | if (typeof value === 'bigint') {
43 | buffer.writeBigInt64LE(value, offset)
44 | } else {
45 | buffer.writeInt32LE(value[0], offset + 4)
46 | buffer.writeInt32LE(value[1], offset)
47 | }
48 | return offset + 8
49 | }
50 |
51 | function readU64 (buffer, offset) {
52 | if (offset + 8 > buffer.length) { throw new PartialReadError() }
53 | return {
54 | value: new UnsignedBigInt(buffer.readUInt32BE(offset), buffer.readUInt32BE(offset + 4)),
55 | size: 8
56 | }
57 | }
58 |
59 | function writeU64 (value, buffer, offset) {
60 | if (typeof value === 'bigint') {
61 | buffer.writeBigUInt64BE(value, offset)
62 | } else {
63 | buffer.writeUInt32BE(value[0], offset)
64 | buffer.writeUInt32BE(value[1], offset + 4)
65 | }
66 | return offset + 8
67 | }
68 |
69 | function readLU64 (buffer, offset) {
70 | if (offset + 8 > buffer.length) { throw new PartialReadError() }
71 | return {
72 | value: new UnsignedBigInt(buffer.readUInt32LE(offset + 4), buffer.readUInt32LE(offset)),
73 | size: 8
74 | }
75 | }
76 |
77 | function writeLU64 (value, buffer, offset) {
78 | if (typeof value === 'bigint') {
79 | buffer.writeBigUInt64LE(value, offset)
80 | } else {
81 | buffer.writeUInt32LE(value[0], offset + 4)
82 | buffer.writeUInt32LE(value[1], offset)
83 | }
84 | return offset + 8
85 | }
86 |
87 | function generateFunctions (bufferReader, bufferWriter, size, schema) {
88 | const reader = (buffer, offset) => {
89 | if (offset + size > buffer.length) { throw new PartialReadError() }
90 | const value = buffer[bufferReader](offset)
91 | return {
92 | value,
93 | size
94 | }
95 | }
96 | const writer = (value, buffer, offset) => {
97 | buffer[bufferWriter](value, offset)
98 | return offset + size
99 | }
100 | return [reader, writer, size, schema]
101 | }
102 |
103 | const nums = {
104 | i8: ['readInt8', 'writeInt8', 1],
105 | u8: ['readUInt8', 'writeUInt8', 1],
106 | i16: ['readInt16BE', 'writeInt16BE', 2],
107 | u16: ['readUInt16BE', 'writeUInt16BE', 2],
108 | i32: ['readInt32BE', 'writeInt32BE', 4],
109 | u32: ['readUInt32BE', 'writeUInt32BE', 4],
110 | f32: ['readFloatBE', 'writeFloatBE', 4],
111 | f64: ['readDoubleBE', 'writeDoubleBE', 8],
112 | li8: ['readInt8', 'writeInt8', 1],
113 | lu8: ['readUInt8', 'writeUInt8', 1],
114 | li16: ['readInt16LE', 'writeInt16LE', 2],
115 | lu16: ['readUInt16LE', 'writeUInt16LE', 2],
116 | li32: ['readInt32LE', 'writeInt32LE', 4],
117 | lu32: ['readUInt32LE', 'writeUInt32LE', 4],
118 | lf32: ['readFloatLE', 'writeFloatLE', 4],
119 | lf64: ['readDoubleLE', 'writeDoubleLE', 8]
120 | }
121 |
122 | const types = Object.keys(nums).reduce((types, num) => {
123 | types[num] = generateFunctions(nums[num][0], nums[num][1], nums[num][2], require('../../ProtoDef/schemas/numeric.json')[num])
124 | return types
125 | }, {})
126 | types.i64 = [readI64, writeI64, 8, require('../../ProtoDef/schemas/numeric.json').i64]
127 | types.li64 = [readLI64, writeLI64, 8, require('../../ProtoDef/schemas/numeric.json').li64]
128 | types.u64 = [readU64, writeU64, 8, require('../../ProtoDef/schemas/numeric.json').u64]
129 | types.lu64 = [readLU64, writeLU64, 8, require('../../ProtoDef/schemas/numeric.json').lu64]
130 |
131 | module.exports = types
132 |
--------------------------------------------------------------------------------
/src/datatypes/structures.js:
--------------------------------------------------------------------------------
1 | const { getField, getCount, sendCount, calcCount, tryDoc } = require('../utils')
2 |
3 | module.exports = {
4 | array: [readArray, writeArray, sizeOfArray, require('../../ProtoDef/schemas/structures.json').array],
5 | count: [readCount, writeCount, sizeOfCount, require('../../ProtoDef/schemas/structures.json').count],
6 | container: [readContainer, writeContainer, sizeOfContainer, require('../../ProtoDef/schemas/structures.json').container]
7 | }
8 |
9 | function readArray (buffer, offset, typeArgs, rootNode) {
10 | const results = {
11 | value: [],
12 | size: 0
13 | }
14 | let value
15 | let { count, size } = getCount.call(this, buffer, offset, typeArgs, rootNode)
16 | offset += size
17 | results.size += size
18 | for (let i = 0; i < count; i++) {
19 | ({ size, value } = tryDoc(() => this.read(buffer, offset, typeArgs.type, rootNode), i))
20 | results.size += size
21 | offset += size
22 | results.value.push(value)
23 | }
24 | return results
25 | }
26 |
27 | function writeArray (value, buffer, offset, typeArgs, rootNode) {
28 | offset = sendCount.call(this, value.length, buffer, offset, typeArgs, rootNode)
29 | return value.reduce((offset, v, index) => tryDoc(() => this.write(v, buffer, offset, typeArgs.type, rootNode), index), offset)
30 | }
31 |
32 | function sizeOfArray (value, typeArgs, rootNode) {
33 | let size = calcCount.call(this, value.length, typeArgs, rootNode)
34 | size = value.reduce((size, v, index) => tryDoc(() => size + this.sizeOf(v, typeArgs.type, rootNode), index), size)
35 | return size
36 | }
37 |
38 | function readContainer (buffer, offset, typeArgs, context) {
39 | const results = {
40 | value: { '..': context },
41 | size: 0
42 | }
43 | typeArgs.forEach(({ type, name, anon }) => {
44 | tryDoc(() => {
45 | const readResults = this.read(buffer, offset, type, results.value)
46 | results.size += readResults.size
47 | offset += readResults.size
48 | if (anon) {
49 | if (readResults.value !== undefined) {
50 | Object.keys(readResults.value).forEach(key => {
51 | results.value[key] = readResults.value[key]
52 | })
53 | }
54 | } else { results.value[name] = readResults.value }
55 | }, name || 'unknown')
56 | })
57 | delete results.value['..']
58 | return results
59 | }
60 |
61 | function writeContainer (value, buffer, offset, typeArgs, context) {
62 | value['..'] = context
63 | offset = typeArgs.reduce((offset, { type, name, anon }) =>
64 | tryDoc(() => this.write(anon ? value : value[name], buffer, offset, type, value), name || 'unknown'), offset)
65 | delete value['..']
66 | return offset
67 | }
68 |
69 | function sizeOfContainer (value, typeArgs, context) {
70 | value['..'] = context
71 | const size = typeArgs.reduce((size, { type, name, anon }) =>
72 | size + tryDoc(() => this.sizeOf(anon ? value : value[name], type, value), name || 'unknown'), 0)
73 | delete value['..']
74 | return size
75 | }
76 |
77 | function readCount (buffer, offset, { type }, rootNode) {
78 | return this.read(buffer, offset, type, rootNode)
79 | }
80 |
81 | function writeCount (value, buffer, offset, { countFor, type }, rootNode) {
82 |   // Looks up the field referenced by countFor and writes its length; the value argument is unused.
83 |   // TODO: this is a bit hacky.
84 | return this.write(getField(countFor, rootNode).length, buffer, offset, type, rootNode)
85 | }
86 |
87 | function sizeOfCount (value, { countFor, type }, rootNode) {
88 |   // TODO: should this use value or getField().length?
89 | return this.sizeOf(getField(countFor, rootNode).length, type, rootNode)
90 | }
91 |
--------------------------------------------------------------------------------
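A short sketch composing `container` and `array` with a `varint` length prefix; the `point`/`path` type names are illustrative, but the fieldinfo shapes match what `addTypes` accepts:

const { ProtoDef } = require('protodef')
const proto = new ProtoDef()
proto.addTypes({
  point: ['container', [
    { name: 'x', type: 'i16' },
    { name: 'y', type: 'i16' }
  ]],
  path: ['array', { countType: 'varint', type: 'point' }]
})
const buf = proto.createPacketBuffer('path', [{ x: 1, y: 2 }, { x: 3, y: 4 }])
console.log(proto.parsePacketBuffer('path', buf).data) // [ { x: 1, y: 2 }, { x: 3, y: 4 } ]
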
/src/datatypes/utils.js:
--------------------------------------------------------------------------------
1 | const { getCount, sendCount, calcCount, PartialReadError } = require('../utils')
2 |
3 | module.exports = {
4 | bool: [readBool, writeBool, 1, require('../../ProtoDef/schemas/utils.json').bool],
5 | pstring: [readPString, writePString, sizeOfPString, require('../../ProtoDef/schemas/utils.json').pstring],
6 | buffer: [readBuffer, writeBuffer, sizeOfBuffer, require('../../ProtoDef/schemas/utils.json').buffer],
7 | void: [readVoid, writeVoid, 0, require('../../ProtoDef/schemas/utils.json').void],
8 | bitfield: [readBitField, writeBitField, sizeOfBitField, require('../../ProtoDef/schemas/utils.json').bitfield],
9 | bitflags: [readBitflags, writeBitflags, sizeOfBitflags, require('../../ProtoDef/schemas/utils.json').bitflags],
10 | cstring: [readCString, writeCString, sizeOfCString, require('../../ProtoDef/schemas/utils.json').cstring],
11 | mapper: [readMapper, writeMapper, sizeOfMapper, require('../../ProtoDef/schemas/utils.json').mapper],
12 | ...require('./varint')
13 | }
14 |
15 | function mapperEquality (a, b) {
16 | return a === b || parseInt(a) === parseInt(b)
17 | }
18 |
19 | function readMapper (buffer, offset, { type, mappings }, rootNode) {
20 | const { size, value } = this.read(buffer, offset, type, rootNode)
21 | let mappedValue = null
22 | const keys = Object.keys(mappings)
23 | for (let i = 0; i < keys.length; i++) {
24 | if (mapperEquality(keys[i], value)) {
25 | mappedValue = mappings[keys[i]]
26 | break
27 | }
28 | }
29 |   if (mappedValue == null) throw new Error(value + ' is not in the mappings')
30 | return {
31 | size,
32 | value: mappedValue
33 | }
34 | }
35 |
36 | function writeMapper (value, buffer, offset, { type, mappings }, rootNode) {
37 | const keys = Object.keys(mappings)
38 | let mappedValue = null
39 | for (let i = 0; i < keys.length; i++) {
40 | if (mapperEquality(mappings[keys[i]], value)) {
41 | mappedValue = keys[i]
42 | break
43 | }
44 | }
45 |   if (mappedValue == null) throw new Error(value + ' is not in the mappings')
46 | return this.write(mappedValue, buffer, offset, type, rootNode)
47 | }
48 |
49 | function sizeOfMapper (value, { type, mappings }, rootNode) {
50 | const keys = Object.keys(mappings)
51 | let mappedValue = null
52 | for (let i = 0; i < keys.length; i++) {
53 | if (mapperEquality(mappings[keys[i]], value)) {
54 | mappedValue = keys[i]
55 | break
56 | }
57 | }
58 |   if (mappedValue == null) throw new Error(value + ' is not in the mappings')
59 | return this.sizeOf(mappedValue, type, rootNode)
60 | }
61 |
62 | function readPString (buffer, offset, typeArgs, rootNode) {
63 | const { size, count } = getCount.call(this, buffer, offset, typeArgs, rootNode)
64 | const cursor = offset + size
65 | const strEnd = cursor + count
66 | if (strEnd > buffer.length) {
67 |     throw new PartialReadError('Missing characters in string, found size is ' + buffer.length +
68 |       ', expected size was ' + strEnd)
69 | }
70 |
71 | return {
72 | value: buffer.toString(typeArgs.encoding || 'utf8', cursor, strEnd),
73 | size: strEnd - offset
74 | }
75 | }
76 |
77 | function writePString (value, buffer, offset, typeArgs, rootNode) {
78 |   const length = Buffer.byteLength(value, typeArgs.encoding || 'utf8')
79 | offset = sendCount.call(this, length, buffer, offset, typeArgs, rootNode)
80 | buffer.write(value, offset, length, typeArgs.encoding || 'utf8')
81 | return offset + length
82 | }
83 |
84 | function sizeOfPString (value, typeArgs, rootNode) {
85 | const length = Buffer.byteLength(value, typeArgs.encoding || 'utf8')
86 | const size = calcCount.call(this, length, typeArgs, rootNode)
87 | return size + length
88 | }
89 |
90 | function readBool (buffer, offset) {
91 | if (offset + 1 > buffer.length) throw new PartialReadError()
92 | const value = buffer.readInt8(offset)
93 | return {
94 | value: !!value,
95 | size: 1
96 | }
97 | }
98 |
99 | function writeBool (value, buffer, offset) {
100 | buffer.writeInt8(+value, offset)
101 | return offset + 1
102 | }
103 |
104 | function readBuffer (buffer, offset, typeArgs, rootNode) {
105 | const { size, count } = getCount.call(this, buffer, offset, typeArgs, rootNode)
106 | offset += size
107 | if (offset + count > buffer.length) throw new PartialReadError()
108 | return {
109 | value: buffer.slice(offset, offset + count),
110 | size: size + count
111 | }
112 | }
113 |
114 | function writeBuffer (value, buffer, offset, typeArgs, rootNode) {
115 | if (!(value instanceof Buffer)) value = Buffer.from(value)
116 | offset = sendCount.call(this, value.length, buffer, offset, typeArgs, rootNode)
117 | value.copy(buffer, offset)
118 | return offset + value.length
119 | }
120 |
121 | function sizeOfBuffer (value, typeArgs, rootNode) {
122 | if (!(value instanceof Buffer)) value = Buffer.from(value)
123 | const size = calcCount.call(this, value.length, typeArgs, rootNode)
124 | return size + value.length
125 | }
126 |
127 | function readVoid () {
128 | return {
129 | value: undefined,
130 | size: 0
131 | }
132 | }
133 |
134 | function writeVoid (value, buffer, offset) {
135 | return offset
136 | }
137 |
138 | function generateBitMask (n) {
139 | return (1 << n) - 1
140 | }
141 |
142 | function readBitField (buffer, offset, typeArgs) {
143 | const beginOffset = offset
144 | let curVal = null
145 | let bits = 0
146 | const results = {}
147 | results.value = typeArgs.reduce((acc, { size, signed, name }) => {
148 | let currentSize = size
149 | let val = 0
150 | while (currentSize > 0) {
151 | if (bits === 0) {
152 | if (buffer.length < offset + 1) { throw new PartialReadError() }
153 | curVal = buffer[offset++]
154 | bits = 8
155 | }
156 | const bitsToRead = Math.min(currentSize, bits)
157 | val = (val << bitsToRead) | (curVal & generateBitMask(bits)) >> (bits - bitsToRead)
158 | bits -= bitsToRead
159 | currentSize -= bitsToRead
160 | }
161 | if (signed && val >= 1 << (size - 1)) { val -= 1 << size }
162 | acc[name] = val
163 | return acc
164 | }, {})
165 | results.size = offset - beginOffset
166 | return results
167 | }
168 | function writeBitField (value, buffer, offset, typeArgs) {
169 | let toWrite = 0
170 | let bits = 0
171 | typeArgs.forEach(({ size, signed, name }) => {
172 | const val = value[name]
173 |     if ((!signed && val < 0) || (signed && val < -(1 << (size - 1)))) { throw new Error(val + ' < ' + (signed ? -(1 << (size - 1)) : 0)) } else if ((!signed && val >= 1 << size) ||
174 |       (signed && val >= 1 << (size - 1))) { throw new Error(val + ' >= ' + (signed ? (1 << (size - 1)) : (1 << size))) }
175 | while (size > 0) {
176 | const writeBits = Math.min(8 - bits, size)
177 | toWrite = toWrite << writeBits |
178 | ((val >> (size - writeBits)) & generateBitMask(writeBits))
179 | size -= writeBits
180 | bits += writeBits
181 | if (bits === 8) {
182 | buffer[offset++] = toWrite
183 | bits = 0
184 | toWrite = 0
185 | }
186 | }
187 | })
188 | if (bits !== 0) { buffer[offset++] = toWrite << (8 - bits) }
189 | return offset
190 | }
191 |
192 | function sizeOfBitField (value, typeArgs) {
193 | return Math.ceil(typeArgs.reduce((acc, { size }) => {
194 | return acc + size
195 | }, 0) / 8)
196 | }
197 |
198 | function readCString (buffer, offset, typeArgs) {
199 | let size = 0
200 | while (offset + size < buffer.length && buffer[offset + size] !== 0x00) { size++ }
201 | if (buffer.length < offset + size + 1) { throw new PartialReadError() }
202 |
203 | return {
204 | value: buffer.toString(typeArgs?.encoding || 'utf8', offset, offset + size),
205 | size: size + 1
206 | }
207 | }
208 |
209 | function writeCString (value, buffer, offset, typeArgs) {
210 | const length = Buffer.byteLength(value, typeArgs?.encoding || 'utf8')
211 | buffer.write(value, offset, length, typeArgs?.encoding || 'utf8')
212 | offset += length
213 | buffer.writeInt8(0x00, offset)
214 | return offset + 1
215 | }
216 |
217 | function sizeOfCString (value, typeArgs) {
218 |   const length = Buffer.byteLength(value, typeArgs?.encoding || 'utf8')
219 |   return length + 1
220 | }
221 |
222 | function readBitflags (buffer, offset, { type, flags, shift, big }, rootNode) {
223 | const { size, value } = this.read(buffer, offset, type, rootNode)
224 | let f = {}
225 | if (Array.isArray(flags)) {
226 | for (const [k, v] of Object.entries(flags)) {
227 | f[v] = big ? (1n << BigInt(k)) : (1 << k)
228 | }
229 | } else if (shift) {
230 | for (const k in flags) {
231 | f[k] = big ? (1n << BigInt(flags[k])) : (1 << flags[k])
232 | }
233 | } else {
234 | f = flags
235 | }
236 | const result = { _value: value }
237 | for (const key in f) {
238 | result[key] = (value & f[key]) === f[key]
239 | }
240 | return { value: result, size }
241 | }
242 |
243 | function writeBitflags (value, buffer, offset, { type, flags, shift, big }, rootNode) {
244 | let f = {}
245 | if (Array.isArray(flags)) {
246 | for (const [k, v] of Object.entries(flags)) {
247 | f[v] = big ? (1n << BigInt(k)) : (1 << k)
248 | }
249 | } else if (shift) {
250 | for (const k in flags) {
251 | f[k] = big ? (1n << BigInt(flags[k])) : (1 << flags[k])
252 | }
253 | } else {
254 | f = flags
255 | }
256 | let val = value._value || (big ? 0n : 0)
257 | for (const key in f) {
258 | if (value[key]) val |= f[key]
259 | }
260 | return this.write(val, buffer, offset, type, rootNode)
261 | }
262 |
263 | function sizeOfBitflags (value, { type, flags, shift, big }, rootNode) {
264 | if (!value) throw new Error('Missing field')
265 | let f = {}
266 | if (Array.isArray(flags)) {
267 | for (const [k, v] of Object.entries(flags)) {
268 | f[v] = big ? (1n << BigInt(k)) : (1 << k)
269 | }
270 | } else if (shift) {
271 | for (const k in flags) {
272 | f[k] = big ? (1n << BigInt(flags[k])) : (1 << flags[k])
273 | }
274 | } else {
275 | f = flags
276 | }
277 | let mappedValue = value._value || (big ? 0n : 0)
278 | for (const key in f) {
279 | if (value[key]) mappedValue |= f[key]
280 | }
281 | return this.sizeOf(mappedValue, type, rootNode)
282 | }
283 |
--------------------------------------------------------------------------------
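A sketch of `mapper` and `pstring` in use; the `opcode` and `string` names and the mappings are illustrative:

const { ProtoDef } = require('protodef')
const proto = new ProtoDef()
proto.addTypes({
  opcode: ['mapper', { type: 'u8', mappings: { 1: 'ping', 2: 'pong' } }],
  string: ['pstring', { countType: 'varint' }]
})
console.log(proto.createPacketBuffer('opcode', 'ping')) // <Buffer 01>
console.log(proto.createPacketBuffer('string', 'hi')) // <Buffer 02 68 69>
console.log(proto.parsePacketBuffer('opcode', Buffer.from([0x02])).data) // 'pong'
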
/src/datatypes/varint.js:
--------------------------------------------------------------------------------
1 | const { PartialReadError } = require('../utils')
2 |
3 | module.exports = {
4 | varint: [readVarInt, writeVarInt, sizeOfVarInt, require('../../ProtoDef/schemas/numeric.json').varint],
5 | varint64: [readVarLong, writeVarLong, sizeOfVarLong, require('../../ProtoDef/schemas/numeric.json').varint64],
6 | varint128: [readVarLong128, writeVarLong, sizeOfVarLong, require('../../ProtoDef/schemas/numeric.json').varint128],
7 | zigzag32: [readSignedVarInt, writeSignedVarInt, sizeOfSignedVarInt, require('../../ProtoDef/schemas/numeric.json').zigzag32],
8 | zigzag64: [readSignedVarLong, writeSignedVarLong, sizeOfSignedVarLong, require('../../ProtoDef/schemas/numeric.json').zigzag64]
9 | }
10 |
11 | // u32
12 |
13 | function readVarInt (buffer, offset) {
14 | let result = 0
15 | let shift = 0
16 | let cursor = offset
17 |
18 | while (true) {
19 | if (cursor >= buffer.length) throw new PartialReadError('Unexpected buffer end while reading VarInt')
20 | const byte = buffer.readUInt8(cursor)
21 | result |= (byte & 0x7F) << shift // Add the bits, excluding the MSB
22 | cursor++
23 | if (!(byte & 0x80)) { // If MSB is not set, return result
24 | return { value: result, size: cursor - offset }
25 | }
26 | shift += 7
27 |     if (shift > 64) throw new PartialReadError(`varint is too big: ${shift}`) // Make sure our shift doesn't overflow.
28 | }
29 | }
30 |
31 | function sizeOfVarInt (value) {
32 | let cursor = 0
33 | while (value & ~0x7F) {
34 | value >>>= 7
35 | cursor++
36 | }
37 | return cursor + 1
38 | }
39 |
40 | function writeVarInt (value, buffer, offset) {
41 | let cursor = 0
42 | while (value & ~0x7F) {
43 | buffer.writeUInt8((value & 0xFF) | 0x80, offset + cursor)
44 | cursor++
45 | value >>>= 7
46 | }
47 | buffer.writeUInt8(value, offset + cursor)
48 | return offset + cursor + 1
49 | }
50 |
51 | // u64
52 |
53 | function readVarLong (buffer, offset) {
54 | let result = 0n
55 | let shift = 0n
56 | let cursor = offset
57 |
58 | while (true) {
59 | if (cursor >= buffer.length) throw new PartialReadError('Unexpected buffer end while reading VarLong')
60 | const byte = buffer.readUInt8(cursor)
61 | result |= (BigInt(byte) & 0x7Fn) << shift // Add the bits, excluding the MSB
62 | cursor++
63 | if (!(byte & 0x80)) { // If MSB is not set, return result
64 | return { value: result, size: cursor - offset }
65 | }
66 | shift += 7n
67 |     if (shift > 63n) throw new Error(`varlong is too big: ${shift}`)
68 | }
69 | }
70 |
71 | function readVarLong128 (buffer, offset) {
72 | let result = 0n
73 | let shift = 0n
74 | let cursor = offset
75 |
76 | while (true) {
77 | if (cursor >= buffer.length) throw new PartialReadError('Unexpected buffer end while reading VarLong')
78 | const byte = buffer.readUInt8(cursor)
79 | result |= (BigInt(byte) & 0x7Fn) << shift // Add the bits, excluding the MSB
80 | cursor++
81 | if (!(byte & 0x80)) { // If MSB is not set, return result
82 | return { value: result, size: cursor - offset }
83 | }
84 | shift += 7n
85 |     if (shift > 127n) throw new Error(`varint128 is too big: ${shift}`)
86 | }
87 | }
88 |
89 | function sizeOfVarLong (value) {
90 | value = BigInt(value)
91 | let size = 0
92 | do {
93 | value >>= 7n
94 | size++
95 | } while (value !== 0n)
96 | return size
97 | }
98 |
99 | function writeVarLong (value, buffer, offset) {
100 | value = BigInt(value)
101 | let cursor = offset
102 | do {
103 | const byte = value & 0x7Fn
104 | value >>= 7n
105 | buffer.writeUInt8(Number(byte) | (value ? 0x80 : 0), cursor++)
106 | } while (value)
107 | return cursor
108 | }
109 |
110 | // Zigzag 32
111 |
112 | function readSignedVarInt (buffer, offset) {
113 | const { value, size } = readVarInt(buffer, offset)
114 | return { value: (value >>> 1) ^ -(value & 1), size }
115 | }
116 |
117 | function sizeOfSignedVarInt (value) {
118 | return sizeOfVarInt((value << 1) ^ (value >> 31))
119 | }
120 |
121 | function writeSignedVarInt (value, buffer, offset) {
122 | return writeVarInt((value << 1) ^ (value >> 31), buffer, offset)
123 | }
124 |
125 | // Zigzag 64
126 |
127 | function readSignedVarLong (buffer, offset) {
128 | const { value, size } = readVarLong(buffer, offset)
129 | return { value: (value >> 1n) ^ -(value & 1n), size }
130 | }
131 |
132 | function sizeOfSignedVarLong (value) {
133 | return sizeOfVarLong((BigInt(value) << 1n) ^ (BigInt(value) >> 63n))
134 | }
135 |
136 | function writeSignedVarLong (value, buffer, offset) {
137 | return writeVarLong((BigInt(value) << 1n) ^ (BigInt(value) >> 63n), buffer, offset)
138 | }
139 |
--------------------------------------------------------------------------------
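A worked example of the LEB128-style encoding implemented above: 300 is 0b100101100; emitting 7-bit groups least-significant first gives 0101100 (continuation bit set: 0xac), then 0000010 (0x02):

const { ProtoDef } = require('protodef')
const proto = new ProtoDef()
console.log(proto.createPacketBuffer('varint', 300)) // <Buffer ac 02>
console.log(proto.parsePacketBuffer('varint', Buffer.from([0xac, 0x02])).data) // 300
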
/src/index.js:
--------------------------------------------------------------------------------
1 | const ProtoDef = require('./protodef')
2 | const proto = new ProtoDef()
3 |
4 | module.exports = {
5 | ProtoDef,
6 | Serializer: require('./serializer').Serializer,
7 | Parser: require('./serializer').Parser,
8 | FullPacketParser: require('./serializer').FullPacketParser,
9 | Compiler: require('./compiler'),
10 | types: proto.types,
11 | utils: require('./utils')
12 | }
13 |
--------------------------------------------------------------------------------
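For reference, the full surface a consumer gets from this entry point:

const { ProtoDef, Serializer, Parser, FullPacketParser, Compiler, types, utils } = require('protodef')
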
/src/protodef.js:
--------------------------------------------------------------------------------
1 | const { getFieldInfo, tryCatch } = require('./utils')
2 | const reduce = require('lodash.reduce')
3 | const Validator = require('protodef-validator')
4 |
5 | function isFieldInfo (type) {
6 | return typeof type === 'string' ||
7 | (Array.isArray(type) && typeof type[0] === 'string') ||
8 | type.type
9 | }
10 |
11 | function findArgs (acc, v, k) {
12 | if (typeof v === 'string' && v.charAt(0) === '$') { acc.push({ path: k, val: v.substr(1) }) } else if (Array.isArray(v) || typeof v === 'object') { acc = acc.concat(reduce(v, findArgs, []).map((v) => ({ path: k + '.' + v.path, val: v.val }))) }
13 | return acc
14 | }
15 |
16 | function setField (path, val, into) {
17 | const c = path.split('.').reverse()
18 | while (c.length > 1) {
19 | into = into[c.pop()]
20 | }
21 | into[c.pop()] = val
22 | }
23 |
24 | function extendType (functions, defaultTypeArgs) {
25 | const json = JSON.stringify(defaultTypeArgs)
26 | const argPos = reduce(defaultTypeArgs, findArgs, [])
27 | function produceArgs (typeArgs) {
28 | const args = JSON.parse(json)
29 | argPos.forEach((v) => {
30 | setField(v.path, typeArgs[v.val], args)
31 | })
32 | return args
33 | }
34 | return [function read (buffer, offset, typeArgs, context) {
35 | return functions[0].call(this, buffer, offset, produceArgs(typeArgs), context)
36 | }, function write (value, buffer, offset, typeArgs, context) {
37 | return functions[1].call(this, value, buffer, offset, produceArgs(typeArgs), context)
38 | }, function sizeOf (value, typeArgs, context) {
39 | if (typeof functions[2] === 'function') { return functions[2].call(this, value, produceArgs(typeArgs), context) } else { return functions[2] }
40 | }]
41 | }
42 |
43 | class ProtoDef {
44 | constructor (validation = true) {
45 | this.types = {}
46 | this.validator = validation ? new Validator() : null
47 | this.addDefaultTypes()
48 | }
49 |
50 | addDefaultTypes () {
51 | this.addTypes(require('./datatypes/numeric'))
52 | this.addTypes(require('./datatypes/utils'))
53 | this.addTypes(require('./datatypes/structures'))
54 | this.addTypes(require('./datatypes/conditional'))
55 | }
56 |
57 | addProtocol (protocolData, path) {
58 | const self = this
59 | function recursiveAddTypes (protocolData, path) {
60 | if (protocolData === undefined) { return }
61 | if (protocolData.types) { self.addTypes(protocolData.types) }
62 | recursiveAddTypes(protocolData?.[path[0]], path.slice(1))
63 | }
64 |
65 | if (this.validator) { this.validator.validateProtocol(protocolData) }
66 |
67 | recursiveAddTypes(protocolData, path)
68 | }
69 |
70 | addType (name, functions, validate = true) {
71 | if (functions === 'native') {
72 | if (this.validator) { this.validator.addType(name) }
73 | return
74 | }
75 | if (isFieldInfo(functions)) {
76 | if (this.validator) {
77 | if (validate) { this.validator.validateType(functions) }
78 | this.validator.addType(name)
79 | }
80 |
81 | const { type, typeArgs } = getFieldInfo(functions)
82 | this.types[name] = typeArgs ? extendType(this.types[type], typeArgs) : this.types[type]
83 | } else {
84 | if (this.validator) {
85 | if (functions[3]) {
86 | this.validator.addType(name, functions[3])
87 | } else { this.validator.addType(name) }
88 | }
89 |
90 | this.types[name] = functions
91 | }
92 | }
93 |
94 | addTypes (types) {
95 | Object.keys(types).forEach((name) => this.addType(name, types[name], false))
96 | if (this.validator) {
97 | Object.keys(types).forEach((name) => {
98 | if (isFieldInfo(types[name])) {
99 | this.validator.validateType(types[name])
100 | }
101 | })
102 | }
103 | }
104 |
105 | setVariable (key, val) {
106 | this.types[key] = val
107 | }
108 |
109 | read (buffer, cursor, _fieldInfo, rootNodes) {
110 | const { type, typeArgs } = getFieldInfo(_fieldInfo)
111 | const typeFunctions = this.types[type]
112 | if (!typeFunctions) { throw new Error('missing data type: ' + type) }
113 | return typeFunctions[0].call(this, buffer, cursor, typeArgs, rootNodes)
114 | }
115 |
116 | write (value, buffer, offset, _fieldInfo, rootNode) {
117 | const { type, typeArgs } = getFieldInfo(_fieldInfo)
118 | const typeFunctions = this.types[type]
119 | if (!typeFunctions) { throw new Error('missing data type: ' + type) }
120 | return typeFunctions[1].call(this, value, buffer, offset, typeArgs, rootNode)
121 | }
122 |
123 | sizeOf (value, _fieldInfo, rootNode) {
124 | const { type, typeArgs } = getFieldInfo(_fieldInfo)
125 | const typeFunctions = this.types[type]
126 | if (!typeFunctions) {
127 | throw new Error('missing data type: ' + type)
128 | }
129 | if (typeof typeFunctions[2] === 'function') {
130 | return typeFunctions[2].call(this, value, typeArgs, rootNode)
131 | } else {
132 | return typeFunctions[2]
133 | }
134 | }
135 |
136 | createPacketBuffer (type, packet) {
137 | const length = tryCatch(() => this.sizeOf(packet, type, {}),
138 | (e) => {
139 | e.message = `SizeOf error for ${e.field} : ${e.message}`
140 | throw e
141 | })
142 | const buffer = Buffer.allocUnsafe(length)
143 | tryCatch(() => this.write(packet, buffer, 0, type, {}),
144 | (e) => {
145 | e.message = `Write error for ${e.field} : ${e.message}`
146 | throw e
147 | })
148 | return buffer
149 | }
150 |
151 | parsePacketBuffer (type, buffer, offset = 0) {
152 | const { value, size } = tryCatch(() => this.read(buffer, offset, type, {}),
153 | (e) => {
154 | e.message = `Read error for ${e.field} : ${e.message}`
155 | throw e
156 | })
157 | return {
158 | data: value,
159 | metadata: {
160 | size
161 | },
162 | buffer: buffer.slice(0, size),
163 | fullBuffer: buffer
164 | }
165 | }
166 | }
167 |
168 | module.exports = ProtoDef
169 |
--------------------------------------------------------------------------------
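A sketch of the '$'-parameter substitution that findArgs/extendType implement: a type declared with '$size' in its typeArgs has that value injected when the type is later instantiated with concrete args. `fixed_buffer` and `two_bytes` are hypothetical names, and validation is skipped here for brevity:

const ProtoDef = require('protodef').ProtoDef
const proto = new ProtoDef(false) // validation disabled in this sketch
proto.addTypes({ fixed_buffer: ['buffer', { count: '$size' }] })
proto.addType('two_bytes', ['fixed_buffer', { size: 2 }])
console.log(proto.createPacketBuffer('two_bytes', Buffer.from([1, 2]))) // <Buffer 01 02>
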
/src/serializer.js:
--------------------------------------------------------------------------------
1 | const Transform = require('readable-stream').Transform
2 |
3 | class Serializer extends Transform {
4 | constructor (proto, mainType) {
5 | super({ writableObjectMode: true })
6 | this.proto = proto
7 | this.mainType = mainType
8 | this.queue = Buffer.alloc(0)
9 | }
10 |
11 | createPacketBuffer (packet) {
12 | return this.proto.createPacketBuffer(this.mainType, packet)
13 | }
14 |
15 | _transform (chunk, enc, cb) {
16 | let buf
17 | try {
18 | buf = this.createPacketBuffer(chunk)
19 | } catch (e) {
20 | return cb(e)
21 | }
22 | this.push(buf)
23 | return cb()
24 | }
25 | }
26 |
27 | class Parser extends Transform {
28 | constructor (proto, mainType) {
29 | super({ readableObjectMode: true })
30 | this.proto = proto
31 | this.mainType = mainType
32 | this.queue = Buffer.alloc(0)
33 | }
34 |
35 | parsePacketBuffer (buffer) {
36 | return this.proto.parsePacketBuffer(this.mainType, buffer)
37 | }
38 |
39 | _transform (chunk, enc, cb) {
40 | this.queue = Buffer.concat([this.queue, chunk])
41 | while (true) {
42 | let packet
43 | try {
44 | packet = this.parsePacketBuffer(this.queue)
45 | } catch (e) {
46 | if (e.partialReadError) { return cb() } else {
47 | e.buffer = this.queue
48 | this.queue = Buffer.alloc(0)
49 | return cb(e)
50 | }
51 | }
52 |
53 | this.push(packet)
54 | this.queue = this.queue.slice(packet.metadata.size)
55 | }
56 | }
57 | }
58 |
59 | class FullPacketParser extends Transform {
60 | constructor (proto, mainType, noErrorLogging = false) {
61 | super({ readableObjectMode: true })
62 | this.proto = proto
63 | this.mainType = mainType
64 | this.noErrorLogging = noErrorLogging
65 | }
66 |
67 | parsePacketBuffer (buffer) {
68 | return this.proto.parsePacketBuffer(this.mainType, buffer)
69 | }
70 |
71 | _transform (chunk, enc, cb) {
72 | let packet
73 | try {
74 | packet = this.parsePacketBuffer(chunk)
75 | if (packet.metadata.size !== chunk.length && !this.noErrorLogging) {
76 |       console.log('Chunk size is ' + chunk.length + ' but only ' + packet.metadata.size + ' was read; partial packet: ' +
77 |         JSON.stringify(packet.data) + '; buffer: ' + chunk.toString('hex'))
78 | }
79 | } catch (e) {
80 | if (e.partialReadError) {
81 | if (!this.noErrorLogging) {
82 | console.log(e.stack)
83 | }
84 | return cb()
85 | } else {
86 | return cb(e)
87 | }
88 | }
89 | this.push(packet)
90 | cb()
91 | }
92 | }
93 |
94 | module.exports = {
95 | Serializer,
96 | Parser,
97 | FullPacketParser
98 | }
99 |
--------------------------------------------------------------------------------
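A sketch wiring a Serializer into a Parser over a stream pipe; the `packet` type is illustrative:

const { ProtoDef, Serializer, Parser } = require('protodef')
const proto = new ProtoDef()
proto.addType('packet', ['container', [{ name: 'id', type: 'varint' }]])
const serializer = new Serializer(proto, 'packet')
const parser = new Parser(proto, 'packet')
serializer.pipe(parser)
parser.on('data', ({ data }) => console.log(data)) // { id: 7 }
serializer.write({ id: 7 })
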
/src/utils.js:
--------------------------------------------------------------------------------
1 | function getField (countField, context) {
2 | const countFieldArr = countField.split('/')
3 | let i = 0
4 | if (countFieldArr[i] === '') {
5 | while (context['..'] !== undefined) { context = context['..'] }
6 | i++
7 | }
8 | for (; i < countFieldArr.length; i++) { context = context[countFieldArr[i]] }
9 | return context
10 | }
11 |
12 | function getFieldInfo (fieldInfo) {
13 | if (typeof fieldInfo === 'string') { return { type: fieldInfo } } else if (Array.isArray(fieldInfo)) { return { type: fieldInfo[0], typeArgs: fieldInfo[1] } } else if (typeof fieldInfo.type === 'string') { return fieldInfo } else { throw new Error('Not a fieldinfo') }
14 | }
15 |
16 | function getCount (buffer, offset, { count, countType }, rootNode) {
17 | let c = 0
18 | let size = 0
19 | if (typeof count === 'number') { c = count } else if (typeof count !== 'undefined') {
20 | c = getField(count, rootNode)
21 | } else if (typeof countType !== 'undefined') {
22 | ({ size, value: c } = tryDoc(() => this.read(buffer, offset, getFieldInfo(countType), rootNode), '$count'))
23 | } else { // TODO : broken schema, should probably error out.
24 | c = 0
25 | }
26 | return { count: c, size }
27 | }
28 |
29 | function sendCount (len, buffer, offset, { count, countType }, rootNode) {
30 | if (typeof count !== 'undefined' && len !== count) {
31 | // TODO: Throw
32 | } else if (typeof countType !== 'undefined') {
33 | offset = this.write(len, buffer, offset, getFieldInfo(countType), rootNode)
34 | } else {
35 | // TODO: Throw
36 | }
37 | return offset
38 | }
39 |
40 | function calcCount (len, { count, countType }, rootNode) {
41 | if (typeof count === 'undefined' && typeof countType !== 'undefined') { return tryDoc(() => this.sizeOf(len, getFieldInfo(countType), rootNode), '$count') } else { return 0 }
42 | }
43 |
44 | function addErrorField (e, field) {
45 | e.field = e.field ? field + '.' + e.field : field
46 | throw e
47 | }
48 |
49 | function tryCatch (tryfn, catchfn) {
50 | try { return tryfn() } catch (e) { catchfn(e) }
51 | }
52 |
53 | function tryDoc (tryfn, field) {
54 | return tryCatch(tryfn, (e) => addErrorField(e, field))
55 | }
56 |
57 | class ExtendableError extends Error {
58 | constructor (message) {
59 | super(message)
60 | this.name = this.constructor.name
61 | this.message = message
62 | if (Error.captureStackTrace != null) {
63 | Error.captureStackTrace(this, this.constructor.name)
64 | }
65 | }
66 | }
67 |
68 | class PartialReadError extends ExtendableError {
69 | constructor (message) {
70 | super(message)
71 | this.partialReadError = true
72 | }
73 | }
74 |
75 | module.exports = {
76 | getField,
77 | getFieldInfo,
78 | addErrorField,
79 | getCount,
80 | sendCount,
81 | calcCount,
82 | tryCatch,
83 | tryDoc,
84 | PartialReadError
85 | }
86 |
--------------------------------------------------------------------------------
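A sketch of getField's path resolution, using the '..' back-references that containers install while reading:

const { getField } = require('protodef').utils
const root = { size: 3 }
const nested = { '..': root, value: 'abc' }
console.log(getField('../size', nested)) // 3 - one level up
console.log(getField('/size', nested)) // 3 - a leading '/' climbs to the root
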
/test/dataTypes/datatypes.js:
--------------------------------------------------------------------------------
1 | /* eslint-env mocha */
2 |
3 | const expect = require('chai').expect
4 | const Validator = require('jsonschema').Validator
5 | const v = new Validator()
6 | const assert = require('assert')
7 |
8 | const { testData, proto, compiledProto } = require('./prepareTests')
9 |
10 | function testValue (type, value, buffer) {
11 | it('writes', function () {
12 | expect(proto.createPacketBuffer(type, value)).to.deep.equal(buffer)
13 | })
14 | it('reads', function () {
15 | const actualResult = proto.parsePacketBuffer(type, buffer)
16 | if (typeof actualResult.data === 'bigint') value = BigInt(value)
17 | if (value === null) {
18 | assert.ok(actualResult.data === undefined)
19 | } else {
20 | expect(actualResult.data).to.deep.equal(value)
21 | }
22 | expect(actualResult.metadata.size).to.deep.equal(buffer.length)
23 | })
24 | it('writes (compiled)', function () {
25 | expect(compiledProto.createPacketBuffer(type, value)).to.deep.equal(buffer)
26 | })
27 | it('reads (compiled)', function () {
28 | const actualResult = compiledProto.parsePacketBuffer(type, buffer)
29 | if (value === null) { assert.ok(actualResult.data === undefined) } else { expect(actualResult.data).to.deep.equal(value) }
30 | expect(actualResult.metadata.size).to.deep.equal(buffer.length)
31 | })
32 |
33 | if (type === 'i64' || type === 'u64') {
34 | it('reads bigint correctly ' + type, function () {
35 | const [top, lower] = value.map(BigInt)
36 | const joined = type === 'i64' ? BigInt.asIntN(64, (top << 32n) | lower) : BigInt.asUintN(64, (top << 32n) | lower)
37 | // read
38 | const actualResult = proto.parsePacketBuffer(type, buffer)
39 |       expect(actualResult.data.valueOf()).to.equal(joined)
40 | expect(actualResult.metadata.size).to.deep.equal(buffer.length)
41 | })
42 | }
43 | }
44 |
45 | function testType (type, values) {
46 | if (values.length === 0) {
47 | it.skip('Has no tests', () => {
48 |
49 | })
50 | }
51 | values.forEach((value) => {
52 | if (value.description) {
53 | describe(value.description, () => {
54 | testValue(type, value.value, value.buffer)
55 | })
56 | } else { testValue(type, value.value, value.buffer) }
57 | })
58 | if (type !== 'void') {
59 |     it('reads 0 bytes and throws a PartialReadError', () => {
60 | try {
61 | proto.parsePacketBuffer(type, Buffer.alloc(0))
62 | } catch (e) {
63 | if (!e.partialReadError) { throw e }
64 | return
65 | }
66 | throw Error('no PartialReadError thrown')
67 | })
68 |
69 |     it('reads 0 bytes and throws a PartialReadError (compiled)', () => {
70 | try {
71 | compiledProto.parsePacketBuffer(type, Buffer.alloc(0))
72 | } catch (e) {
73 | if (!e.partialReadError) { throw e }
74 | return
75 | }
76 | throw Error('no PartialReadError thrown')
77 | })
78 | }
79 | }
80 |
81 | testData.forEach(tests => {
82 | describe(tests.kind, () => {
83 | it('validates the json schema', () => {
84 | const schema = require('../../ProtoDef/test/datatype_tests_schema.json')
85 | v.addSchema(require('../../ProtoDef/schemas/datatype'), 'dataType')
86 | const result = v.validate(tests.originalData, schema)
87 | assert.strictEqual(result.errors.length, 0, require('util').inspect(result.errors, { depth: null }))
88 | })
89 |
90 | tests.data.forEach(test => {
91 | describe(test.type, () => {
92 | test.subtypes.forEach((subtype) => {
93 | if (subtype.description) {
94 | describe(subtype.description, () => {
95 | testType(subtype.type, subtype.values)
96 | })
97 |       } else { testType(subtype.type, subtype.values) }
98 | })
99 | })
100 | })
101 | })
102 | })
103 |
--------------------------------------------------------------------------------
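The partial-read contract these tests assert, in isolation: reading past the end of a buffer must surface an error flagged with `partialReadError` (see PartialReadError in src/utils.js):

const { ProtoDef } = require('protodef')
try {
  new ProtoDef().parsePacketBuffer('u32', Buffer.alloc(0))
} catch (e) {
  console.log(e.partialReadError) // true
}
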
/test/dataTypes/prepareTests.js:
--------------------------------------------------------------------------------
1 | const ProtoDef = require('protodef').ProtoDef
2 | const { ProtoDefCompiler } = require('protodef').Compiler
3 |
4 | const proto = new ProtoDef()
5 | const compiler = new ProtoDefCompiler()
6 |
7 | const testData = [
8 | {
9 | kind: 'conditional',
10 | data: require('../../ProtoDef/test/conditional.json')
11 | },
12 | {
13 | kind: 'numeric',
14 | data: require('../../ProtoDef/test/numeric.json')
15 | },
16 | {
17 | kind: 'structures',
18 | data: require('../../ProtoDef/test/structures.json')
19 | },
20 | {
21 | kind: 'utils',
22 | data: require('../../ProtoDef/test/utils.json')
23 | }
24 | ]
25 |
26 | function arrayToBuffer (arr) {
27 | return Buffer.from(arr.map(e => parseInt(e)))
28 | }
29 |
30 | function transformValues (type, values) {
31 | return values.map(val => {
32 | let value = val.value
33 | if (type.indexOf('buffer') === 0) {
34 | value = arrayToBuffer(value)
35 | } else if (value) {
36 | // we cannot use undefined type in JSON so need to convert it here to pass strictEquals test
37 | for (const key in value) {
38 | if (value[key] === 'undefined') value[key] = undefined
39 | }
40 | }
41 | return {
42 | buffer: arrayToBuffer(val.buffer),
43 | value,
44 | description: val.description
45 | }
46 | })
47 | }
48 |
49 | testData.forEach(tests => {
50 | tests.originalData = JSON.parse(JSON.stringify(tests.data))
51 | tests.data.forEach(test => {
52 | const subTypes = []
53 | if (test.subtypes) {
54 | test.subtypes.forEach((subtype, i) => {
55 | const type = test.type + '_' + i
56 | proto.addType(type, subtype.type)
57 | const types = {}
58 | types[type] = subtype.type
59 | compiler.addTypesToCompile(types)
60 |
61 | subtype.vars?.forEach(([k, v]) => { proto.setVariable(k, v); compiler.addVariable(k, v) })
62 | subtype.values = transformValues(test.type, subtype.values)
63 | subtype.type = type
64 | subTypes.push(subtype)
65 | })
66 | } else {
67 | test.values = transformValues(test.type, test.values)
68 | subTypes.push({ type: test.type, values: test.values })
69 | }
70 | test.subtypes = subTypes
71 | })
72 | })
73 |
74 | module.exports = {
75 | testData,
76 | proto,
77 | compiledProto: compiler.compileProtoDefSync()
78 | }
79 |
--------------------------------------------------------------------------------
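For orientation, the approximate shape of one entry in the ProtoDef/test/*.json files this harness consumes (illustrative values, not copied from the submodule):

// {
//   "type": "bool",
//   "values": [
//     { "value": true, "buffer": ["0x01"] },
//     { "value": false, "buffer": ["0x00"] }
//   ]
// }
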
/test/misc.js:
--------------------------------------------------------------------------------
1 | /* eslint-env mocha */
2 |
3 | it('example works', () => {
4 | require('../example')
5 | })
6 |
--------------------------------------------------------------------------------