├── .dockerignore
├── .github
│   ├── ISSUE_TEMPLATE.md
│   ├── PULL_REQUEST_TEMPLATE.md
│   └── workflows
│       ├── ci.yaml
│       └── stale.yaml
├── .gitignore
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── Dockerfile
├── LICENSE
├── README.md
├── contracts
│   ├── EventVerifier.sol
│   ├── Ion.sol
│   ├── IonCompatible.sol
│   ├── Migrations.sol
│   ├── functional
│   │   ├── FabricFunction.sol
│   │   ├── Function.sol
│   │   ├── Trigger.sol
│   │   └── TriggerEventVerifier.sol
│   ├── libraries
│   │   ├── ECVerify.sol
│   │   ├── PatriciaTrie.sol
│   │   ├── RLP.sol
│   │   ├── SafeMath.sol
│   │   └── SolidityUtils.sol
│   ├── mock
│   │   ├── MockIon.sol
│   │   ├── MockStorage.sol
│   │   └── MockValidation.sol
│   ├── storage
│   │   ├── BlockStore.sol
│   │   ├── EthereumStore.sol
│   │   └── FabricStore.sol
│   ├── test
│   │   └── PatriciaTrieTest.sol
│   └── validation
│       ├── Base.sol
│       ├── Clique.sol
│       └── IBFT.sol
├── docker_build
│   ├── account
│   │   ├── keystore
│   │   │   └── UTC--2018-06-05T09-31-57.109288703Z--2be5ab0e43b6dc2908d5321cf318f35b80d0c10d
│   │   └── password-2be5ab0e43b6dc2908d5321cf318f35b80d0c10d.txt
│   ├── clique.json
│   ├── launch_geth.sh
│   └── password
├── docs
│   ├── Ion-CLI.md
│   └── Roadmap.md
├── migrations
│   ├── 1_initial_migration.js
│   └── 2_deploy_contracts.js
├── package-lock.json
├── package.json
├── test
│   ├── clique.js
│   ├── helpers
│   │   ├── encoder.js
│   │   └── utils.js
│   ├── ibft.js
│   ├── integration-base_fabric.js
│   ├── integration-clique_ethereum.js
│   ├── ion.js
│   ├── patricia_trie_test.js
│   ├── storage-ethereum.js
│   └── storage-fabric.js
└── truffle.js
/.dockerignore:
--------------------------------------------------------------------------------
1 | node_modules
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | As a user
2 | I want
3 | So that
4 |
5 |
6 | ### Acceptance criteria
7 | -
8 | -
9 | -
10 |
11 | ### Related issues
12 | -
13 | -
14 | -
15 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ### Description:
2 |
3 | ### All Submissions:
4 |
5 | * [ ] Have you followed the guidelines in our [Contributing document](CONTRIBUTING.md)?
6 | * [ ] Have you checked to ensure there aren't other open [Pull Requests](../../../pulls) for the same update/change?
7 | * [ ] Have you signed your commits?
8 | * [ ] Does your submission pass tests?
9 | * [ ] Have you linted your code locally prior to submission?
10 |
11 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yaml:
--------------------------------------------------------------------------------
1 | on: ["push", "pull_request"]
2 | name: "Continuous Integration"
3 |
4 | jobs:
5 | build:
6 |
7 | runs-on: ubuntu-latest
8 |
9 | strategy:
10 | matrix:
11 | node-version: [10.x]
12 |
13 | steps:
14 | - uses: actions/checkout@v2
15 |
16 | - name: Cache node modules
17 | uses: actions/cache@v1
18 | with:
19 | path: ~/.npm
20 | key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
21 | restore-keys: |
22 | ${{ runner.os }}-node-
23 |
24 | - name: Use Node.js ${{ matrix.node-version }}
25 | uses: actions/setup-node@v1
26 | with:
27 | node-version: ${{ matrix.node-version }}
28 | - run: npm install
29 | - run: npm run testrpc &
30 | - run: npm test
31 | env:
32 | CI: true
33 |
--------------------------------------------------------------------------------
/.github/workflows/stale.yaml:
--------------------------------------------------------------------------------
1 |
2 | name: "Close stale issues"
3 |
4 | on:
5 | schedule:
6 | - cron: "0 0 * * *"
7 |
8 | jobs:
9 | stale:
10 | runs-on: ubuntu-latest
11 | steps:
12 | - uses: actions/stale@v1
13 | with:
14 | repo-token: ${{ secrets.GITHUB_TOKEN }}
15 | stale-issue-message: 'This issue is stale because it has been open 30 days with no activity. Remove stale label or comment or this will be closed in 5 days'
16 | days-before-stale: 30
17 | days-before-close: 5
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 | /abi
3 | /dist
4 | /chaindata
5 | /node_modules
6 | /coverage
7 | /.idea
8 | coverage.json
9 | *.pyc
10 | docs/deps-files.dot
11 | docs/deps-modules.dot
12 | *.log
13 | *.pdf
14 | *.egg-info/
15 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
6 |
7 | ## Our Standards
8 |
9 | Examples of behavior that contributes to creating a positive environment include:
10 |
11 | * Using welcoming and inclusive language
12 | * Being respectful of differing viewpoints and experiences
13 | * Gracefully accepting constructive criticism
14 | * Focusing on what is best for the community
15 | * Showing empathy towards other community members
16 |
17 | Examples of unacceptable behavior by participants include:
18 |
19 | * The use of sexualized language or imagery and unwelcome sexual attention or advances
20 | * Trolling, insulting/derogatory comments, and personal or political attacks
21 | * Public or private harassment
22 | * Publishing others' private information, such as a physical or electronic address, without explicit permission
23 | * Other conduct which could reasonably be considered inappropriate in a professional setting
24 |
25 | ## Our Responsibilities
26 |
27 | Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
28 |
29 | Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
30 |
31 | ## Scope
32 |
33 | This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
34 |
35 | ## Enforcement
36 |
37 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at [opensource@clearmatics.com][email]. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
38 |
39 | Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
40 |
41 | ## Attribution
42 |
43 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
44 |
45 | [email]: mailto:opensource@clearmatics.com
46 | [homepage]: http://contributor-covenant.org
47 | [version]: http://contributor-covenant.org/version/1/4/
48 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | When contributing to this repository, please first discuss the change you wish to make via issue,
4 | email, or any other method with the owners of this repository before making a change.
5 |
6 | Please note we have a code of conduct, please follow it in all your interactions with the project.
7 |
8 | ## Pull Request Process
9 |
10 | 1. Ensure any install or build dependencies are removed before the end of the layer when doing a
11 | build.
12 | 2. Update the README.md with details of changes to the interface, this includes new environment
13 | variables, exposed ports, useful file locations and container parameters.
14 | 3. Increase the version numbers in any examples files and the README.md to the new version that this
15 | Pull Request would represent. The versioning scheme we use is [SemVer][semver].
16 | 4. You may merge the Pull Request in once you have the sign-off of two other developers, or if you
17 | do not have permission to do that, you may request the second reviewer to merge it for you.
18 |
19 | ## Code of Conduct
20 |
21 | This project and everyone participating in it is governed by the [Code of Conduct][codeofconduct]. By participating, you are expected to uphold this code. Please report unacceptable behavior to [opensource@clearmatics.com][email].
22 |
23 | [codeofconduct]: CODE_OF_CONDUCT.md
24 | [semver]: http://semver.org/
25 | [email]: mailto:opensource@clearmatics.com
26 |
27 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM golang:1.8
2 |
3 | RUN printf "deb http://archive.debian.org/debian/ jessie main\ndeb-src http://archive.debian.org/debian/ jessie main\ndeb http://security.debian.org jessie/updates main\ndeb-src http://security.debian.org jessie/updates main" > /etc/apt/sources.list
4 |
5 | RUN apt-get update && apt-get install -y \
6 | vim \
7 | curl \
8 | sudo \
9 | wget
10 |
11 |
12 | # Install a recent version of nodejs
13 | RUN curl -sL https://deb.nodesource.com/setup_10.x | sudo bash - && sudo apt-get install -y nodejs
14 | COPY . /go/src/github.com/clearmatics/ion
15 |
16 | # Install the current compatible solc version
17 | RUN wget https://github.com/ethereum/solidity/releases/download/v0.4.25/solc-static-linux -O solc
18 | RUN chmod +x ./solc
19 | RUN cp ./solc /go/src/github.com/clearmatics/ion
20 | ENV PATH $PATH:/go/src/github.com/clearmatics/ion
21 |
22 | WORKDIR /go/src/github.com/clearmatics/ion
23 |
24 | CMD ["/bin/bash"]
25 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU LESSER GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc.
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 |
9 | This version of the GNU Lesser General Public License incorporates
10 | the terms and conditions of version 3 of the GNU General Public
11 | License, supplemented by the additional permissions listed below.
12 |
13 | 0. Additional Definitions.
14 |
15 | As used herein, "this License" refers to version 3 of the GNU Lesser
16 | General Public License, and the "GNU GPL" refers to version 3 of the GNU
17 | General Public License.
18 |
19 | "The Library" refers to a covered work governed by this License,
20 | other than an Application or a Combined Work as defined below.
21 |
22 | An "Application" is any work that makes use of an interface provided
23 | by the Library, but which is not otherwise based on the Library.
24 | Defining a subclass of a class defined by the Library is deemed a mode
25 | of using an interface provided by the Library.
26 |
27 | A "Combined Work" is a work produced by combining or linking an
28 | Application with the Library. The particular version of the Library
29 | with which the Combined Work was made is also called the "Linked
30 | Version".
31 |
32 | The "Minimal Corresponding Source" for a Combined Work means the
33 | Corresponding Source for the Combined Work, excluding any source code
34 | for portions of the Combined Work that, considered in isolation, are
35 | based on the Application, and not on the Linked Version.
36 |
37 | The "Corresponding Application Code" for a Combined Work means the
38 | object code and/or source code for the Application, including any data
39 | and utility programs needed for reproducing the Combined Work from the
40 | Application, but excluding the System Libraries of the Combined Work.
41 |
42 | 1. Exception to Section 3 of the GNU GPL.
43 |
44 | You may convey a covered work under sections 3 and 4 of this License
45 | without being bound by section 3 of the GNU GPL.
46 |
47 | 2. Conveying Modified Versions.
48 |
49 | If you modify a copy of the Library, and, in your modifications, a
50 | facility refers to a function or data to be supplied by an Application
51 | that uses the facility (other than as an argument passed when the
52 | facility is invoked), then you may convey a copy of the modified
53 | version:
54 |
55 | a) under this License, provided that you make a good faith effort to
56 | ensure that, in the event an Application does not supply the
57 | function or data, the facility still operates, and performs
58 | whatever part of its purpose remains meaningful, or
59 |
60 | b) under the GNU GPL, with none of the additional permissions of
61 | this License applicable to that copy.
62 |
63 | 3. Object Code Incorporating Material from Library Header Files.
64 |
65 | The object code form of an Application may incorporate material from
66 | a header file that is part of the Library. You may convey such object
67 | code under terms of your choice, provided that, if the incorporated
68 | material is not limited to numerical parameters, data structure
69 | layouts and accessors, or small macros, inline functions and templates
70 | (ten or fewer lines in length), you do both of the following:
71 |
72 | a) Give prominent notice with each copy of the object code that the
73 | Library is used in it and that the Library and its use are
74 | covered by this License.
75 |
76 | b) Accompany the object code with a copy of the GNU GPL and this license
77 | document.
78 |
79 | 4. Combined Works.
80 |
81 | You may convey a Combined Work under terms of your choice that,
82 | taken together, effectively do not restrict modification of the
83 | portions of the Library contained in the Combined Work and reverse
84 | engineering for debugging such modifications, if you also do each of
85 | the following:
86 |
87 | a) Give prominent notice with each copy of the Combined Work that
88 | the Library is used in it and that the Library and its use are
89 | covered by this License.
90 |
91 | b) Accompany the Combined Work with a copy of the GNU GPL and this license
92 | document.
93 |
94 | c) For a Combined Work that displays copyright notices during
95 | execution, include the copyright notice for the Library among
96 | these notices, as well as a reference directing the user to the
97 | copies of the GNU GPL and this license document.
98 |
99 | d) Do one of the following:
100 |
101 | 0) Convey the Minimal Corresponding Source under the terms of this
102 | License, and the Corresponding Application Code in a form
103 | suitable for, and under terms that permit, the user to
104 | recombine or relink the Application with a modified version of
105 | the Linked Version to produce a modified Combined Work, in the
106 | manner specified by section 6 of the GNU GPL for conveying
107 | Corresponding Source.
108 |
109 | 1) Use a suitable shared library mechanism for linking with the
110 | Library. A suitable mechanism is one that (a) uses at run time
111 | a copy of the Library already present on the user's computer
112 | system, and (b) will operate properly with a modified version
113 | of the Library that is interface-compatible with the Linked
114 | Version.
115 |
116 | e) Provide Installation Information, but only if you would otherwise
117 | be required to provide such information under section 6 of the
118 | GNU GPL, and only to the extent that such information is
119 | necessary to install and execute a modified version of the
120 | Combined Work produced by recombining or relinking the
121 | Application with a modified version of the Linked Version. (If
122 | you use option 4d0, the Installation Information must accompany
123 | the Minimal Corresponding Source and Corresponding Application
124 | Code. If you use option 4d1, you must provide the Installation
125 | Information in the manner specified by section 6 of the GNU GPL
126 | for conveying Corresponding Source.)
127 |
128 | 5. Combined Libraries.
129 |
130 | You may place library facilities that are a work based on the
131 | Library side by side in a single library together with other library
132 | facilities that are not Applications and are not covered by this
133 | License, and convey such a combined library under terms of your
134 | choice, if you do both of the following:
135 |
136 | a) Accompany the combined library with a copy of the same work based
137 | on the Library, uncombined with any other library facilities,
138 | conveyed under the terms of this License.
139 |
140 | b) Give prominent notice with the combined library that part of it
141 | is a work based on the Library, and explaining where to find the
142 | accompanying uncombined form of the same work.
143 |
144 | 6. Revised Versions of the GNU Lesser General Public License.
145 |
146 | The Free Software Foundation may publish revised and/or new versions
147 | of the GNU Lesser General Public License from time to time. Such new
148 | versions will be similar in spirit to the present version, but may
149 | differ in detail to address new problems or concerns.
150 |
151 | Each version is given a distinguishing version number. If the
152 | Library as you received it specifies that a certain numbered version
153 | of the GNU Lesser General Public License "or any later version"
154 | applies to it, you have the option of following the terms and
155 | conditions either of that published version or of any later version
156 | published by the Free Software Foundation. If the Library as you
157 | received it does not specify a version number of the GNU Lesser
158 | General Public License, you may choose any version of the GNU Lesser
159 | General Public License ever published by the Free Software Foundation.
160 |
161 | If the Library as you received it specifies that a proxy can decide
162 | whether future versions of the GNU Lesser General Public License shall
163 | apply, that proxy's public statement of acceptance of any version is
164 | permanent authorization for you to choose that version for the
165 | Library.
166 |
167 |
--------------------------------------------------------------------------------
/contracts/EventVerifier.sol:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2016-2018 Clearmatics Technologies Ltd
2 | // SPDX-License-Identifier: LGPL-3.0+
3 | pragma solidity ^0.5.12;
4 |
5 | import "./libraries/RLP.sol";
6 | import "./libraries/SolidityUtils.sol";
7 |
8 | /*
9 | EventVerifier
10 |
11 | This contract is the basic global EventVerifier interface that all specific event verifiers must inherit from.
12 |
13 | It supplies a function `retrieveLog` that will output the relevant log for a specified event signature from the
14 | provided receipts. Each specific verifier that inherits this contract must hold knowledge of the event signature it
15 | intends to consume which will be passed to the retrieval function for log separation.
16 | */
17 |
18 | contract EventVerifier {
19 | /*
20 | retrieveLog
21 | param: _eventSignature (bytes32) Hash representing the event signature of the event type to be consumed
22 | param: _contractEmittedAddress (bytes20) Address of the contract expected to have emitted the event
23 | param: _rlpReceipt (bytes) RLP-encoded receipt containing the relevant logs
24 |
25 | returns: log (RLP.RLPItem[]) Decoded log object in the form [ contractAddress, topics, data ]
26 |
27 | This decodes an RLP-encoded receipt and trawls through the logs to find the event that matches the event
28 | signature required and checks if the event was emitted from the correct source. If no log could be found with
29 | the relevant signature or emitted from the expected source the execution fails with an assert.
30 |
31 | If a log is not found, an `assert(false)` consumes all the gas and fails the transaction in order to incentivise
32 | submission of proper data.
33 | */
34 | function retrieveLog(bytes32 _eventSignature, bytes20 _contractEmittedAddress, bytes memory _rlpReceipt)
35 | internal pure returns (RLP.RLPItem[] memory)
36 | {
37 |         /* Decode the receipt into its constituents, grab the logs from their known position in the receipt
38 |            object, and proceed to decode the logs as well.
39 | */
40 | RLP.RLPItem[] memory receipt = RLP.toList(RLP.toRLPItem(_rlpReceipt));
41 | RLP.RLPItem[] memory logs = RLP.toList(receipt[3]);
42 |
43 | /* The receipts could contain multiple event logs if a single transaction emitted multiple events. We need to
44 | separate them and locate the relevant event by signature.
45 | */
46 | for (uint i = 0; i < logs.length; i++) {
47 | RLP.RLPItem[] memory log = RLP.toList(logs[i]);
48 | RLP.RLPItem[] memory topics = RLP.toList(log[1]);
49 |
50 | bytes32 containedEventSignature = RLP.toBytes32(topics[0]);
51 | if (containedEventSignature == _eventSignature) {
52 | // If event signature is found, check the contract address it was emitted from
53 | bytes20 b20_emissionSource = SolUtils.BytesToBytes20(RLP.toData(log[0]), 0);
54 | assert( b20_emissionSource == _contractEmittedAddress);
55 | return log;
56 | }
57 | }
58 | assert( false );
59 | }
60 |
61 | }
62 |
--------------------------------------------------------------------------------
/contracts/Ion.sol:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2016-2018 Clearmatics Technologies Ltd
2 | // SPDX-License-Identifier: LGPL-3.0+
3 | pragma solidity ^0.5.12;
4 |
5 | import "./libraries/ECVerify.sol";
6 | import "./libraries/RLP.sol";
7 | import "./libraries/PatriciaTrie.sol";
8 | import "./libraries/SolidityUtils.sol";
9 | import "./storage/BlockStore.sol";
10 |
11 | contract Ion {
12 |
13 | bytes32 public chainId;
14 | mapping (address => bool) public m_registered_validation;
15 | address[] public validation_modules;
16 |
17 | /*
18 | * Constructor
19 | * param: id (bytes32) Unique id to identify this chain that the contract is being deployed to.
20 | *
21 | * Supplied with a unique id to identify this chain to others that may interoperate with it.
22 | * The deployer must assert that the id is indeed public and that it is not already being used
23 | * by another chain
24 | */
25 | constructor(bytes32 _id) public {
26 | chainId = _id;
27 | }
28 |
29 |     /*
30 |     * onlyRegisteredValidation
31 |     *
32 |     * Modifier that checks that the calling contract is a validation module
33 |     * that has been registered with this contract
34 |     */
35 | modifier onlyRegisteredValidation() {
36 | require( isContract(msg.sender), "Caller address is not a valid contract. Please inherit the BlockStore contract for proper usage." );
37 | require( m_registered_validation[msg.sender], "Validation module is not registered");
38 | _;
39 | }
40 |
41 | // Pseudo-modifier returns boolean, used with different 'require's to input custom revert messages
42 | function isContract(address _addr) internal view returns (bool) {
43 | uint size;
44 | assembly { size := extcodesize(_addr) }
45 | return (size > 0);
46 | }
47 |
48 |
49 | function registerValidationModule() public {
50 | require( isContract(msg.sender), "Caller address is not a valid contract. Please inherit the BlockStore contract for proper usage." );
51 | require( !m_registered_validation[msg.sender], "Validation module has already been registered." );
52 |
53 | m_registered_validation[msg.sender] = true;
54 | validation_modules.push(msg.sender);
55 | }
56 |
57 | function addChain(address _storageAddress, bytes32 _chainId) onlyRegisteredValidation public {
58 | BlockStore store = BlockStore(_storageAddress);
59 | store.addChain(_chainId);
60 | }
61 |
62 |     /*
63 |     * storeBlock
64 |     * param: _storageAddress (address) Block store contract the block is forwarded to
65 |     * param: _chainId (bytes32) Chain id of the block; _blockBlob (bytes) Encoded block data to store
66 |     */
67 | function storeBlock(address _storageAddress, bytes32 _chainId, bytes memory _blockBlob) onlyRegisteredValidation public {
68 | require( isContract(_storageAddress), "Storage address provided is not contract.");
69 | BlockStore store = BlockStore(_storageAddress);
70 |
71 | store.addBlock(_chainId, _blockBlob);
72 | }
73 | }
--------------------------------------------------------------------------------
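Ion only accepts addChain and storeBlock calls from contracts that have previously called
registerValidationModule. A minimal sketch of a validation module driving that flow is given
below; SketchValidation, its function names and its pass-through logic are illustrative
assumptions (the real, consensus-specific modules live under contracts/validation/).
Registration is done in a separate register() call rather than in the constructor, because a
contract's extcodesize is zero while its constructor runs, so Ion's isContract(msg.sender)
check would reject it.

    // Hypothetical sketch of a validation module; not part of the repository.
    pragma solidity ^0.5.12;

    import "./Ion.sol";

    contract SketchValidation {
        Ion internal ion;

        constructor(address _ionAddr) public {
            ion = Ion(_ionAddr);
        }

        // Called once after deployment, when this contract has a non-zero code size.
        function register() public {
            ion.registerValidationModule();
        }

        // Register an interoperating chain against a specific block store.
        function registerChain(address _storeAddr, bytes32 _chainId) public {
            ion.addChain(_storeAddr, _chainId);
        }

        // After consensus-specific checks (omitted here), forward the encoded block
        // to Ion, which relays it to the given block store.
        function submitBlock(address _storeAddr, bytes32 _chainId, bytes memory _blockBlob) public {
            ion.storeBlock(_storeAddr, _chainId, _blockBlob);
        }
    }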
/contracts/IonCompatible.sol:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2016-2018 Clearmatics Technologies Ltd
2 | // SPDX-License-Identifier: LGPL-3.0+
3 | pragma solidity ^0.5.12;
4 | import "./Ion.sol";
5 |
6 | contract IonCompatible {
7 |     /* The Ion contract that proofs would be made to. Ensure that, prior to any verification attempt, the relevant
8 |        blocks have been submitted to the Ion contract. */
9 | Ion internal ion;
10 |
11 | constructor(address _ionAddr) public {
12 | ion = Ion(_ionAddr);
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/contracts/Migrations.sol:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2016-2017 Clearmatics Technologies Ltd
2 | // SPDX-License-Identifier: LGPL-3.0+
3 | pragma solidity ^0.5.12;
4 |
5 | contract Migrations {
6 | address public owner;
7 | uint256 public last_completed_migration;
8 |
9 | modifier restricted() {
10 | if (msg.sender == owner) _;
11 | }
12 |
13 | constructor() public {
14 | owner = msg.sender;
15 | }
16 |
17 | function setCompleted(uint256 completed) public restricted {
18 | last_completed_migration = completed;
19 | }
20 |
21 | function upgrade(address new_address) public restricted {
22 | Migrations upgraded = Migrations(new_address);
23 | upgraded.setCompleted(last_completed_migration);
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/contracts/functional/FabricFunction.sol:
--------------------------------------------------------------------------------
1 | pragma solidity ^0.5.12;
2 |
3 | import "../storage/FabricStore.sol";
4 |
5 | contract FabricFunction {
6 | FabricStore blockStore;
7 |
8 | constructor(address _storeAddr) public {
9 | blockStore = FabricStore(_storeAddr);
10 | }
11 |
12 | event State(uint blockNo, uint txNo, string mvalue);
13 |
14 | function execute(uint _blockNo, uint _txNo, string memory _value) internal {
15 | emit State(_blockNo, _txNo, _value);
16 | }
17 |
18 | function retrieveAndExecute(bytes32 _chainId, string memory _channelId, string memory _key) public {
19 | uint blockVersion;
20 | uint txVersion;
21 | string memory value;
22 |
23 | (blockVersion, txVersion, value) = blockStore.getState(_chainId, _channelId, _key);
24 |
25 | execute(blockVersion, txVersion, value);
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/contracts/functional/Function.sol:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2016-2018 Clearmatics Technologies Ltd
2 | // SPDX-License-Identifier: LGPL-3.0+
3 | pragma solidity ^0.5.12;
4 |
5 | import "../storage/EthereumStore.sol";
6 |
7 | contract TriggerEventVerifier {
8 | function verify(bytes20 _contractEmittedAddress, bytes memory _rlpReceipt, bytes20 _expectedAddress) public returns (bool);
9 | }
10 |
11 | /*
12 | This function contract is the consumer of an event and performs some execution thereafter. In practice, this would
13 | be written by a contract designer that intends to consume specific events from another chain. As such all behaviour
14 | and dependence on such event must be defined here.
15 |
16 | Common custom behaviour may include:
17 | * Keeping an array of transaction hashes denoting the specific events from that transaction that
18 | have already been consumed to restrict multiple consumption or 'double spend' of events.
19 | * Extending the amount of expected event parameters above the stack limit. This might then require some other method
20 | of passing expected parameters to the contract possibly via RLP-encoding to compress all data to a single argument
21 | and decoding them within the `verifyAndExecute` function.
22 | * Including multiple event verifiers if a function requires proof of multiple state transitions from other chains.
23 | This would also bloat the local scope which is prone to 'stack too deep' issues which would require custom
24 | workarounds.
25 | */
26 | contract Function {
27 | EthereumStore blockStore;
28 |
29 | /* The event verifier for the specific event being consumed. Each event would require a different event verifier to
30 | be deployed and each consumer would reference the relevant verifier to prove logs. */
31 | TriggerEventVerifier verifier;
32 |
33 | /* Custom event that fires when execution is performed successfully. */
34 | event Executed();
35 |
36 | /* Constructor. Requires Ion contract address and all used event verifier contract addresses. In this case we only
37 | use one verifier. */
38 | constructor(address _storeAddr, address _verifierAddr) public {
39 | blockStore = EthereumStore(_storeAddr);
40 | verifier = TriggerEventVerifier(_verifierAddr);
41 | }
42 |
43 | /* This is the function that is intended to be executed upon successful verification of proofs */
44 | function execute() internal {
45 | emit Executed();
46 | }
47 |
48 | /*
49 | verifyAndExecute
50 |
51 | Core parameters for verification
52 | param: _chainId (bytes32) Chain ID of the chain that the event being consumed was emitted on. This may require
53 | altering to (bytes) if proofs from multiple chains are needed.
54 | param: _blockHash (bytes32) Block hash of block with event to be consumed. This may require altering to (bytes)
55 | if proofs from multiple chains are needed.
56 | param: _contractEmittedAddress (bytes20) Contract address of the source of event emission. This may require
57 | altering to (bytes) if proofs from multiple chains are needed.
58 |     param: _proof (bytes) RLP-encoded proof data provided by proof generation. This single argument packs the
59 |                     components needed for verification:
60 |                     - the transaction index path of the relevant transaction that emitted the event being
61 |                       consumed (if multiple proofs are required, multiple paths supplied must be RLP-encoded
62 |                       and an extra (bool) parameter provided to denote multiple paths included),
63 |                     - the RLP-encoded transaction object and its trie nodes,
64 |                     - the RLP-encoded receipt object and its trie nodes.
65 |
66 | Custom parameters for verification
67 | param: _expectedAddress (bytes20) The expected address value in the event parameter being consumed.
68 |
69 | This is the only public function apart from the constructor and is the only interface to this contract. This
70 | function wraps the verification and execution which only fires after a successful slew of verifications. As
71 | noted, stack restrictions will make it harder to implement multiple event consumption. Suggestions made here may
72 | not be the best way to achieve this but are possible methods. It may end up requiring separate functions for
73 | each event and persisting the consumption state of each event per tx hash and using that to allow or prevent
74 | verified execution. In our case, it is simple as we only consume a single event.
75 | */
76 | function verifyAndExecute(
77 | bytes32 _chainId,
78 | bytes32 _blockHash,
79 | bytes20 _contractEmittedAddress,
80 | bytes memory _proof,
81 | bytes20 _expectedAddress
82 | ) public {
83 | bytes memory receipt = blockStore.CheckProofs(_chainId, _blockHash, _proof);
84 |
85 | require( verifier.verify(_contractEmittedAddress, receipt, _expectedAddress), "Event verification failed." );
86 | execute();
87 | }
88 | }
89 |
90 |
--------------------------------------------------------------------------------
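The comment block in Function.sol suggests keeping a record of already-consumed events so that
the same proof cannot trigger execution twice. A minimal sketch of such a guard is given below,
under the assumption that hashing the verified receipt is an acceptable consumption key;
GuardedFunction, the consumed mapping and the keying scheme are illustrative only and are not
part of the repository.

    // Hypothetical sketch of the double-consumption guard suggested in Function.sol.
    pragma solidity ^0.5.12;

    import "../storage/EthereumStore.sol";

    // Mirrors the verifier declaration used by Function.sol.
    contract TriggerEventVerifier {
        function verify(bytes20 _contractEmittedAddress, bytes memory _rlpReceipt, bytes20 _expectedAddress) public returns (bool);
    }

    contract GuardedFunction {
        EthereumStore blockStore;
        TriggerEventVerifier verifier;

        // Records which verified receipts have already triggered execution.
        mapping (bytes32 => bool) public consumed;

        event Executed();

        constructor(address _storeAddr, address _verifierAddr) public {
            blockStore = EthereumStore(_storeAddr);
            verifier = TriggerEventVerifier(_verifierAddr);
        }

        function verifyAndExecute(
            bytes32 _chainId,
            bytes32 _blockHash,
            bytes20 _contractEmittedAddress,
            bytes memory _proof,
            bytes20 _expectedAddress
        ) public {
            bytes memory receipt = blockStore.CheckProofs(_chainId, _blockHash, _proof);

            // Refuse to consume the same verified receipt twice.
            bytes32 receiptHash = keccak256(receipt);
            require( !consumed[receiptHash], "Event already consumed." );
            consumed[receiptHash] = true;

            require( verifier.verify(_contractEmittedAddress, receipt, _expectedAddress), "Event verification failed." );
            emit Executed();
        }
    }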
/contracts/functional/Trigger.sol:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2016-2018 Clearmatics Technologies Ltd
2 | // SPDX-License-Identifier: LGPL-3.0+
3 | pragma solidity ^0.5.12;
4 |
5 | /*
6 | Trigger
7 |
8 | Example contract that emits an event to be consumed.
9 |
10 | Currently an instance deployed to:
11 | Rinkeby @: 0x61621bcf02914668f8404c1f860e92fc1893f74c
12 | Deployment Tx Hash: 0xc9500e84af2394e1d91b43e40c9c89f105636748f95ae05c11c73f2fd755795e
13 | Deployed Block Number: 2657325
14 | `fire()` call Tx Hash 0xafc3ab60059ed38e71c7f6bea036822abe16b2c02fcf770a4f4b5fffcbfe6e7e
15 | `fire()` call Tx Hash 0xf8a9a2f7e894f243fd12e5379c1dca2e139817f440e0ced7a8db42ec8dcf30ff
16 |
17 | The current tests are running against generated proofs from Rinkeby for the above data and consumes the event
18 | emitted in the transaction executed.
19 | */
20 |
21 | contract Trigger {
22 | event Triggered(address caller);
23 |
24 | function fire() public {
25 | emit Triggered(msg.sender);
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/contracts/functional/TriggerEventVerifier.sol:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2016-2018 Clearmatics Technologies Ltd
2 | // SPDX-License-Identifier: LGPL-3.0+
3 | pragma solidity ^0.5.12;
4 |
5 | import "../libraries/RLP.sol";
6 | import "../libraries/SolidityUtils.sol";
7 | import "../EventVerifier.sol";
8 |
9 | /*
10 | TriggerEventVerifier
11 |
12 | Inherits from `EventVerifier` and verifies `Triggered` events.
13 |
14 | From the provided logs, we separate the data and define checks to assert certain information in the event and
15 | returns `true` if successful.
16 |
17 | Contracts similar to this that verify specific events should be designed only to verify the data inside the
18 | supplied events with similarly supplied expected outcomes. It is only meant to serve as a utility to perform defined
19 | checks against specific events.
20 | */
21 | contract TriggerEventVerifier is EventVerifier {
22 | bytes32 eventSignature = keccak256("Triggered(address)");
23 |
24 | function verify(bytes20 _contractEmittedAddress, bytes memory _rlpReceipt, bytes20 _expectedAddress) public view returns (bool) {
25 | // Retrieve specific log for given event signature
26 | RLP.RLPItem[] memory log = retrieveLog(eventSignature, _contractEmittedAddress, _rlpReceipt);
27 |
28 | // Split logs into constituents. Not all constituents are used here
29 | bytes memory contractEmittedEvent = RLP.toData(log[0]);
30 | RLP.RLPItem[] memory topics = RLP.toList(log[1]);
31 | bytes memory data = RLP.toData(log[2]);
32 |
33 | /*
34 | This section below is specific to this event verifier and checks the relevant data.
35 | In this event we only expect a single un-indexed address parameter which will be present in the data field.
36 |         The data field pads its contents if they are less than 32 bytes. Therefore we know that our address parameter
37 | exists in the 20 least significant bytes of the data field.
38 |
39 | We copy the last 20 bytes of our data field to a bytes20 variable to compare against the supplied expected
40 | parameter in the event from our function call. This acts as our conditional check that the event called is what
41 | the user expects.
42 | */
43 | bytes20 b20_address = SolUtils.BytesToBytes20(data, data.length - 20);
44 | return b20_address == _expectedAddress;
45 | }
46 |
47 | }
48 |
--------------------------------------------------------------------------------
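The verifier above recovers the un-indexed address argument from the final 20 bytes of the
log's data field. The short sketch below is illustrative only and not part of the repository;
it shows why that offset is correct: an un-indexed address is ABI-encoded as a single 32-byte
word, left-padded with zeroes, so the address sits in the last 20 bytes.

    // Hypothetical illustration of the data-field layout consumed by TriggerEventVerifier.
    pragma solidity ^0.5.12;

    contract DataFieldLayout {
        event Triggered(address caller);

        // An un-indexed address argument is encoded as one 32-byte word with the
        // address right-aligned, exactly as it appears in the event's data field.
        function encodeCaller(address _caller) public pure returns (bytes memory data) {
            data = abi.encode(_caller); // 32 bytes; the address occupies bytes 12..31
        }
    }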
/contracts/libraries/ECVerify.sol:
--------------------------------------------------------------------------------
1 | pragma solidity ^0.5.12;
2 |
3 | //
4 | // The new assembly support in Solidity makes writing helpers easy.
5 | // Many have complained how complex it is to use `ecrecover`, especially in conjunction
6 | // with the `eth_sign` RPC call. Here is a helper, which makes that a matter of a single call.
7 | //
8 | // Sample input parameters:
9 | // (with v=0)
10 | // "0x47173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad",
11 | // "0xaca7da997ad177f040240cdccf6905b71ab16b74434388c3a72f34fd25d6439346b2bac274ff29b48b3ea6e2d04c1336eaceafda3c53ab483fc3ff12fac3ebf200",
12 | // "0x0e5cb767cce09a7f3ca594df118aa519be5e2b5a"
13 | //
14 | // (with v=1)
15 | // "0x47173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad",
16 | // "0xdebaaa0cddb321b2dcaaf846d39605de7b97e77ba6106587855b9106cb10421561a22d94fa8b8a687ff9c911c844d1c016d1a685a9166858f9c7c1bc85128aca01",
17 | // "0x8743523d96a1b2cbe0c6909653a56da18ed484af"
18 | //
19 | // (The hash is a hash of "hello world".)
20 | //
21 | // Written by Alex Beregszaszi (@axic), use it under the terms of the MIT license.
22 | //
23 |
24 | library ECVerify {
25 | // Duplicate Solidity's ecrecover, but catching the CALL return value
26 | function safer_ecrecover(bytes32 hash, uint8 v, bytes32 r, bytes32 s) view internal returns (address) {
27 | // We do our own memory management here. Solidity uses memory offset
28 | // 0x40 to store the current end of memory. We write past it (as
29 | // writes are memory extensions), but don't update the offset so
30 | // Solidity will reuse it. The memory used here is only needed for
31 | // this context.
32 |
33 | // FIXME: inline assembly can't access return values
34 | bool ret;
35 | address addr;
36 |
37 | assembly {
38 | let size := mload(0x40)
39 | mstore(size, hash)
40 | mstore(add(size, 32), v)
41 | mstore(add(size, 64), r)
42 | mstore(add(size, 96), s)
43 |
44 | // NOTE: we can reuse the request memory because we deal with
45 | // the return code
46 | ret := staticcall(3000, 1, size, 128, size, 32)
47 | addr := mload(size)
48 | }
49 |
50 | require( ret == true );
51 |
52 | return addr;
53 | }
54 |
55 | function ecrecovery(bytes32 hash, bytes memory sig) internal view returns (address) {
56 | bytes32 r;
57 | bytes32 s;
58 | uint8 v;
59 |
60 | require (sig.length == 65);
61 |
62 | // The signature format is a compact form of:
63 | // {bytes32 r}{bytes32 s}{uint8 v}
64 | // Compact means, uint8 is not padded to 32 bytes.
65 | assembly {
66 | r := mload(add(sig, 32))
67 | s := mload(add(sig, 64))
68 |
69 | // Here we are loading the last 32 bytes. We exploit the fact that
70 | // 'mload' will pad with zeroes if we overread.
71 | // There is no 'mload8' to do this, but that would be nicer.
72 | v := byte(0, mload(add(sig, 96)))
73 |
74 | // Alternative solution:
75 | // 'byte' is not working due to the Solidity parser, so lets
76 | // use the second best option, 'and'
77 | // v := and(mload(add(sig, 65)), 255)
78 | }
79 |
80 | // albeit non-transactional signatures are not specified by the YP, one would expect it
81 | // to match the YP range of [27, 28]
82 | //
83 | // geth uses [0, 1] and some clients have followed. This might change, see:
84 | // https://github.com/ethereum/go-ethereum/issues/2053
85 | if (v < 27)
86 | v += 27;
87 |
88 | require (v == 27 || v == 28);
89 |
90 | /* prefix might be needed for geth only
91 | * https://github.com/ethereum/go-ethereum/issues/3731
92 | */
93 | /* bytes memory prefix = "\x19Ethereum Signed Message:\n32";
94 | hash = keccak256(prefix, hash); */
95 | /* hash = sha3(prefix, hash); */
96 |
97 | return safer_ecrecover(hash, v, r, s);
98 | /* return ecrecover(hash, v, r, s); */
99 | }
100 | }
101 |
--------------------------------------------------------------------------------
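ECVerify.ecrecovery wraps ecrecover behind a single call that takes a hash and a 65-byte
{r, s, v} signature and returns the signer address. A minimal usage sketch follows;
SignatureChecker and its expected-signer comparison are illustrative assumptions, not part of
the repository.

    // Hypothetical usage sketch for the ECVerify library.
    pragma solidity ^0.5.12;

    import "./ECVerify.sol";

    contract SignatureChecker {
        // Returns true if _sig is a valid 65-byte {r, s, v} signature over _hash
        // produced by _expectedSigner.
        function isSignedBy(bytes32 _hash, bytes memory _sig, address _expectedSigner) public view returns (bool) {
            return ECVerify.ecrecovery(_hash, _sig) == _expectedSigner;
        }
    }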
/contracts/libraries/PatriciaTrie.sol:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2016-2018 Clearmatics Technologies Ltd
2 | // SPDX-License-Identifier: LGPL-3.0+
3 | pragma solidity ^0.5.12;
4 |
5 | import "./RLP.sol";
6 |
7 | library PatriciaTrie {
8 |
9 | function verifyProof(bytes memory _value, bytes memory _parentNodes, bytes memory _path, bytes32 _root) internal pure returns (bool) {
10 | RLP.RLPItem memory nodes = RLP.toRLPItem(_parentNodes);
11 | RLP.RLPItem[] memory parentNodes = RLP.toList(nodes);
12 |
13 | bytes32 currentNodeKey = _root;
14 |
15 | uint traversedNibbles = 0;
16 | bytes memory path = toNibbleArray(_path, false);
17 |
18 | for (uint i = 0; i < parentNodes.length; i++) {
19 | if (currentNodeKey != keccak256(RLP.toBytes(parentNodes[i]))) {
20 | return false;
21 | }
22 |
23 | RLP.RLPItem[] memory currentNode = RLP.toList(parentNodes[i]);
24 |
25 | if (currentNode.length == 17) {
26 | // Branch Node
27 | (currentNodeKey, traversedNibbles) = processBranchNode(currentNode, traversedNibbles, path, _value);
28 | } else if (currentNode.length == 2) {
29 | // Extension/Leaf Node
30 | (currentNodeKey, traversedNibbles) = processExtensionLeafNode(currentNode, traversedNibbles, path, _value);
31 | } else {
32 | return false;
33 | }
34 |
35 | // Read comment block below for explanation of this
36 | if (currentNodeKey == 0x0) {
37 | return traversedNibbles == 1;
38 | }
39 | }
40 |
41 | return false;
42 | }
43 |
44 | /**
45 | Node Processing
46 |
47 |     processBranchNode returns (bytes32 currentNodeKey, uint traversedNibbles)
48 | processExtensionLeafNode returns (bytes32 currentNodeKey, uint traversedNibbles)
49 |
50 |     Due to the dual nature of how a branch node may be processed, where the next node in the path may either be
51 |     referenced by hash or nested inside the branch node if the total RLP-encoded node is less than 32 bytes (a nested node),
52 |     we required a separation of logic due to "stack-too-deep" issues and opted for a messy return of reused variables.
53 |     These returned variables now serve two purposes:
54 |
55 | * currentNodeKey (bytes32): Holds value of the hash of the next node to be processed. If processing is finished this
56 | value is 0x0.
57 | * traversedNibbles (uint): Tracks how many nibbles have been traversed. If processing is finished this value will
58 | be 0 if verification failed, and 1 if verification succeeded.
59 |
60 | The dual-functionality of these variables is the crux of how I avoided stack issues which makes the code somewhat
61 | unreadable. If there is an improvement to this algorithm that can make it more readable please share.
62 |
63 | */
64 |
65 | function processBranchNode(RLP.RLPItem[] memory _currentNode, uint _traversedNibbles, bytes memory _path, bytes memory _value) private pure returns (bytes32, uint) {
66 | // Return the value at the current node if we have reached the end of the path
67 | if (_traversedNibbles == _path.length) {
68 | return (0x0, checkNodeValue(_value, RLP.toBytes(_currentNode[16])) ? 1 : 0);
69 | }
70 |
71 | uint8 nextPathNibble = nibbleToUint8(_path[_traversedNibbles]);
72 | RLP.RLPItem memory nextNode = _currentNode[nextPathNibble];
73 | _traversedNibbles += 1;
74 |
75 | bytes32 currentNodeKey;
76 | if (RLP.toBytes(nextNode).length < 32) {
77 | //Nested 'Node'
78 | (currentNodeKey, _traversedNibbles) = processNestedNode(nextNode, _traversedNibbles, _path, _value);
79 | } else {
80 | currentNodeKey = RLP.toBytes32(_currentNode[nextPathNibble]);
81 | }
82 | return (currentNodeKey, _traversedNibbles);
83 | }
84 |
85 | function processExtensionLeafNode(
86 | RLP.RLPItem[] memory _currentNode,
87 | uint _traversedNibbles,
88 | bytes memory _path,
89 | bytes memory _value
90 | ) private pure returns (bytes32, uint) {
91 | bytes memory nextPathNibbles = RLP.toData(_currentNode[0]);
92 | _traversedNibbles += toNibbleArray(nextPathNibbles, true).length;
93 |
94 | if (_traversedNibbles == _path.length) {
95 | return (0x0, checkNodeValue(_value, RLP.toData(_currentNode[1])) ? 1 : 0);
96 | }
97 |
98 | // Reached a leaf before end of the path. Proof false.
99 | if (toNibbleArray(nextPathNibbles, true).length == 0) {
100 | return (0x0, 0);
101 | }
102 |
103 | bytes memory nextNodeKey = RLP.toData(_currentNode[1]);
104 | bytes32 currentNodeKey = bytesToBytes32(nextNodeKey, 0);
105 |
106 | return (currentNodeKey, _traversedNibbles);
107 | }
108 |
109 | function processNestedNode(RLP.RLPItem memory _nextNode, uint _traversedNibbles, bytes memory _path, bytes memory _value) private pure returns (bytes32, uint) {
110 | RLP.RLPItem[] memory currentNode = RLP.toList(_nextNode);
111 | if (currentNode.length == 17) {
112 | // Branch Node
113 | return processBranchNode(currentNode, _traversedNibbles, _path, _value);
114 | } else if (currentNode.length == 2) {
115 | // Leaf Node
116 | return processExtensionLeafNode(currentNode, _traversedNibbles, _path, _value);
117 | } else {
118 | return (0x0, 0);
119 | }
120 | }
121 |
122 | function checkNodeValue(bytes memory _expected, bytes memory _nodeValue) private pure returns (bool) {
123 | return keccak256(_expected) == keccak256(_nodeValue);
124 | }
125 |
126 | function toNibbleArray(bytes memory b, bool hexPrefixed) private pure returns (bytes memory) {
127 | bytes memory nibbleArray = new bytes(255);
128 |
129 | uint8 nibblesFound = 0;
130 | for (uint i = 0; i < b.length; i++) {
131 | byte[2] memory nibbles = byteToNibbles(b[i]);
132 |
133 | if (hexPrefixed && i == 0) {
134 | if (nibbles[0] == byte(0x01) || nibbles[0] == byte(0x03)) {
135 | nibbleArray[nibblesFound] = nibbles[1];
136 | nibblesFound += 1;
137 | }
138 | } else {
139 | nibbleArray[nibblesFound] = nibbles[0];
140 | nibbleArray[nibblesFound + 1] = nibbles[1];
141 | nibblesFound += 2;
142 | }
143 | }
144 |
145 | bytes memory finiteNibbleArray = new bytes(nibblesFound);
146 | for (uint j = 0; j < nibblesFound; j++) {
147 | finiteNibbleArray[j] = nibbleArray[j];
148 | }
149 | return finiteNibbleArray;
150 | }
151 |
152 | function byteToNibbles(byte b) private pure returns (byte[2] memory) {
153 | byte firstNibble = rightShift(b, 4);
154 | byte secondNibble = b & byte(0x0f);
155 |
156 | return [firstNibble, secondNibble];
157 | }
158 |
159 | function nibbleToUint8(byte nibble) private pure returns (uint8) {
160 | return uint8(nibble);
161 | }
162 |
163 | function leftShift(byte i, uint8 bits) private pure returns (byte) {
164 | return byte(uint8(i) * uint8(2) ** uint8(bits));
165 | }
166 |
167 | function rightShift(byte i, uint8 bits) private pure returns (byte) {
168 | return byte(uint8(i) / uint8(2) ** uint8(bits));
169 | }
170 |
171 | function bytesToBytes32(bytes memory b, uint offset) private pure returns (bytes32) {
172 | bytes32 out;
173 |
174 | for (uint i = 0; i < 32; i++) {
175 | out |= bytes32(b[offset + i] & 0xFF) >> (i * 8);
176 | }
177 | return out;
178 | }
179 | }
180 |
--------------------------------------------------------------------------------
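PatriciaTrie.verifyProof walks the supplied parent nodes from the root, checking that each
node hashes to the key taken from the previous node and consuming path nibbles until the value
is (or is not) found under the given root. A minimal caller sketch follows; ReceiptProofChecker
is illustrative only (contracts/test/PatriciaTrieTest.sol in the tree above is the repository's
own test contract for this library), and the proof components are assumed to be generated
off-chain.

    // Hypothetical caller sketch for the PatriciaTrie library.
    pragma solidity ^0.5.12;

    import "./PatriciaTrie.sol";

    contract ReceiptProofChecker {
        // _value:       RLP-encoded leaf value (e.g. a transaction receipt)
        // _parentNodes: RLP-encoded list of trie nodes from the root down to the leaf
        // _path:        RLP-encoded key (e.g. the transaction index in the block)
        // _root:        trie root the proof is checked against (e.g. a block's receiptsRoot)
        function check(bytes memory _value, bytes memory _parentNodes, bytes memory _path, bytes32 _root) public pure returns (bool) {
            return PatriciaTrie.verifyProof(_value, _parentNodes, _path, _root);
        }
    }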
/contracts/libraries/RLP.sol:
--------------------------------------------------------------------------------
1 | pragma solidity ^0.5.12;
2 | /**
3 | * @title RLPReader
4 | *
5 | * RLPReader is used to read and parse RLP encoded data in memory.
6 | *
7 | * @author Andreas Olofsson (androlo1980@gmail.com)
8 | */
9 | library RLP {
10 |
11 | uint constant DATA_SHORT_START = 0x80;
12 | uint constant DATA_LONG_START = 0xB8;
13 | uint constant LIST_SHORT_START = 0xC0;
14 | uint constant LIST_LONG_START = 0xF8;
15 |
16 | uint constant DATA_LONG_OFFSET = 0xB7;
17 | uint constant LIST_LONG_OFFSET = 0xF7;
18 |
19 |
20 | struct RLPItem {
21 | uint _unsafe_memPtr; // Pointer to the RLP-encoded bytes.
22 | uint _unsafe_length; // Number of bytes. This is the full length of the string.
23 | }
24 |
25 | struct Iterator {
26 | RLPItem _unsafe_item; // Item that's being iterated over.
27 | uint _unsafe_nextPtr; // Position of the next item in the list.
28 | }
29 |
30 | /* Iterator */
31 |
32 | function next(Iterator memory self) internal pure returns (RLPItem memory subItem) {
33 | if(hasNext(self)) {
34 | uint ptr = self._unsafe_nextPtr;
35 | uint itemLength = _itemLength(ptr);
36 | subItem._unsafe_memPtr = ptr;
37 | subItem._unsafe_length = itemLength;
38 | self._unsafe_nextPtr = ptr + itemLength;
39 | }
40 | else
41 | revert();
42 | }
43 |
44 | function next(Iterator memory self, bool strict) internal pure returns (RLPItem memory subItem) {
45 | subItem = next(self);
46 | if(strict && !_validate(subItem))
47 | revert();
48 | }
49 |
50 | function hasNext(Iterator memory self) internal pure returns (bool) {
51 | RLPItem memory item = self._unsafe_item;
52 | return self._unsafe_nextPtr < item._unsafe_memPtr + item._unsafe_length;
53 | }
54 |
55 | /* RLPItem */
56 |
57 | /// @dev Creates an RLPItem from an array of RLP encoded bytes.
58 | /// @param self The RLP encoded bytes.
59 | /// @return An RLPItem
60 | function toRLPItem(bytes memory self) internal pure returns (RLPItem memory) {
61 | uint len = self.length;
62 | if (len == 0) {
63 | return RLPItem(0, 0);
64 | }
65 | uint memPtr;
66 | assembly {
67 | memPtr := add(self, 0x20)
68 | }
69 | return RLPItem(memPtr, len);
70 | }
71 |
72 | /// @dev Creates an RLPItem from an array of RLP encoded bytes.
73 | /// @param self The RLP encoded bytes.
74 | /// @param strict Will revert() if the data is not RLP encoded.
75 | /// @return An RLPItem
76 | function toRLPItem(bytes memory self, bool strict) internal pure returns (RLPItem memory) {
77 | RLPItem memory item = toRLPItem(self);
78 | if(strict) {
79 | uint len = self.length;
80 | if(_payloadOffset(item) > len)
81 | revert();
82 | if(_itemLength(item._unsafe_memPtr) != len)
83 | revert();
84 | if(!_validate(item))
85 | revert();
86 | }
87 | return item;
88 | }
89 |
90 | /// @dev Check if the RLP item is null.
91 | /// @param self The RLP item.
92 | /// @return 'true' if the item is null.
93 | function isNull(RLPItem memory self) internal pure returns (bool ret) {
94 | return self._unsafe_length == 0;
95 | }
96 |
97 | /// @dev Check if the RLP item is a list.
98 | /// @param self The RLP item.
99 | /// @return 'true' if the item is a list.
100 | function isList(RLPItem memory self) internal pure returns (bool ret) {
101 | if (self._unsafe_length == 0)
102 | return false;
103 | uint memPtr = self._unsafe_memPtr;
104 | assembly {
105 | ret := iszero(lt(byte(0, mload(memPtr)), 0xC0))
106 | }
107 | }
108 |
109 | /// @dev Check if the RLP item is data.
110 | /// @param self The RLP item.
111 | /// @return 'true' if the item is data.
112 | function isData(RLPItem memory self) internal pure returns (bool ret) {
113 | if (self._unsafe_length == 0)
114 | return false;
115 | uint memPtr = self._unsafe_memPtr;
116 | assembly {
117 | ret := lt(byte(0, mload(memPtr)), 0xC0)
118 | }
119 | }
120 |
121 | /// @dev Check if the RLP item is empty (string or list).
122 | /// @param self The RLP item.
123 | /// @return 'true' if the item is null.
124 | function isEmpty(RLPItem memory self) internal pure returns (bool ret) {
125 | if(isNull(self))
126 | return false;
127 | uint b0;
128 | uint memPtr = self._unsafe_memPtr;
129 | assembly {
130 | b0 := byte(0, mload(memPtr))
131 | }
132 | return (b0 == DATA_SHORT_START || b0 == LIST_SHORT_START);
133 | }
134 |
135 | /// @dev Get the number of items in an RLP encoded list.
136 | /// @param self The RLP item.
137 | /// @return The number of items.
138 | function items(RLPItem memory self) internal pure returns (uint) {
139 | if (!isList(self))
140 | return 0;
141 | uint b0;
142 | uint memPtr = self._unsafe_memPtr;
143 | assembly {
144 | b0 := byte(0, mload(memPtr))
145 | }
146 | uint pos = memPtr + _payloadOffset(self);
147 | uint last = memPtr + self._unsafe_length - 1;
148 | uint itms;
149 | while(pos <= last) {
150 | pos += _itemLength(pos);
151 | itms++;
152 | }
153 | return itms;
154 | }
155 |
156 | /// @dev Create an iterator.
157 | /// @param self The RLP item.
158 | /// @return An 'Iterator' over the item.
159 | function iterator(RLPItem memory self) internal pure returns (Iterator memory it) {
160 | if (!isList(self))
161 | revert();
162 | uint ptr = self._unsafe_memPtr + _payloadOffset(self);
163 | it._unsafe_item = self;
164 | it._unsafe_nextPtr = ptr;
165 | }
166 |
167 | /// @dev Return the RLP encoded bytes.
168 | /// @param self The RLPItem.
169 | /// @return The bytes.
170 | function toBytes(RLPItem memory self) internal pure returns (bytes memory bts) {
171 | uint len = self._unsafe_length;
172 | bts = new bytes(len);
173 | if (len != 0) {
174 | _copyToBytes(self._unsafe_memPtr, bts, len);
175 | }
176 | }
177 |
178 | /// @dev Decode an RLPItem into bytes. This will not work if the
179 | /// RLPItem is a list.
180 | /// @param self The RLPItem.
181 | /// @return The decoded string.
182 | function toData(RLPItem memory self) internal pure returns (bytes memory bts) {
183 | if(!isData(self))
184 | revert();
185 | uint rStartPos;
186 | uint len;
187 | (rStartPos, len) = _decode(self);
188 | bts = new bytes(len);
189 | _copyToBytes(rStartPos, bts, len);
190 | }
191 |
192 | /// @dev Get the list of sub-items from an RLP encoded list.
193 | /// Warning: This is inefficient, as it requires that the list is read twice.
194 | /// @param self The RLP item.
195 | /// @return Array of RLPItems.
196 | function toList(RLPItem memory self) internal pure returns (RLPItem[] memory list) {
197 | if(!isList(self))
198 | revert();
199 | uint numItems = items(self);
200 | list = new RLPItem[](numItems);
201 | Iterator memory it = iterator(self);
202 | uint idx;
203 | while(hasNext(it)) {
204 | list[idx] = next(it);
205 | idx++;
206 | }
207 | }
208 |
209 | /// @dev Decode an RLPItem into an ascii string. This will not work if the
210 | /// RLPItem is a list.
211 | /// @param self The RLPItem.
212 | /// @return The decoded string.
213 | function toAscii(RLPItem memory self) internal pure returns (string memory str) {
214 | if(!isData(self))
215 | revert();
216 | uint rStartPos;
217 | uint len;
218 | (rStartPos, len) = _decode(self);
219 | bytes memory bts = new bytes(len);
220 | _copyToBytes(rStartPos, bts, len);
221 | str = string(bts);
222 | }
223 |
224 | /// @dev Decode an RLPItem into a uint. This will not work if the
225 | /// RLPItem is a list.
226 | /// @param self The RLPItem.
227 |     /// @return The decoded uint.
228 | function toUint(RLPItem memory self) internal pure returns (uint data) {
229 | if(!isData(self))
230 | revert();
231 | uint rStartPos;
232 | uint len;
233 | (rStartPos, len) = _decode(self);
234 | if (len > 32)
235 | revert();
236 | else if (len == 0)
237 | return 0;
238 | assembly {
239 | data := div(mload(rStartPos), exp(256, sub(32, len)))
240 | }
241 | }
242 |
243 | /// @dev Decode an RLPItem into a boolean. This will not work if the
244 | /// RLPItem is a list.
245 | /// @param self The RLPItem.
246 |     /// @return The decoded boolean.
247 | function toBool(RLPItem memory self) internal pure returns (bool data) {
248 | if(!isData(self))
249 | revert();
250 | uint rStartPos;
251 | uint len;
252 | (rStartPos, len) = _decode(self);
253 | if (len != 1)
254 | revert();
255 | uint temp;
256 | assembly {
257 | temp := byte(0, mload(rStartPos))
258 | }
259 | if (temp > 1)
260 | revert();
261 | return temp == 1 ? true : false;
262 | }
263 |
264 | /// @dev Decode an RLPItem into a byte. This will not work if the
265 | /// RLPItem is a list.
266 | /// @param self The RLPItem.
267 |     /// @return The decoded byte.
268 | function toByte(RLPItem memory self) internal pure returns (byte data) {
269 | if(!isData(self))
270 | revert();
271 | uint rStartPos;
272 | uint len;
273 | (rStartPos, len) = _decode(self);
274 | if (len != 1)
275 | revert();
276 | byte temp;
277 | assembly {
278 | temp := byte(0, mload(rStartPos))
279 | }
280 | return temp;
281 | }
282 |
283 | /// @dev Decode an RLPItem into an int. This will not work if the
284 | /// RLPItem is a list.
285 | /// @param self The RLPItem.
286 |     /// @return The decoded int.
287 | function toInt(RLPItem memory self) internal pure returns (int data) {
288 | return int(toUint(self));
289 | }
290 |
291 | /// @dev Decode an RLPItem into a bytes32. This will not work if the
292 | /// RLPItem is a list.
293 | /// @param self The RLPItem.
294 |     /// @return The decoded bytes32.
295 | function toBytes32(RLPItem memory self) internal pure returns (bytes32 data) {
296 | return bytes32(toUint(self));
297 | }
298 |
299 | /// @dev Decode an RLPItem into an address. This will not work if the
300 | /// RLPItem is a list.
301 | /// @param self The RLPItem.
302 | /// @return The decoded address.
303 | function toAddress(RLPItem memory self) internal pure returns (address data) {
304 | if(!isData(self))
305 | revert();
306 | uint rStartPos;
307 | uint len;
308 | (rStartPos, len) = _decode(self);
309 | if (len != 20)
310 | revert();
311 | assembly {
312 | data := div(mload(rStartPos), exp(256, 12))
313 | }
314 | }
315 |
316 | // Get the payload offset.
317 | function _payloadOffset(RLPItem memory self) private pure returns (uint) {
318 | if(self._unsafe_length == 0)
319 | return 0;
320 | uint b0;
321 | uint memPtr = self._unsafe_memPtr;
322 | assembly {
323 | b0 := byte(0, mload(memPtr))
324 | }
325 | if(b0 < DATA_SHORT_START)
326 | return 0;
327 | if(b0 < DATA_LONG_START || (b0 >= LIST_SHORT_START && b0 < LIST_LONG_START))
328 | return 1;
329 | if(b0 < LIST_SHORT_START)
330 | return b0 - DATA_LONG_OFFSET + 1;
331 | return b0 - LIST_LONG_OFFSET + 1;
332 | }
333 |
334 | // Get the full length of an RLP item.
335 | function _itemLength(uint memPtr) private pure returns (uint len) {
336 | uint b0;
337 | assembly {
338 | b0 := byte(0, mload(memPtr))
339 | }
340 | if (b0 < DATA_SHORT_START)
341 | len = 1;
342 | else if (b0 < DATA_LONG_START)
343 | len = b0 - DATA_SHORT_START + 1;
344 | else if (b0 < LIST_SHORT_START) {
345 | assembly {
346 | let bLen := sub(b0, 0xB7) // bytes length (DATA_LONG_OFFSET)
347 | let dLen := div(mload(add(memPtr, 1)), exp(256, sub(32, bLen))) // data length
348 | len := add(1, add(bLen, dLen)) // total length
349 | }
350 | }
351 | else if (b0 < LIST_LONG_START)
352 | len = b0 - LIST_SHORT_START + 1;
353 | else {
354 | assembly {
355 | let bLen := sub(b0, 0xF7) // bytes length (LIST_LONG_OFFSET)
356 | let dLen := div(mload(add(memPtr, 1)), exp(256, sub(32, bLen))) // data length
357 | len := add(1, add(bLen, dLen)) // total length
358 | }
359 | }
360 | }
361 |
362 | // Get start position and length of the data.
363 | function _decode(RLPItem memory self) private pure returns (uint memPtr, uint len) {
364 | if(!isData(self))
365 | revert();
366 | uint b0;
367 | uint start = self._unsafe_memPtr;
368 | assembly {
369 | b0 := byte(0, mload(start))
370 | }
371 | if (b0 < DATA_SHORT_START) {
372 | memPtr = start;
373 | len = 1;
374 | return (memPtr, len);
375 | }
376 | if (b0 < DATA_LONG_START) {
377 | len = self._unsafe_length - 1;
378 | memPtr = start + 1;
379 | } else {
380 | uint bLen;
381 | assembly {
382 | bLen := sub(b0, 0xB7) // DATA_LONG_OFFSET
383 | }
384 | len = self._unsafe_length - 1 - bLen;
385 | memPtr = start + bLen + 1;
386 | }
387 | return (memPtr, len);
388 | }
389 |
390 | // Assumes that enough memory has been allocated to store in target.
391 | function _copyToBytes(uint btsPtr, bytes memory tgt, uint btsLen) private pure {
392 | // Exploiting the fact that 'tgt' was the last thing to be allocated,
393 | // we can write entire words, and just overwrite any excess.
394 | assembly {
395 | {
396 | let words := div(add(btsLen, 31), 32)
397 | let rOffset := btsPtr
398 | let wOffset := add(tgt, 0x20)
399 | for
400 | { let i := 0 } // Start at arr + 0x20
401 | lt(i, words)
402 | { i := add(i, 1) }
403 | {
404 | let offset := mul(i, 0x20)
405 | mstore(add(wOffset, offset), mload(add(rOffset, offset)))
406 | }
407 | mstore(add(tgt, add(0x20, mload(tgt))), 0)
408 | }
409 | }
410 | }
411 |
412 | // Check that an RLP item is valid.
413 | function _validate(RLPItem memory self) private pure returns (bool ret) {
414 | // Check that RLP is well-formed.
415 | uint b0;
416 | uint b1;
417 | uint memPtr = self._unsafe_memPtr;
418 | assembly {
419 | b0 := byte(0, mload(memPtr))
420 | b1 := byte(1, mload(memPtr))
421 | }
422 | if(b0 == DATA_SHORT_START + 1 && b1 < DATA_SHORT_START)
423 | return false;
424 | return true;
425 | }
426 | }
--------------------------------------------------------------------------------
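The decoding functions above are meant to be attached with "using" and walked item by item. A minimal consumer sketch (hypothetical contract; the two-field payload layout is an assumption for illustration only, not something the library prescribes):

    pragma solidity ^0.5.12;

    import "./RLP.sol";

    // Hypothetical consumer of the RLP library: decodes an RLP list assumed
    // to contain [uint amount, address recipient].
    contract RLPConsumerExample {
        using RLP for RLP.RLPItem;
        using RLP for bytes;

        function decodePayment(bytes memory _rlp) public pure returns (uint amount, address recipient) {
            RLP.RLPItem[] memory items = _rlp.toRLPItem().toList();
            amount = items[0].toUint();
            recipient = items[1].toAddress();
        }
    }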
/contracts/libraries/SafeMath.sol:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2016-2018 Clearmatics Technologies Ltd
2 | // SPDX-License-Identifier: LGPL-3.0+
3 | pragma solidity ^0.5.12;
4 |
5 |
6 | /**
7 | * Math operations with safety checks
8 | */
9 | library SafeMath {
10 | function mul(uint a, uint b)
11 | internal pure returns (uint)
12 | {
13 | uint c = a * b;
14 | assert(a == 0 || c / a == b);
15 | return c;
16 | }
17 |
18 | function div(uint a, uint b)
19 | internal pure returns (uint)
20 | {
21 | // assert(b > 0); // Solidity automatically throws when dividing by 0
22 | uint c = a / b;
23 | // assert(a == b * c + a % b); // There is no case in which this doesn't hold
24 | return c;
25 | }
26 |
27 | function sub(uint a, uint b)
28 | internal pure returns (uint)
29 | {
30 | assert(b <= a);
31 | return a - b;
32 | }
33 |
34 | function add(uint a, uint b)
35 | internal pure returns (uint)
36 | {
37 | uint c = a + b;
38 | assert(c >= a);
39 | return c;
40 | }
41 |
42 | function max64(uint64 a, uint64 b)
43 | internal pure returns (uint64)
44 | {
45 | return a >= b ? a : b;
46 | }
47 |
48 | function min64(uint64 a, uint64 b)
49 | internal pure returns (uint64)
50 | {
51 | return a < b ? a : b;
52 | }
53 |
54 | function max256(uint256 a, uint256 b)
55 | internal pure returns (uint256)
56 | {
57 | return a >= b ? a : b;
58 | }
59 |
60 | function min256(uint256 a, uint256 b)
61 | internal pure returns (uint256)
62 | {
63 | return a < b ? a : b;
64 | }
65 | }
66 |
--------------------------------------------------------------------------------
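A short usage sketch, assuming the usual library attachment with "using SafeMath for uint"; the Counter contract below is hypothetical and only shows that add and sub assert instead of silently wrapping:

    pragma solidity ^0.5.12;

    import "./SafeMath.sol";

    // Hypothetical counter relying on SafeMath: credit reverts on overflow,
    // debit reverts when more is withdrawn than is held.
    contract Counter {
        using SafeMath for uint;

        uint public total;

        function credit(uint _amount) public {
            total = total.add(_amount); // assert(c >= a) guards against overflow
        }

        function debit(uint _amount) public {
            total = total.sub(_amount); // assert(b <= a) guards against underflow
        }
    }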
/contracts/libraries/SolidityUtils.sol:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2016-2018 Clearmatics Technologies Ltd
2 | // SPDX-License-Identifier: LGPL-3.0+
3 | pragma solidity ^0.5.12;
4 |
5 | // A library of funky data manipulation stuff
6 | library SolUtils {
7 | /*
8 | * @description copies 32 bytes from input into the output
9 | * @param output memory allocation for the extracted data (returned)
10 | * @param input byte array from which the data should be extracted
11 | * @param buf index at which the data starts within the byte array (32 is added internally to skip the length prefix)
12 | */
13 | function BytesToBytes32(bytes memory input, uint256 buf) internal pure returns (bytes32 output) {
14 | buf = buf + 32;
15 | assembly {
16 | output := mload(add(input, buf))
17 | }
18 | }
19 |
20 | /*
21 | * @description copies 20 bytes from input into the output
22 | * @param output memory allocation for the extracted data (returned)
23 | * @param input byte array from which the data should be extracted
24 | * @param buf index at which the data starts within the byte array
25 | */
26 | function BytesToBytes20(bytes memory input, uint256 buf) internal pure returns (bytes20) {
27 | bytes20 output;
28 |
29 | for (uint i = 0; i < 20; i++) {
30 | output |= bytes20(input[buf + i] & 0xFF) >> (i * 8);
31 | }
32 | return output;
33 | }
34 |
35 | /*
36 | * @description copies 20 bytes from input into the output returning an address
37 | * @param output address extracted from the input (returned)
38 | * @param input byte array from which the data should be extracted
39 | * @param buf index at which the 20-byte address starts within the byte array
40 | */
41 | function BytesToAddress(bytes memory input, uint256 buf) internal pure returns (address output) {
42 | buf = buf + 20;
43 | assembly {
44 | output := mload(add(input, buf))
45 | }
46 | }
47 |
48 | /*
49 | * @description copies output.length bytes from the input into the output
50 | * @param output memory allocation for the data you need to extract
51 | * @param input array from which the data should be extracted
52 | * @param buf index at which the data starts within the byte array
53 | */
54 | function BytesToBytes(bytes memory output, bytes memory input, uint256 buf) view internal {
55 | uint256 outputLength = output.length;
56 | buf = buf + 32; // Append 32 as we need to point past the variable type definition
57 | assembly {
58 | let ret := staticcall(3000, 4, add(input, buf), outputLength, add(output, 32), outputLength)
59 | }
60 | }
61 |
62 | function UintToString(uint _i) internal pure returns (string memory _uintAsString) {
63 | if (_i == 0) {
64 | return "0";
65 | }
66 | uint j = _i;
67 | uint len;
68 | while (j != 0) {
69 | len++;
70 | j /= 10;
71 | }
72 | bytes memory bstr = new bytes(len);
73 | uint k = len - 1;
74 | while (_i != 0) {
75 | bstr[k--] = byte(uint8(48 + _i % 10));
76 | _i /= 10;
77 | }
78 | return string(bstr);
79 | }
80 |
81 | function BoolToString(bool _b) internal pure returns (string memory) {
82 | if (_b)
83 | return "true";
84 | else
85 | return "false";
86 | }
87 |
88 | }
89 |
--------------------------------------------------------------------------------
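A hedged sketch of how a caller might use the byte-extraction helpers above; the packed layout (a 32-byte word followed by a 20-byte address) and the offsets 0 and 32 are assumptions made for this example only:

    pragma solidity ^0.5.12;

    import "./SolidityUtils.sol";

    // Hypothetical consumer: pulls a 32-byte word and then a 20-byte address
    // out of a packed byte array using the SolUtils helpers.
    contract SolUtilsExample {
        function unpack(bytes memory _blob) public pure returns (bytes32 word, address addr) {
            word = SolUtils.BytesToBytes32(_blob, 0);  // bytes [0..31] of _blob
            addr = SolUtils.BytesToAddress(_blob, 32); // bytes [32..51] of _blob
        }
    }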
/contracts/mock/MockIon.sol:
--------------------------------------------------------------------------------
1 | pragma solidity ^0.5.12;
2 |
3 | import "../Ion.sol";
4 | import "../storage/BlockStore.sol";
5 |
6 | contract MockIon is Ion {
7 |
8 | constructor(bytes32 _id) public Ion(_id) {}
9 |
10 | function registerValidationModule() public {
11 | require( isContract(msg.sender), "Caller address is not a valid contract. Please inherit the BlockStore contract for proper usage." );
12 | require( !m_registered_validation[msg.sender], "Validation module has already been registered." );
13 |
14 | m_registered_validation[msg.sender] = true;
15 | validation_modules.push(msg.sender);
16 | }
17 |
18 | function addChain(address _storageAddress, bytes32 _chainId) public {
19 | BlockStore store = BlockStore(_storageAddress);
20 | store.addChain(_chainId);
21 | }
22 |
23 | function storeBlock(address _storageAddress, bytes32 _chainId, bytes memory _blockBlob) public {
24 | BlockStore store = BlockStore(_storageAddress);
25 | store.addBlock(_chainId, _blockBlob);
26 | }
27 | }
--------------------------------------------------------------------------------
/contracts/mock/MockStorage.sol:
--------------------------------------------------------------------------------
1 | pragma solidity ^0.5.12;
2 |
3 | import "../storage/BlockStore.sol";
4 |
5 | /*
6 | Mock Block Store contract
7 |
8 | This mock contract is used to simulate interactions and to assert certain return data when other contracts under
9 | test interact with it. Use as a tool for testing ONLY.
10 |
11 | This is not an accurate representation of a block store contract and should not be used in any way as a
12 | representation of a block store contract. Please refer to BlockStore.sol and inherit functionality from that base
13 | contract and see EthereumStore.sol for more implementation details.
14 |
15 | */
16 |
17 | contract MockStorage is BlockStore {
18 |
19 | constructor(address _ionAddr) BlockStore(_ionAddr) public {}
20 |
21 | event AddedBlock();
22 | function addBlock(bytes32 _chainId, bytes memory _blockBlob) public {
23 | emit AddedBlock();
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/contracts/mock/MockValidation.sol:
--------------------------------------------------------------------------------
1 | pragma solidity ^0.5.12;
2 |
3 | import "../IonCompatible.sol";
4 |
5 | contract MockValidation is IonCompatible {
6 | constructor (address _ionAddr) IonCompatible(_ionAddr) public {}
7 |
8 | function register() public returns (bool) {
9 | ion.registerValidationModule();
10 | return true;
11 | }
12 |
13 | function SubmitBlock(address _storageAddress, bytes32 _chainId, bytes memory _blockBlob) public {
14 | ion.storeBlock(_storageAddress, _chainId, _blockBlob);
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
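The three mocks are intended to be wired together when exercising the storage and validation flow in isolation; a hedged sketch of that wiring, with placeholder chain ids and a placeholder block blob (the actual flows are driven from the JavaScript tests under test/):

    pragma solidity ^0.5.12;

    import "./MockIon.sol";
    import "./MockStorage.sol";
    import "./MockValidation.sol";

    // Hypothetical wiring: MockValidation registers itself with MockIon, a
    // chain is added to MockStorage through MockIon, and a block blob is
    // pushed through, causing MockStorage to emit AddedBlock.
    contract MockWiringExample {
        function run() public {
            MockIon ion = new MockIon(bytes32("home-chain"));
            MockStorage store = new MockStorage(address(ion));
            MockValidation validation = new MockValidation(address(ion));

            validation.register();                                // records the validation module with MockIon
            ion.addChain(address(store), bytes32("other-chain")); // registers the remote chain with the store
            validation.SubmitBlock(address(store), bytes32("other-chain"), hex"deadbeef");
        }
    }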
/contracts/storage/BlockStore.sol:
--------------------------------------------------------------------------------
1 | pragma solidity ^0.5.12;
2 |
3 | import "../IonCompatible.sol";
4 |
5 | contract BlockStore is IonCompatible {
6 | bytes32[] public registeredChains;
7 |
8 | mapping (bytes32 => bool) public m_chains;
9 |
10 | modifier onlyIon() {
11 | require(msg.sender == address(ion), "Caller is not the registered Ion contract");
12 | _;
13 | }
14 |
15 | /*
16 | * onlyRegisteredChains
17 | * param: _id (bytes32) Unique id of chain supplied to function
18 | *
19 | * Modifier that checks if the provided chain id has been registered to this contract
20 | */
21 | modifier onlyRegisteredChains(bytes32 _chainId) {
22 | require(m_chains[_chainId], "Chain is not registered");
23 | _;
24 | }
25 |
26 | /*
27 | * Constructor
28 | * param: _ionAddr (address) Address of the Ion contract this block store is bound to
29 | *
30 | * The store is bound to a single Ion contract via IonCompatible; only that Ion contract
31 | * may register chains or add blocks to this store, as enforced by the onlyIon modifier
32 | * above.
33 | */
34 | constructor(address _ionAddr) IonCompatible(_ionAddr) public {}
35 |
36 | /*
37 | * addChain
38 | * param: id (bytes32) Unique id of another chain to interoperate with
39 | *
40 | * Supplied with an id of another chain, checks if this id already exists in the known set of ids
41 | * and adds it to the list of known m_chains.
42 | *
43 | * Should be triggered by a validation contract's RegisterChain() call, which reaches this function via Ion
44 | */
45 | function addChain(bytes32 _chainId) onlyIon public returns (bool) {
46 | require( _chainId != ion.chainId(), "Cannot add this chain id to chain register" );
47 | require(!m_chains[_chainId], "Chain already exists" );
48 |
49 | m_chains[_chainId] = true;
50 | registeredChains.push(_chainId);
51 |
52 | return true;
53 | }
54 |
55 | function addBlock(bytes32 _chainId, bytes memory _blockBlob) public;
56 | }
57 |
--------------------------------------------------------------------------------
/contracts/storage/EthereumStore.sol:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2016-2018 Clearmatics Technologies Ltd
2 | // SPDX-License-Identifier: LGPL-3.0+
3 | pragma solidity ^0.5.12;
4 |
5 | import "../libraries/ECVerify.sol";
6 | import "../libraries/RLP.sol";
7 | import "../libraries/PatriciaTrie.sol";
8 | import "../libraries/SolidityUtils.sol";
9 | import "./BlockStore.sol";
10 |
11 | contract EthereumStore is BlockStore {
12 | using RLP for RLP.RLPItem;
13 | using RLP for RLP.Iterator;
14 | using RLP for bytes;
15 |
16 | /*
17 | * @description BlockHeader struct containing trie root hashes for tx verifications
18 | */
19 | struct BlockHeader {
20 | bytes32 txRootHash;
21 | bytes32 receiptRootHash;
22 | }
23 |
24 | mapping (bytes32 => bool) public m_blockhashes;
25 | mapping (bytes32 => BlockHeader) public m_blockheaders;
26 |
27 | enum ProofType { TX, RECEIPT, ROOTS }
28 |
29 | event BlockAdded(bytes32 chainId, bytes32 blockHash);
30 | event VerifiedProof(bytes32 chainId, bytes32 blockHash, uint proofType);
31 |
32 | constructor(address _ionAddr) BlockStore(_ionAddr) public {}
33 |
34 | /*
35 | * onlyExistingBlocks
36 | * param: _id (bytes32) Unique id of chain supplied to function
37 | * param: _hash (bytes32) Block hash which needs validation
38 | *
39 | * Modifier that checks if the provided block hash has been verified by the validation contract
40 | */
41 | modifier onlyExistingBlocks(bytes32 _hash) {
42 | require(m_blockhashes[_hash], "Block does not exist for chain");
43 | _;
44 | }
45 |
46 |
47 | /*
48 | * @description when a block is submitted the header must be added to a mapping of blockhashes and m_chains to blockheaders
49 | * @param _chainId ID of the chain the block is from
50 | * @param _blockBlob Bytes blob of the RLP-encoded block header being added
51 | * (the block hash is derived internally as keccak256(_blockBlob))
52 | */
53 | function addBlock(bytes32 _chainId, bytes memory _blockBlob)
54 | public
55 | onlyIon
56 | onlyRegisteredChains(_chainId)
57 | {
58 | bytes32 blockHash = keccak256(_blockBlob);
59 | require(!m_blockhashes[blockHash], "Block already exists" );
60 |
61 | RLP.RLPItem[] memory header = _blockBlob.toRLPItem().toList();
62 | require(header.length == 15, "Block Header parameter mismatch");
63 |
64 | m_blockhashes[blockHash] = true;
65 | m_blockheaders[blockHash].txRootHash = header[4].toBytes32();
66 | m_blockheaders[blockHash].receiptRootHash = header[5].toBytes32();
67 |
68 | emit BlockAdded(_chainId, blockHash);
69 | }
70 |
71 | function CheckProofs(bytes32 _chainId, bytes32 _blockHash, bytes memory _proof) public returns (bytes memory) {
72 | RLP.RLPItem[] memory proof = _proof.toRLPItem().toList();
73 |
74 | require(proof.length == 5, "Malformed proof");
75 |
76 | assert(CheckRootsProof(_chainId, _blockHash, proof[2].toBytes(), proof[4].toBytes()));
77 | assert(CheckTxProof(_chainId, _blockHash, proof[1].toBytes(), proof[2].toBytes(), proof[0].toBytes()));
78 | assert(CheckReceiptProof(_chainId, _blockHash, proof[3].toBytes(), proof[4].toBytes(), proof[0].toBytes()));
79 |
80 | return proof[3].toBytes();
81 | }
82 |
83 | /*
84 | * CheckTxProof
85 | * param: _id (bytes32) Unique id of chain submitting block from
86 | * param: _blockHash (bytes32) Block hash of block being submitted
87 | * param: _value (bytes) RLP-encoded transaction object array with fields defined as: https://github.com/ethereumjs/ethereumjs-tx/blob/0358fad36f6ebc2b8bea441f0187f0ff0d4ef2db/index.js#L50
88 | * param: _parentNodes (bytes) RLP-encoded array of all relevant nodes from root node to node to prove
89 | * param: _path (bytes) Byte array of the path to the node to be proved
90 | *
91 | * emits: VerifiedProof(chainId, blockHash, proofType)
92 | * chainId: (bytes32) hash of the chain verifying proof against
93 | * blockHash: (bytes32) hash of the block verifying proof against
94 | * proofType: (uint) enum of proof type
95 | *
96 | * All data associated with the proof must be constructed and provided to this function. Modifiers restrict execution
97 | * of this function to only allow if the chain the proof is for is registered to this contract and if the block that
98 | * the proof is for has been submitted.
99 | */
100 | function CheckTxProof(
101 | bytes32 _chainId,
102 | bytes32 _blockHash,
103 | bytes memory _value,
104 | bytes memory _parentNodes,
105 | bytes memory _path
106 | )
107 | onlyRegisteredChains(_chainId)
108 | onlyExistingBlocks(_blockHash)
109 | internal
110 | returns (bool)
111 | {
112 | verifyProof(_value, _parentNodes, _path, m_blockheaders[_blockHash].txRootHash);
113 |
114 | emit VerifiedProof(_chainId, _blockHash, uint(ProofType.TX));
115 | return true;
116 | }
117 |
118 | /*
119 | * CheckReceiptProof
120 | * param: _id (bytes32) Unique id of chain submitting block from
121 | * param: _blockHash (bytes32) Block hash of block being submitted
122 | * param: _value (bytes) RLP-encoded receipt object array with fields defined as: https://github.com/ethereumjs/ethereumjs-tx/blob/0358fad36f6ebc2b8bea441f0187f0ff0d4ef2db/index.js#L50
123 | * param: _parentNodes (bytes) RLP-encoded array of all relevant nodes from root node to node to prove
124 | * param: _path (bytes) Byte array of the path to the node to be proved
125 | *
126 | * emits: VerifiedProof(chainId, blockHash, proofType)
127 | * chainId: (bytes32) hash of the chain verifying proof against
128 | * blockHash: (bytes32) hash of the block verifying proof against
129 | * proofType: (uint) enum of proof type
130 | *
131 | * All data associated with the proof must be constructed and provided to this function. Modifiers restrict execution
132 | * of this function to only allow if the chain the proof is for is registered to this contract and if the block that
133 | * the proof is for has been submitted.
134 | */
135 | function CheckReceiptProof(
136 | bytes32 _chainId,
137 | bytes32 _blockHash,
138 | bytes memory _value,
139 | bytes memory _parentNodes,
140 | bytes memory _path
141 | )
142 | onlyRegisteredChains(_chainId)
143 | onlyExistingBlocks(_blockHash)
144 | internal
145 | returns (bool)
146 | {
147 | verifyProof(_value, _parentNodes, _path, m_blockheaders[_blockHash].receiptRootHash);
148 |
149 | emit VerifiedProof(_chainId, _blockHash, uint(ProofType.RECEIPT));
150 | return true;
151 | }
152 |
153 | /*
154 | * CheckRootsProof
155 | * param: _id (bytes32) Unique id of chain submitting block from
156 | * param: _blockHash (bytes32) Block hash of block being submitted
157 | * param: _txNodes (bytes) RLP-encoded relevant nodes of the Tx trie
158 | * param: _receiptNodes (bytes) RLP-encoded relevant nodes of the Receipt trie
159 | *
160 | * emits: VerifiedProof(chainId, blockHash, proofType)
161 | * chainId: (bytes32) hash of the chain verifying proof against
162 | * blockHash: (bytes32) hash of the block verifying proof against
163 | * proofType: (uint) enum of proof type
164 | *
165 | * All data associated with the proof must be constructed and provided to this function. Modifiers restrict execution
166 | * of this function to only allow if the chain the proof is for is registered to this contract and if the block that
167 | * the proof is for has been submitted.
168 | */
169 | function CheckRootsProof(
170 | bytes32 _chainId,
171 | bytes32 _blockHash,
172 | bytes memory _txNodes,
173 | bytes memory _receiptNodes
174 | )
175 | onlyRegisteredChains(_chainId)
176 | onlyExistingBlocks(_blockHash)
177 | internal
178 | returns (bool)
179 | {
180 | assert( m_blockheaders[_blockHash].txRootHash == getRootNodeHash(_txNodes) );
181 | assert( m_blockheaders[_blockHash].receiptRootHash == getRootNodeHash(_receiptNodes) );
182 |
183 | emit VerifiedProof(_chainId, _blockHash, uint(ProofType.ROOTS));
184 | return true;
185 | }
186 |
187 | function verifyProof(bytes memory _value, bytes memory _parentNodes, bytes memory _path, bytes32 _hash) internal {
188 | assert( PatriciaTrie.verifyProof(_value, _parentNodes, _path, _hash) );
189 | }
190 |
191 | /*
192 | ========================================================================================================================
193 |
194 | Helper Functions
195 |
196 | ========================================================================================================================
197 | */
198 |
199 | /*
200 | * @description returns the root node of an RLP encoded Patricia Trie
201 | * @param _rlpNodes RLP encoded trie
202 | * @returns root hash
203 | */
204 | function getRootNodeHash(bytes memory _rlpNodes) private pure returns (bytes32) {
205 | RLP.RLPItem[] memory nodeList = _rlpNodes.toRLPItem().toList();
206 |
207 | bytes memory b_nodeRoot = RLP.toBytes(nodeList[0]);
208 |
209 | return keccak256(b_nodeRoot);
210 | }
211 |
212 |
213 | }
214 |
215 |
--------------------------------------------------------------------------------
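For reference, CheckProofs above reads _proof as the RLP encoding of a five-element list (the indices follow directly from the function body): [0] the trie path of the transaction, [1] the RLP-encoded transaction, [2] the parent nodes of the transaction-trie branch, [3] the RLP-encoded receipt, and [4] the parent nodes of the receipt-trie branch; on success the receipt ([3]) is returned. A minimal, hypothetical consumer:

    pragma solidity ^0.5.12;

    import "./EthereumStore.sol";

    // Hypothetical consumer: forwards a pre-assembled proof blob to the store
    // and receives the RLP-encoded receipt back when all three checks pass.
    contract ProofConsumer {
        EthereumStore store;

        constructor(address _store) public {
            store = EthereumStore(_store);
        }

        function useProof(bytes32 _chainId, bytes32 _blockHash, bytes memory _proof) public returns (bytes memory receipt) {
            receipt = store.CheckProofs(_chainId, _blockHash, _proof);
            // a real consumer would now decode the receipt and inspect its logs
        }
    }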
/contracts/storage/FabricStore.sol:
--------------------------------------------------------------------------------
1 | pragma solidity ^0.5.12;
2 |
3 | import "./BlockStore.sol";
4 | import "../libraries/RLP.sol";
5 | import "../libraries/SolidityUtils.sol";
6 |
7 | contract FabricStore is BlockStore {
8 | using RLP for RLP.RLPItem;
9 | using RLP for RLP.Iterator;
10 | using RLP for bytes;
11 |
12 | struct Chain {
13 | bytes32 id;
14 | mapping (string => Channel) m_channels;
15 | }
16 |
17 | struct Channel {
18 | string id;
19 | mapping (string => bool) blocks;
20 | mapping (string => Block) m_blocks;
21 | mapping (string => Transaction) m_transactions;
22 | mapping (string => bool) m_transactions_exist;
23 | mapping (string => State) m_state;
24 | }
25 |
26 | struct Block {
27 | uint number;
28 | string hash;
29 | string prevHash;
30 | string dataHash;
31 | uint timestamp_s;
32 | uint timestamp_nanos;
33 | string[] transactions;
34 | }
35 |
36 | struct Transaction {
37 | string id;
38 | string blockHash;
39 | string[] namespaces;
40 | mapping (string => Namespace) m_nsrw;
41 | }
42 |
43 | struct Namespace {
44 | string namespace;
45 | ReadSet[] reads;
46 | WriteSet[] writes;
47 | }
48 |
49 | struct ReadSet {
50 | string key;
51 | RSVersion version;
52 | }
53 |
54 | struct RSVersion {
55 | uint blockNo;
56 | uint txNo;
57 | }
58 |
59 | struct WriteSet {
60 | string key;
61 | bool isDelete;
62 | string value;
63 | }
64 |
65 | struct State {
66 | string key;
67 | RSVersion version;
68 | string value;
69 | }
70 |
71 | mapping (bytes32 => Chain) public m_networks;
72 |
73 | constructor(address _ionAddr) BlockStore(_ionAddr) public {}
74 |
75 | event BlockAdded(bytes32 chainId, string channelId, string blockHash);
76 |
77 | function addChain(bytes32 _chainId) onlyIon public returns (bool) {
78 | require(super.addChain(_chainId), "Storage addChain parent call failed");
79 |
80 | Chain storage chain = m_networks[_chainId];
81 | chain.id = _chainId;
82 |
83 | return true;
84 | }
85 |
86 | // Function name is inaccurate for Fabric due to blocks being a sub-structure to a channel
87 | // Will need refactoring
88 | function addBlock(bytes32 _chainId, bytes memory _blockBlob)
89 | public
90 | onlyIon
91 | onlyRegisteredChains(_chainId)
92 | {
93 | RLP.RLPItem[] memory data = _blockBlob.toRLPItem().toList();
94 |
95 | // Iterate all channel objects in the data structure
96 | for (uint i = 0; i < data.length; i++) {
97 | decodeChannelObject(_chainId, data[i].toBytes());
98 | }
99 | }
100 |
101 | function decodeChannelObject(bytes32 _chainId, bytes memory _channelRLP) internal {
102 | RLP.RLPItem[] memory channelRLP = _channelRLP.toRLPItem().toList();
103 |
104 | string memory channelId = channelRLP[0].toAscii();
105 | Channel storage channel = m_networks[_chainId].m_channels[channelId];
106 |
107 | // Currently adds the channel if it does not exist. This may need changing.
108 | if (keccak256(abi.encodePacked(channel.id)) == keccak256(abi.encodePacked(""))) {
109 | channel.id = channelId;
110 | }
111 |
112 | // RLP.RLPItem[] memory blocksRLP = channelRLP[1].toList();
113 | //
114 | // // Iterate all blocks in the channel structure. Currently not used as we only focus on parsing single blocks
115 | // for (uint i = 0; i < blocksRLP.length; i++) {
116 | // Block memory block = decodeBlockObject(_chainId, channelId, channelRLP[1].toBytes());
117 | // require(!channel.blocks[block.hash], "Block with identical hash already exists");
118 | // channel.blocks[block.hash] = true;
119 | // channel.m_blocks[block.hash] = block;
120 | //
121 | // emit BlockAdded(_chainId, channelId, block.hash);
122 | // }
123 |
124 | Block memory blk = decodeBlockObject(_chainId, channelId, channelRLP[1].toBytes());
125 | require(!channel.blocks[blk.hash], "Block with identical hash already exists");
126 |
127 | mutateState(_chainId, channelId, blk);
128 |
129 | channel.blocks[blk.hash] = true;
130 | channel.m_blocks[blk.hash] = blk;
131 |
132 | emit BlockAdded(_chainId, channelId, blk.hash);
133 | }
134 |
135 | function decodeBlockObject(bytes32 _chainId, string memory _channelId, bytes memory _blockRLP) internal returns (Block memory) {
136 | RLP.RLPItem[] memory blockRLP = _blockRLP.toRLPItem().toList();
137 |
138 | string memory blockHash = blockRLP[0].toAscii();
139 |
140 | Block memory blk;
141 |
142 | blk.number = blockRLP[1].toUint();
143 | blk.hash = blockHash;
144 | blk.prevHash = blockRLP[2].toAscii();
145 | blk.dataHash = blockRLP[3].toAscii();
146 | blk.timestamp_s = blockRLP[4].toUint();
147 | blk.timestamp_nanos = blockRLP[5].toUint();
148 |
149 | RLP.RLPItem[] memory txnsRLP = blockRLP[6].toList();
150 |
151 | blk.transactions = new string[](txnsRLP.length);
152 |
153 | // Iterate all transactions in the block
154 | for (uint i = 0; i < txnsRLP.length; i++) {
155 | string memory txId = decodeTxObject(txnsRLP[i].toBytes(), _chainId, _channelId);
156 | require(!isTransactionExists(_chainId, _channelId, txId), "Transaction already exists");
157 | blk.transactions[i] = txId;
158 | injectBlockHashToTx(_chainId, _channelId, txId, blockHash);
159 | flagTx(_chainId, _channelId, txId);
160 | }
161 |
162 | return blk;
163 | }
164 |
165 | function decodeTxObject(bytes memory _txRLP, bytes32 _chainId, string memory _channelId) internal returns (string memory) {
166 | RLP.RLPItem[] memory txRLP = _txRLP.toRLPItem().toList();
167 |
168 | Transaction storage txn = m_networks[_chainId].m_channels[_channelId].m_transactions[txRLP[0].toAscii()];
169 | txn.id = txRLP[0].toAscii();
170 |
171 | RLP.RLPItem[] memory namespacesRLP = txRLP[1].toList();
172 |
173 | // Iterate all namespace rwsets in the transaction
174 | for (uint i = 0; i < namespacesRLP.length; i++) {
175 | RLP.RLPItem[] memory nsrwRLP = namespacesRLP[i].toList();
176 |
177 | Namespace storage namespace = txn.m_nsrw[nsrwRLP[0].toAscii()];
178 | namespace.namespace = nsrwRLP[0].toAscii();
179 | txn.namespaces.push(nsrwRLP[0].toAscii());
180 |
181 | // Iterate all read sets in the namespace
182 | RLP.RLPItem[] memory readsetsRLP = nsrwRLP[1].toList();
183 | for (uint j = 0; j < readsetsRLP.length; j++) {
184 | namespace.reads.push(decodeReadset(readsetsRLP[j].toBytes()));
185 | }
186 |
187 | // Iterate all write sets in the namespace
188 | RLP.RLPItem[] memory writesetsRLP = nsrwRLP[2].toList();
189 | for (uint k = 0; k < writesetsRLP.length; k++) {
190 | namespace.writes.push(decodeWriteset(writesetsRLP[k].toBytes()));
191 | }
192 | }
193 |
194 | return txRLP[0].toAscii();
195 | }
196 |
197 | function mutateState(bytes32 _chainId, string memory _channelId, Block memory _blk) internal {
198 | string[] memory txIds = _blk.transactions;
199 |
200 | // Iterate across all transactions
201 | for (uint i = 0; i < txIds.length; i++) {
202 | Transaction storage txn = m_networks[_chainId].m_channels[_channelId].m_transactions[txIds[i]];
203 |
204 | // Iterate across all namespaces
205 | for (uint j = 0; j < txn.namespaces.length; j++) {
206 | string storage namespace = txn.namespaces[j];
207 |
208 | // Iterate across all writesets and check readset version of each write key against stored version
209 | for (uint k = 0; k < txn.m_nsrw[namespace].writes.length; k++) {
210 | State storage state = m_networks[_chainId].m_channels[_channelId].m_state[txn.m_nsrw[namespace].writes[k].key];
211 |
212 | if (keccak256(abi.encodePacked(state.key)) == keccak256(abi.encodePacked(txn.m_nsrw[namespace].writes[k].key))) {
213 | if (!isExpectedReadVersion(txn.m_nsrw[namespace], state.version, state.key))
214 | continue;
215 | }
216 |
217 | state.key = txn.m_nsrw[namespace].writes[k].key;
218 | state.version = RSVersion(_blk.number, i);
219 | state.value = txn.m_nsrw[namespace].writes[k].value;
220 | }
221 | }
222 | }
223 | }
224 |
225 | function injectBlockHashToTx(bytes32 _chainId, string memory _channelId, string memory _txId, string memory _blockHash) internal {
226 | Transaction storage txn = m_networks[_chainId].m_channels[_channelId].m_transactions[_txId];
227 | txn.blockHash = _blockHash;
228 | }
229 |
230 | function flagTx(bytes32 _chainId, string memory _channelId, string memory _txId) internal {
231 | m_networks[_chainId].m_channels[_channelId].m_transactions_exist[_txId] = true;
232 | }
233 |
234 | function decodeReadset(bytes memory _readsetRLP) internal pure returns (ReadSet memory) {
235 | RLP.RLPItem[] memory readsetRLP = _readsetRLP.toRLPItem().toList();
236 |
237 | string memory key = readsetRLP[0].toAscii();
238 |
239 | RLP.RLPItem[] memory rsv = readsetRLP[1].toList();
240 |
241 | uint blockNo = rsv[0].toUint();
242 | uint txNo = 0;
243 |
244 | if (rsv.length > 1) {
245 | txNo = rsv[1].toUint();
246 | }
247 | RSVersion memory version = RSVersion(blockNo, txNo);
248 |
249 | return ReadSet(key, version);
250 | }
251 |
252 | function decodeWriteset(bytes memory _writesetRLP) internal pure returns (WriteSet memory){
253 | RLP.RLPItem[] memory writesetRLP = _writesetRLP.toRLPItem().toList();
254 |
255 | string memory key = writesetRLP[0].toAscii();
256 | string memory value = writesetRLP[2].toAscii();
257 |
258 | bool isDelete = false;
259 | string memory isDeleteStr = writesetRLP[1].toAscii();
260 | if (keccak256(abi.encodePacked(isDeleteStr)) == keccak256(abi.encodePacked("true"))) {
261 | isDelete = true;
262 | }
263 |
264 | return WriteSet(key, isDelete, value);
265 | }
266 |
267 | function isExpectedReadVersion(Namespace memory _namespace, RSVersion memory _version, string memory _key) internal pure returns (bool) {
268 | ReadSet[] memory reads = _namespace.reads;
269 |
270 | for (uint i = 0; i < reads.length; i++) {
271 | ReadSet memory readset = reads[i];
272 |
273 | if (keccak256(abi.encodePacked(readset.key)) == keccak256(abi.encodePacked(_key)))
274 | return isSameVersion(readset.version, _version);
275 | }
276 |
277 | return false;
278 | }
279 |
280 | function isSameVersion(RSVersion memory _v1, RSVersion memory _v2) internal pure returns (bool) {
281 | if (_v1.blockNo != _v2.blockNo)
282 | return false;
283 |
284 | if (_v1.txNo != _v2.txNo)
285 | return false;
286 |
287 | return true;
288 | }
289 |
290 | function getBlock(bytes32 _chainId, string memory _channelId, string memory _blockHash) public view returns (uint, string memory, string memory, string memory, uint, uint, string memory) {
291 | Block storage blk = m_networks[_chainId].m_channels[_channelId].m_blocks[_blockHash];
292 |
293 | require(keccak256(abi.encodePacked(blk.hash)) != keccak256(abi.encodePacked("")), "Block does not exist.");
294 |
295 | string memory txs = blk.transactions[0];
296 |
297 | for (uint i = 1; i < blk.transactions.length; i++) {
298 | txs = string(abi.encodePacked(txs, ",", blk.transactions[i]));
299 | }
300 |
301 | return (blk.number, blk.hash, blk.prevHash, blk.dataHash, blk.timestamp_s, blk.timestamp_nanos, txs);
302 | }
303 |
304 | function getTransaction(bytes32 _chainId, string memory _channelId, string memory _txId) public view returns (string memory, string memory) {
305 | Transaction storage txn = m_networks[_chainId].m_channels[_channelId].m_transactions[_txId];
306 |
307 | require(isTransactionExists(_chainId, _channelId, _txId), "Transaction does not exist.");
308 |
309 | string memory ns = txn.namespaces[0];
310 |
311 | for (uint i = 1; i < txn.namespaces.length; i++) {
312 | ns = string(abi.encodePacked(ns, ",", txn.namespaces[i]));
313 | }
314 |
315 | return (txn.blockHash, ns);
316 | }
317 |
318 | function isTransactionExists(bytes32 _chainId, string memory _channelId, string memory _txId) public view returns (bool) {
319 | return m_networks[_chainId].m_channels[_channelId].m_transactions_exist[_txId];
320 | }
321 |
322 | function getNSRW(bytes32 _chainId, string memory _channelId, string memory _txId, string memory _namespace) public view returns (string memory, string memory) {
323 | Namespace storage ns = m_networks[_chainId].m_channels[_channelId].m_transactions[_txId].m_nsrw[_namespace];
324 |
325 | require(keccak256(abi.encodePacked(ns.namespace)) != keccak256(abi.encodePacked("")), "Namespace does not exist.");
326 |
327 | string memory reads;
328 | for (uint i = 0; i < ns.reads.length; i++) {
329 | RSVersion storage version = ns.reads[i].version;
330 | reads = string(abi.encodePacked(reads, "{ key: ", ns.reads[i].key, ", version: { blockNo: ", SolUtils.UintToString(version.blockNo), ", txNo: ", SolUtils.UintToString(version.txNo), " } } "));
331 | }
332 |
333 | string memory writes;
334 | for (uint j = 0; j < ns.writes.length; j++) {
335 | writes = string(abi.encodePacked(writes, "{ key: ", ns.writes[j].key, ", isDelete: ", SolUtils.BoolToString(ns.writes[j].isDelete), ", value: ", ns.writes[j].value, " } "));
336 | }
337 |
338 | return (reads, writes);
339 | }
340 |
341 | function getState(bytes32 _chainId, string memory _channelId, string memory _key) public view returns (uint, uint, string memory) {
342 | State storage state = m_networks[_chainId].m_channels[_channelId].m_state[_key];
343 |
344 | require(keccak256(abi.encodePacked(state.key)) != keccak256(abi.encodePacked("")), "Key unrecognised.");
345 |
346 | return (state.version.blockNo, state.version.txNo, state.value);
347 | }
348 | }
--------------------------------------------------------------------------------
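For orientation, the nesting that addBlock and the decode functions above expect in _blockBlob can be read directly off the code; as a plain sketch (each bracketed item is itself RLP-encoded):

    _blockBlob = [ channel, ... ]
    channel    = [ channelId, block ]
    block      = [ hash, number, prevHash, dataHash, timestamp_s, timestamp_nanos, [ tx, ... ] ]
    tx         = [ txId, [ nsrw, ... ] ]
    nsrw       = [ namespace, [ readset, ... ], [ writeset, ... ] ]
    readset    = [ key, [ blockNo, txNo ] ]   (txNo may be omitted and defaults to 0)
    writeset   = [ key, isDelete ("true"/"false"), value ]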
/contracts/test/PatriciaTrieTest.sol:
--------------------------------------------------------------------------------
1 | pragma solidity ^0.5.12;
2 |
3 | import "../libraries/PatriciaTrie.sol";
4 |
5 | contract PatriciaTrieTest {
6 | event Result(bool result);
7 | function testVerify(bytes memory _value, bytes memory _parentNodes, bytes memory _path, bytes32 _root) public returns (bool) {
8 | bool result = PatriciaTrie.verifyProof(_value, _parentNodes, _path, _root);
9 | emit Result(result);
10 | return result;
11 | }
12 | }
--------------------------------------------------------------------------------
/contracts/validation/Base.sol:
--------------------------------------------------------------------------------
1 | pragma solidity ^0.5.12;
2 |
3 | import "../IonCompatible.sol";
4 | import "../storage/BlockStore.sol";
5 |
6 |
7 | contract Base is IonCompatible {
8 | constructor (address _ionAddr) IonCompatible(_ionAddr) public {}
9 |
10 | function register() public returns (bool) {
11 | ion.registerValidationModule();
12 | return true;
13 | }
14 |
15 | function RegisterChain(bytes32 _chainId, address _storeAddr) public {
16 | require( _chainId != ion.chainId(), "Cannot add this chain id to chain register" );
17 | ion.addChain(_storeAddr, _chainId);
18 | }
19 |
20 | function SubmitBlock(bytes32 _chainId, bytes memory _rlpBlock, address _storageAddr) public {
21 | storeBlock(_chainId, _rlpBlock, _storageAddr);
22 | }
23 |
24 | function storeBlock(
25 | bytes32 _chainId,
26 | bytes memory _rlpBlock,
27 | address _storageAddr
28 | ) internal {
29 | // Add block to Ion
30 | ion.storeBlock(_storageAddr, _chainId, _rlpBlock);
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/contracts/validation/Clique.sol:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2016-2018 Clearmatics Technologies Ltd
2 | // SPDX-License-Identifier: LGPL-3.0+
3 | pragma solidity ^0.5.12;
4 |
5 | import "../libraries/ECVerify.sol";
6 | import "../libraries/RLP.sol";
7 | import "../libraries/SolidityUtils.sol";
8 | import "../IonCompatible.sol";
9 | import "../storage/BlockStore.sol";
10 |
11 | /*
12 | Smart contract for validation of blocks that use the Clique PoA consensus algorithm
13 | Blocks must be submitted sequentially due to the voting mechanism of Clique.
14 | */
15 |
16 | contract Clique is IonCompatible {
17 | using RLP for RLP.RLPItem;
18 | using RLP for RLP.Iterator;
19 | using RLP for bytes;
20 |
21 | /*
22 | * @description persists the last submitted block of a chain being validated
23 | */
24 | struct BlockHeader {
25 | uint256 blockNumber;
26 | bytes32 blockHash;
27 | bytes32 prevBlockHash;
28 | bytes32 txRootHash;
29 | bytes32 receiptRootHash;
30 | }
31 |
32 | struct Metadata {
33 | address[] validators;
34 | mapping (address => bool) m_validators;
35 | mapping (address => uint256) m_proposals;
36 | uint256 threshold;
37 | }
38 |
39 | event GenesisCreated(bytes32 chainId, bytes32 blockHash);
40 | event BlockSubmitted(bytes32 chainId, bytes32 blockHash);
41 |
42 | /*
43 | * onlyRegisteredChains
44 | * param: _id (bytes32) Unique id of chain supplied to function
45 | *
46 | * Modifier that checks if the provided chain id has been registered to this contract
47 | */
48 | modifier onlyRegisteredChains(bytes32 _id) {
49 | require(chains[_id], "Chain is not registered");
50 | _;
51 | }
52 |
53 | mapping (bytes32 => bool) public chains;
54 | mapping (bytes32 => mapping (bytes32 => bool)) public m_blockhashes;
55 | mapping (bytes32 => mapping (bytes32 => BlockHeader)) public m_blockheaders;
56 | mapping (bytes32 => mapping (bytes32 => Metadata)) public m_blockmetadata;
57 | mapping (bytes32 => bytes32[]) public heads;
58 |
59 | constructor (address _ionAddr) IonCompatible(_ionAddr) public {}
60 |
61 | /* =====================================================================================================================
62 |
63 | Public Functions
64 |
65 | =====================================================================================================================
66 | */
67 | function register() public returns (bool) {
68 | ion.registerValidationModule();
69 | return true;
70 | }
71 |
72 | /*
73 | * RegisterChain
74 | * param: _chainId (bytes32) Unique id of another chain to interoperate with
75 | * param: _validators (address[]) Array containing the validators at the genesis block
76 | * param: _genesisBlockHash (bytes32) Hash of the genesis block for the chain being registered with Ion
77 | * param: _storeAddr (address) Address of block store contract to register chain to
78 | *
79 | * Registers knowledge of the id of another interoperable chain requiring the genesis block metadata. Allows
80 | * the initialising of genesis blocks and their validator sets for chains. Multiple may be submitted and built upon
81 | * and is not opinionated on how they are used.
82 | */
83 | function RegisterChain(bytes32 _chainId, address[] memory _validators, bytes32 _genesisBlockHash, address _storeAddr) public {
84 | require( _chainId != ion.chainId(), "Cannot add this chain id to chain register" );
85 |
86 | if (chains[_chainId]) {
87 | require( !m_blockhashes[_chainId][_genesisBlockHash], "Chain already exists with identical genesis" );
88 | } else {
89 | chains[_chainId] = true;
90 | ion.addChain(_storeAddr, _chainId);
91 | }
92 |
93 | setGenesisBlock(_chainId, _validators, _genesisBlockHash);
94 | }
95 |
96 | /*
97 | * SubmitBlock
98 | * param: _chainId (bytes32) Unique id of chain submitting block from
99 | * param: _rlpUnsignedBlockHeader (bytes) RLP-encoded byte array of the block header from other chain without the signature in extraData
100 | * param: _rlpSignedBlockHeader (bytes) RLP-encoded byte array of the block header from other chain with the signature in extraData
101 | * param: _storageAddr (address) Address of block store contract to store block to
102 | *
103 | * Submission of block headers from another chain. The signature held in the extraData field of _rlpSignedBlockHeader is recovered
104 | * and, if valid, the block is persisted as a BlockHeader struct defined above.
105 | */
106 | function SubmitBlock(bytes32 _chainId, bytes memory _rlpUnsignedBlockHeader, bytes memory _rlpSignedBlockHeader, address _storageAddr) onlyRegisteredChains(_chainId) public {
107 | RLP.RLPItem[] memory header = _rlpUnsignedBlockHeader.toRLPItem().toList();
108 | RLP.RLPItem[] memory signedHeader = _rlpSignedBlockHeader.toRLPItem().toList();
109 | require( header.length == signedHeader.length, "Header properties length mismatch" );
110 |
111 | // Check header and signedHeader contain the same data
112 | for (uint256 i=0; i= parentMetadata.threshold && !parentMetadata.m_validators[_candidate]) {
217 | newVoteCount = 0;
218 |
219 | for (uint i = 0; i < parentMetadata.validators.length; i++) {
220 | newValidators.push(parentMetadata.validators[i]);
221 | }
222 | newValidators.push(_candidate);
223 | } else if ( (parentMetadata.m_proposals[_candidate] + 1) >= parentMetadata.threshold && parentMetadata.m_validators[_candidate]) {
224 | newVoteCount = 0;
225 |
226 | for (uint j = 0; j < parentMetadata.validators.length; j++) {
227 | if (parentMetadata.validators[j] != _candidate) {
228 | newValidators.push(parentMetadata.validators[j]);
229 | }
230 | }
231 | } else {
232 | newVoteCount = parentMetadata.m_proposals[_candidate] + 1;
233 |
234 | for (uint k = 0; k < parentMetadata.validators.length; k++) {
235 | newValidators.push(parentMetadata.validators[k]);
236 | }
237 | }
238 |
239 | metadata.m_proposals[_candidate] = newVoteCount;
240 | newThreshold = (newValidators.length/2) + 1;
241 |
242 | for (uint vi = 0; vi < newValidators.length; vi++) {
243 | metadata.m_validators[newValidators[vi]] = true;
244 | if (newValidators[vi] != _candidate) {
245 | metadata.m_proposals[newValidators[vi]] = parentMetadata.m_proposals[newValidators[vi]];
246 | }
247 | }
248 | } else {
249 | // If no vote, set current block metadata equal to parent block
250 | metadata.validators = parentMetadata.validators;
251 | metadata.threshold = parentMetadata.threshold;
252 |
253 | for (uint pi = 0; pi < parentMetadata.validators.length; pi++) {
254 | metadata.m_validators[parentMetadata.validators[pi]] = true;
255 | metadata.m_proposals[parentMetadata.validators[pi]] = parentMetadata.m_proposals[parentMetadata.validators[pi]];
256 | }
257 | }
258 | }
259 |
260 | /*
261 | * storeBlock
262 | * param: _chainId (bytes32) Unique id of interoperating chain
263 | * param: _hash (bytes32) Hash of the block being stored
264 | * param: _parentHash (bytes32) Parent block hash of the block being stored
265 | * param: _txRootHash (bytes32) Transaction trie root hash of the block
266 | * param: _receiptRootHash (bytes32) Receipt trie root hash of the block
267 | * param: _height (uint256) Block number of the block being stored
268 | * param: _rlpBlockHeader (bytes) RLP-encoded block header passed on to the block store
269 | * param: _storageAddr (address) Address of the block store contract
270 | *
271 | * Takes the submitted block to propagate to the storage contract.
272 | */
273 | function storeBlock(
274 | bytes32 _chainId,
275 | bytes32 _hash,
276 | bytes32 _parentHash,
277 | bytes32 _txRootHash,
278 | bytes32 _receiptRootHash,
279 | uint256 _height,
280 | bytes memory _rlpBlockHeader,
281 | address _storageAddr
282 | ) internal {
283 | m_blockhashes[_chainId][_hash] = true;
284 |
285 | BlockHeader storage header = m_blockheaders[_chainId][_hash];
286 | header.blockNumber = _height;
287 | header.blockHash = _hash;
288 | header.prevBlockHash = _parentHash;
289 | header.txRootHash = _txRootHash;
290 | header.receiptRootHash = _receiptRootHash;
291 |
292 | // Add block to Ion
293 | ion.storeBlock(_storageAddr, _chainId, _rlpBlockHeader);
294 | }
295 |
296 | /*
297 | * shiftHead
298 | * param: _chainId (bytes32) Unique id of chain
299 | * param: _childHash (bytes32) New block hash
300 | * param: _parentHash (bytes32) Previous block hash
301 | *
302 | * Updates set of current open chain heads per chain. Open chain heads are blocks that do not have a child that can
303 | * be built upon.
304 | */
305 | function shiftHead(bytes32 _chainId, bytes32 _childHash, bytes32 _parentHash) public {
306 | int index = -1;
307 | bytes32[] storage chainHeads = heads[_chainId];
308 |
309 | // Check if parent hash is an open head and replace with child
310 | for (uint i = 0; i < chainHeads.length; i++) {
311 | if (chainHeads[i] == _parentHash) {
312 | index = int(i);
313 |
314 | delete chainHeads[uint(index)];
315 | chainHeads[uint(index)] = _childHash;
316 |
317 | return;
318 | }
319 | }
320 |
321 | // If parent is not an open head, child is, so append to heads
322 | chainHeads.push(_childHash);
323 | }
324 |
325 | function getValidators(bytes32 _chainId, bytes32 _blockHash) public view returns (address[] memory) {
326 | return m_blockmetadata[_chainId][_blockHash].validators;
327 | }
328 |
329 | function getProposal(bytes32 _chainId, bytes32 _blockHash, address _candidate) public view returns (uint256) {
330 | return m_blockmetadata[_chainId][_blockHash].m_proposals[_candidate];
331 | }
332 | }
333 |
--------------------------------------------------------------------------------
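A hedged sketch of the call pattern the comments above describe: register the remote Clique chain with its genesis validators, then submit sequential headers. The chain id, validator list, headers and store address are placeholders supplied by the caller, and this assumes clique.register() has already been called once so that Ion accepts addChain and storeBlock calls from the Clique contract:

    pragma solidity ^0.5.12;

    import "./Clique.sol";

    // Hypothetical caller of the Clique validation contract.
    contract CliqueCaller {
        Clique clique;

        constructor(address _clique) public {
            clique = Clique(_clique);
        }

        function registerAndSubmit(
            bytes32 _chainId,
            address[] memory _genesisValidators,
            bytes32 _genesisHash,
            address _store,
            bytes memory _unsignedHeader,
            bytes memory _signedHeader
        ) public {
            clique.RegisterChain(_chainId, _genesisValidators, _genesisHash, _store);
            clique.SubmitBlock(_chainId, _unsignedHeader, _signedHeader, _store);
        }
    }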
/contracts/validation/IBFT.sol:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2016-2019 Clearmatics Technologies Ltd
2 | // SPDX-License-Identifier: LGPL-3.0+
3 | pragma solidity ^0.5.12;
4 |
5 | import "../libraries/ECVerify.sol";
6 | import "../libraries/RLP.sol";
7 | import "../libraries/SolidityUtils.sol";
8 | import "../IonCompatible.sol";
9 | import "../storage/BlockStore.sol";
10 |
11 | /*
12 | Smart contract for validation of blocks that use the IBFT-Soma consensus algorithm
13 | Blocks must be submitted sequentially due to the voting mechanism of IBFT-Soma.
14 | */
15 |
16 | contract IBFT is IonCompatible {
17 | using RLP for RLP.RLPItem;
18 | using RLP for RLP.Iterator;
19 | using RLP for bytes;
20 |
21 | /*
22 | * @description persists the last submitted block of a chain being validated
23 | */
24 | struct BlockHeader {
25 | uint256 blockNumber;
26 | bytes32 blockHash;
27 | bytes32 prevBlockHash;
28 | address[] validators;
29 | uint256 threshold;
30 | }
31 |
32 | event GenesisCreated(bytes32 chainId, bytes32 blockHash);
33 | event BlockSubmitted(bytes32 chainId, bytes32 blockHash);
34 |
35 | /*
36 | * onlyRegisteredChains
37 | * param: _id (bytes32) Unique id of chain supplied to function
38 | *
39 | * Modifier that checks if the provided chain id has been registered to this contract
40 | */
41 | modifier onlyRegisteredChains(bytes32 _id) {
42 | require(chains[_id], "Chain is not registered");
43 | _;
44 | }
45 |
46 | mapping (bytes32 => bool) public chains;
47 | mapping (bytes32 => bytes32) public m_chainHeads;
48 | mapping (bytes32 => mapping (bytes32 => BlockHeader)) public m_blockheaders;
49 |
50 | constructor (address _ionAddr) IonCompatible(_ionAddr) public {}
51 |
52 | /* =====================================================================================================================
53 |
54 | Public Functions
55 |
56 | =====================================================================================================================
57 | */
58 | function register() public returns (bool) {
59 | ion.registerValidationModule();
60 | return true;
61 | }
62 |
63 | /*
64 | * RegisterChain
65 | * param: _chainId (bytes32) Unique id of another chain to interoperate with
66 | * param: _validators (address[]) Array containing the validators at the genesis block
67 | * param: _genesisBlockHash (bytes32) Hash of the genesis block for the chain being registered with Ion
68 | * param: _storeAddr (address) Address of block store contract to register chain to
69 | *
70 | * Registers knowledge of the id of another interoperable chain requiring the genesis block metadata. Allows
71 | * the initialising of genesis blocks and their validator sets for chains. Multiple may be submitted and built upon
72 | * and is not opinionated on how they are used.
73 | */
74 | function RegisterChain(bytes32 _chainId, address[] memory _validators, bytes32 _genesisBlockHash, address _storeAddr) public {
75 | require(_chainId != ion.chainId(), "Cannot add this chain id to chain register");
76 |
77 | if (chains[_chainId]) {
78 | require(m_chainHeads[_chainId] == bytes32(0x0), "Chain already exists");
79 | } else {
80 | chains[_chainId] = true;
81 | ion.addChain(_storeAddr, _chainId);
82 | }
83 |
84 | addGenesisBlock(_chainId, _validators, _genesisBlockHash);
85 | }
86 |
87 | /*
88 | * SubmitBlock
89 | * param: _chainId (bytes32) Unique id of chain submitting block from
90 | * param: _rlpUnsignedBlockHeader (bytes) RLP-encoded byte array of the block header from IBFT-Soma chain containing only validator set in IstanbulExtra field
91 | * param: _rlpSignedBlockHeader (bytes) RLP-encoded byte array of the block header from other chain including all proposal seal in the IstanbulExtra field
92 | * param: _commitSeals (bytes) RLP-encoded commitment seals that are typically contained in the last element of the IstanbulExtra field
93 | * param: _storeAddr (address) Address of block store contract to store block to
94 | *
95 | * Submission of block headers from another chain.
96 | */
97 | function SubmitBlock(bytes32 _chainId, bytes memory _rlpUnsignedBlockHeader, bytes memory _rlpSignedBlockHeader, bytes memory _commitSeals, address _storageAddr) onlyRegisteredChains(_chainId) public {
98 | RLP.RLPItem[] memory header = _rlpSignedBlockHeader.toRLPItem().toList();
99 |
100 | // Check the parent hash is the same as the previous block submitted
101 | bytes32 parentBlockHash = SolUtils.BytesToBytes32(header[0].toBytes(), 1);
102 | require(m_chainHeads[_chainId] == parentBlockHash, "Not child of previous block!");
103 |
104 | // Verify that validator and sealers are correct
105 | require(checkSignature(_chainId, header[12].toData(), keccak256(_rlpUnsignedBlockHeader), parentBlockHash), "Signer is not validator");
106 | require(checkSeals(_chainId, _commitSeals, _rlpSignedBlockHeader, parentBlockHash), "Sealer(s) not valid");
107 |
108 | // Append new block to the struct
109 | addValidators(_chainId, header[12].toData(), keccak256(_rlpSignedBlockHeader));
110 | storeBlock(_chainId, keccak256(_rlpSignedBlockHeader), parentBlockHash, header[8].toUint(), _rlpSignedBlockHeader, _storageAddr);
111 |
112 | emit BlockSubmitted(_chainId, keccak256(_rlpSignedBlockHeader));
113 | }
114 |
115 |
116 | /* =====================================================================================================================
117 |
118 | Internal Functions
119 |
120 | =====================================================================================================================
121 | */
122 |
123 | /*
124 | * addGenesisBlock
125 | * param: _chainId (bytes32) Unique id of another chain to interoperate with
126 | * param: _validators (address[]) Array containing the validators at the genesis block
127 | * param: _genesisBlockHash (bytes32) Hash of the genesis block for the chain being registered with Ion
128 | *
129 | * Adds a genesis block with the validators and other metadata for this genesis block
130 | */
131 | function addGenesisBlock(bytes32 _chainId, address[] memory _validators, bytes32 _genesisBlockHash) internal {
132 | BlockHeader storage header = m_blockheaders[_chainId][_genesisBlockHash];
133 | header.blockNumber = 0;
134 | header.blockHash = _genesisBlockHash;
135 | header.validators = _validators;
136 | header.threshold = 2*(_validators.length/3) + 1;
137 |
138 | m_chainHeads[_chainId] = _genesisBlockHash;
139 | emit GenesisCreated(_chainId, _genesisBlockHash);
140 | }
141 |
142 | /*
143 | * checkSignature
144 | * param: _chainId (bytes32) Unique id of interoperating chain
145 | * param: _extraData (bytes) Byte array of the extra data containing signature
146 | * param: _hash (bytes32) Hash of the unsigned block header
147 | * param: _parentBlockHash (bytes32) Parent block hash of current block being checked
148 | *
149 | * Checks that the submitted block has actually been signed, recovers the signer and checks if they are validator in
150 | * parent block
151 | */
152 | function checkSignature(bytes32 _chainId, bytes memory _extraData, bytes32 _hash, bytes32 _parentBlockHash) internal view returns (bool) {
153 | // Retrieve Istanbul Extra Data
154 | bytes memory istanbulExtra = new bytes(_extraData.length - 32);
155 | SolUtils.BytesToBytes(istanbulExtra, _extraData, 32);
156 |
157 | RLP.RLPItem[] memory signature = istanbulExtra.toRLPItem().toList();
158 |
159 | bytes memory extraDataSig = new bytes(65);
160 | SolUtils.BytesToBytes(extraDataSig, signature[1].toBytes(), signature[1].toBytes().length-65);
161 |
162 | // Recover the signature
163 | address sigAddr = ECVerify.ecrecovery(keccak256(abi.encode(_hash)), extraDataSig);
164 | BlockHeader storage parentBlock = m_blockheaders[_chainId][_parentBlockHash];
165 |
166 | // Check if signature is a validator that exists in previous block
167 | return isValidator(parentBlock.validators, sigAddr);
168 | }
169 |
170 | /*
171 | * checkSeals
172 | * param: _chainId (bytes32) Unique id of interoperating chain
173 | * param: _seals (bytes) RLP-encoded list of 65 byte seals
174 | * param: _rlpBlock (bytes) Byte array of RLP encoded unsigned block header
175 | * param: _parentBlockHash (bytes32) Parent block hash of current block being checked
176 | *
177 | * Checks that the submitted block has enough seals to be considered valid as per the IBFT Soma rules
178 | */
179 | function checkSeals(bytes32 _chainId, bytes memory _seals, bytes memory _rlpBlock, bytes32 _parentBlockHash) internal view returns (bool) {
180 | bytes32 signedHash = keccak256(abi.encodePacked(keccak256(_rlpBlock), byte(0x02)));
181 | BlockHeader storage parentBlock = m_blockheaders[_chainId][_parentBlockHash];
182 | uint256 validSeals = 0;
183 |
184 | // Check if signature is a validator that exists in previous block
185 | RLP.RLPItem[] memory seals = _seals.toRLPItem().toList();
186 | for (uint i = 0; i < seals.length; i++) {
187 | // Recover the signature
188 | address sigAddr = ECVerify.ecrecovery(signedHash, seals[i].toData());
189 | if (!isValidator(parentBlock.validators, sigAddr))
190 | return false;
191 | validSeals++;
192 | }
193 |
194 | if (validSeals < parentBlock.threshold)
195 | return false;
196 |
197 | return true;
198 | }
199 |
200 | function isValidator(address[] memory _validators, address _validator) internal pure returns (bool) {
201 | for (uint i = 0; i < _validators.length; i++) {
202 | if (_validator == _validators[i])
203 | return true;
204 | }
205 | return false;
206 | }
207 |
208 | /*
209 | * addValidators
210 | * param: _chainId (bytes32) Unique id of interoperating chain
211 | * param: _extraData (bytes) Byte array of the extra data containing the RLP encoded validator list
212 | * param: _blockHash (bytes32) Hash of the block whose validator set is being updated
213 | * note: the first 32 bytes of vanity data are stripped before the extra data is RLP decoded
214 | *
215 | * Updates the validators from the RLP encoded extradata
216 | */
217 | function addValidators(bytes32 _chainId, bytes memory _extraData, bytes32 _blockHash) internal {
218 | BlockHeader storage newBlock = m_blockheaders[_chainId][_blockHash];
219 |
220 | // Retrieve Istanbul Extra Data
221 | bytes memory rlpIstanbulExtra = new bytes(_extraData.length - 32);
222 | SolUtils.BytesToBytes(rlpIstanbulExtra, _extraData, 32);
223 |
224 | RLP.RLPItem[] memory istanbulExtra = rlpIstanbulExtra.toRLPItem().toList();
225 | RLP.RLPItem[] memory decodedExtra = istanbulExtra[0].toBytes().toRLPItem().toList();
226 |
227 | for (uint i = 0; i < decodedExtra.length; i++) {
228 | address validator = decodedExtra[i].toAddress();
229 | newBlock.validators.push(validator);
230 | }
231 |
232 | newBlock.threshold = 2*(newBlock.validators.length/3) + 1;
233 | }
234 |
235 | /*
236 | * storeBlock
237 | * param: _chainId (bytes32) Unique id of interoperating chain
238 | * param: _hash (bytes32) Hash of the block being stored
239 | * param: _parentHash (bytes32) Parent block hash of the block being stored
240 | * param: _height (uint256) Height of the block being stored
241 | * param: _rlpBlockHeader (bytes) Byte array of the RLP encoded block header
242 | * param: _storageAddr (address) Address of the storage contract the block is passed to
243 | *
244 | * Takes the submitted block to propagate to the storage contract.
245 | */
246 | function storeBlock(
247 | bytes32 _chainId,
248 | bytes32 _hash,
249 | bytes32 _parentHash,
250 | uint256 _height,
251 | bytes memory _rlpBlockHeader,
252 | address _storageAddr
253 | ) internal {
254 | m_chainHeads[_chainId] = _hash;
255 |
256 | BlockHeader storage header = m_blockheaders[_chainId][_hash];
257 | header.blockNumber = _height;
258 | header.blockHash = _hash;
259 | header.prevBlockHash = _parentHash;
260 |
261 | delete m_blockheaders[_chainId][_parentHash];
262 |
263 | // Add block to Ion
264 | ion.storeBlock(_storageAddr, _chainId, _rlpBlockHeader);
265 | }
266 |
267 | function getValidators(bytes32 _chainId) public view returns (address[] memory) {
268 | return m_blockheaders[_chainId][m_chainHeads[_chainId]].validators;
269 | }
270 |
271 | }
272 |
--------------------------------------------------------------------------------
/docker_build/account/keystore/UTC--2018-06-05T09-31-57.109288703Z--2be5ab0e43b6dc2908d5321cf318f35b80d0c10d:
--------------------------------------------------------------------------------
1 | {"address":"2be5ab0e43b6dc2908d5321cf318f35b80d0c10d","crypto":{"cipher":"aes-128-ctr","ciphertext":"0b11aa865046778a1b16a9b8cb593df704e3fe09f153823d75442ad1aab66caa","cipherparams":{"iv":"4aa66b789ee2d98cf77272a72eeeaa50"},"kdf":"scrypt","kdfparams":{"dklen":32,"n":262144,"p":1,"r":8,"salt":"b957fa7b7577240fd3791168bbe08903af4c8cc62c304f1df072dc2a59b1765e"},"mac":"197a06eb0449301d871400a6bdf6c136b6f7658ee41e3f2f7fd81ca11cd954a3"},"id":"a3cc1eae-3e36-4659-b759-6cf416216e72","version":3}
--------------------------------------------------------------------------------
/docker_build/account/password-2be5ab0e43b6dc2908d5321cf318f35b80d0c10d.txt:
--------------------------------------------------------------------------------
1 | password1
--------------------------------------------------------------------------------
/docker_build/clique.json:
--------------------------------------------------------------------------------
1 | {
2 | "config": {
3 | "chainId": 1515,
4 | "homesteadBlock": 1,
5 | "eip150Block": 2,
6 | "eip150Hash": "0x0000000000000000000000000000000000000000000000000000000000000000",
7 | "eip155Block": 3,
8 | "eip158Block": 3,
9 | "byzantiumBlock": 4,
10 | "clique": {
11 | "period": 1,
12 | "epoch": 30000
13 | }
14 | },
15 | "nonce": "0x0",
16 | "timestamp": "0x5b165989",
17 | "extraData": "0x00000000000000000000000000000000000000000000000000000000000000002be5ab0e43b6dc2908d5321cf318f35b80d0c10d0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
18 | "gasLimit": "0xFFFFFFFFFFFF",
19 | "difficulty": "0x1",
20 | "mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000",
21 | "coinbase": "0x0000000000000000000000000000000000000000",
22 | "alloc": {
23 | "2be5ab0e43b6dc2908d5321cf318f35b80d0c10d": {
24 | "balance": "0x200000000000000000000000000000000000000000000000000000000000000"
25 | }
26 | },
27 | "number": "0x0",
28 | "gasUsed": "0x0",
29 | "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000"
30 | }
31 |
--------------------------------------------------------------------------------
/docker_build/launch_geth.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | geth --datadir docker_build/account/ --syncmode 'full' --port 30311 --rpc --rpcaddr '0.0.0.0' --rpcport 8545 --networkid 1515 --gasprice '0' --targetgaslimit 0xFFFFFFFFFFFF --unlock '0x2be5ab0e43b6dc2908d5321cf318f35b80d0c10d' --password docker_build/account/password-2be5ab0e43b6dc2908d5321cf318f35b80d0c10d.txt --mine
3 |
--------------------------------------------------------------------------------
/docker_build/password:
--------------------------------------------------------------------------------
1 | here_is_password
2 |
--------------------------------------------------------------------------------
/docs/Ion-CLI.md:
--------------------------------------------------------------------------------
1 | # Ion Command Line Interface
2 | The Ion CLI is a tool which allows users to easily interact with the Ion project. Written in Golang, it allows rapid development of new commands and contracts by leveraging [ishell](https://github.com/abiosoft/ishell) and the [go-ethereum smart contract bindings](https://github.com/ethereum/go-ethereum/wiki/Native-DApps:-Go-bindings-to-Ethereum-contracts).
3 |
4 | ***Note:*** The Ion CLI is not a trusted part of the Ion infrastructure; rather, it is just a tool to facilitate users, who should verify its functionality prior to using any unknown code.
5 |
6 | ## Running Ion CLI
7 | In order to compile the Ion CLI run:
8 | ```
9 | $ cd /path/to/validation/src
10 | $ make build
11 | $ make test
12 | ```
13 |
14 | Given all tests pass, the Ion CLI can be run. Prior to running, the user must ensure that the `setup.json` file has been modified to contain the following (an illustrative Go sketch of one possible representation of this configuration follows the list):
15 |
16 | * Address and port of the foreign Clique chain RPC
17 | * Address and port of the native chain RPC
18 | * User account on the foreign Clique chain
19 | * User account on the native chain
20 | * Address of the deployed validation contract on the native chain
21 |
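For illustration, the sketch below shows one possible Go representation of this configuration. The struct name `Setup` matches the snippet further down this page, but the field names and JSON keys here are assumptions made for the example and must be checked against the actual `setup.json` used by the CLI.
```
package main

import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"log"
)

// Setup mirrors the settings listed above. All field and key names are
// hypothetical; adapt them to the real setup.json format.
type Setup struct {
	AddrTo      string `json:"rpc-to"`       // address and port of the native chain RPC
	AddrFrom    string `json:"rpc-from"`     // address and port of the foreign Clique chain RPC
	AccountTo   string `json:"account-to"`   // user account on the native chain
	AccountFrom string `json:"account-from"` // user account on the foreign Clique chain
	Ion         string `json:"ion-addr"`     // deployed validation contract on the native chain
}

// readSetup loads and decodes the configuration file
func readSetup(path string) Setup {
	raw, err := ioutil.ReadFile(path)
	if err != nil {
		log.Fatal(err)
	}
	var setup Setup
	if err := json.Unmarshal(raw, &setup); err != nil {
		log.Fatal(err)
	}
	return setup
}

func main() {
	setup := readSetup("setup.json")
	fmt.Printf("%+v\n", setup)
}
```
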
22 | Once this has been set up correctly, the CLI can be launched as follows:
23 | ```
24 | $ ./ion-cli -config [/path/to/setup.json]
25 | ===============================================================
26 | Ion Command Line Interface
27 |
28 | RPC Client [to]:
29 | Listening on: 127.0.0.1:8501
30 | User Account: 0x2be5ab0e43b6dc2908d5321cf318f35b80d0c10d
31 | Ion Contract: 0xb9fd43a71c076f02d1dbbf473c389f0eacec559f
32 |
33 | RPC Client [from]:
34 | Listening on: 127.0.0.1:8545
35 | User Account: 0x8671e5e08d74f338ee1c462340842346d797afd3
36 | ===============================================================
37 | >>>
38 | ```
39 |
40 | Running help displays the available commands:
41 | ```
42 | >>> help
43 |
44 | Commands:
45 | clear clear the screen
46 | exit exit the program
47 | getBlock use: getBlock [integer]
48 | description: Returns block header specified
49 | getValidators use: getValidators
50 | description: Returns the whitelist of validators from validator contract
51 | help display help
52 | latestBlock use: latestBlock
53 | description: Returns number of latest block mined/sealed
54 | latestValidationBlock use: latestValidationBlock
55 | description: Returns hash of the last block submitted to the validation contract
56 | submitValidationBlock use: submitValidationBlock [integer]
57 | description: Returns the RLP block header, signed block prefix, extra data prefix and submits to validation contract
58 | ```
59 |
60 | ### Tutorial
61 |
62 |
63 | ## Extending the Ion CLI
64 | In order to add your contract to the Ion CLI, a Golang version of the Solidity smart contract first needs to be created. To do this we follow the instructions from the [go-ethereum smart contract bindings](https://github.com/ethereum/go-ethereum/wiki/Native-DApps:-Go-bindings-to-Ethereum-contracts).
65 |
66 | We will add a contract called `Spoon.sol` to the CLI. This requires the generation of the `abi` and `bin` files. To do this run:
67 | ```
68 | $ npm run genbin
69 | $ npm run genabi
70 | ```
71 | The latest versions of the `abi` and `bin` files will now be found in the `/path/to/ion/contracts/` directory. Next, generate the `.go` version of the desired smart contract using `abigen`:
72 | ```
73 | $ abigen --bin=/path/to/Spoon.bin --abi /path/to/Spoon.abi --pkg contract --type Spoon --out Spoon.go
74 | ```
75 | Next, place the output `Spoon.go` in the package-specific directory for your Golang code. The contract can then be interfaced with simply by importing the contract package.
76 |
77 | ### Golang Smart Contract Interface
78 | Given the existing Ion CLI framework, any additional contracts should be placed in the `ion/ion-cli/contracts/` directory and appended to the contract package.
79 |
80 | To use an instance of the Spoon contract insert:
81 | ```
82 | func InitSpoonContract(setup Setup, client *ethclient.Client) (Spoon *contract.Spoon) {
83 | // Initialise the contract
84 | address := common.HexToAddress(setup.Ion)
85 | Spoon, err := contract.NewSpoon(address, client)
86 | if err != nil {
87 | log.Fatal(err)
88 | }
89 |
90 | return
91 | }
92 | ```
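
Once initialised, the generated binding exposes the contract's public functions as Go methods. The fragment below is a sketch only: `Count` stands in for whatever view function your contract actually declares (it is not part of this repository), and the snippet assumes the usual imports (`fmt`, `log`, `github.com/ethereum/go-ethereum/accounts/abi/bind` and `github.com/ethereum/go-ethereum/ethclient`):
```
// printSpoonCount is an illustrative sketch: Count is the method abigen would
// generate for a hypothetical `count()` view function on Spoon.sol.
func printSpoonCount(setup Setup, client *ethclient.Client) {
	// InitSpoonContract is the helper defined above
	spoon := InitSpoonContract(setup, client)

	// Read-only calls on abigen bindings take a *bind.CallOpts as their first argument
	value, err := spoon.Count(&bind.CallOpts{})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("count:", value)
}
```
State-changing functions follow the same pattern but take a `*bind.TransactOpts` instead, typically built from the user's key.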
93 |
--------------------------------------------------------------------------------
/docs/Roadmap.md:
--------------------------------------------------------------------------------
1 | # Ion Stage 2: Phase 1 Roadmap
2 |
3 | Ion Stage 2 separates the cross chain payment use case from the interoperability solution. Focus has moved away from pure atomic exchange of value across chains, towards a mechanism to prove state across chains.
4 |
5 | Given two blockchains A and B, the state of B should be verifiable on A and vice versa. To do this, a smart contract should be developed which effectively becomes a light client of the corresponding blockchain. This facilitates interoperability on a much larger scale than simple value transactions.
6 |
7 | The cross-chain payment still serves as an illustrative example of how the solution would work in a specific scenario, but it is not part of the core solution to be developed. Ion Stage 2 relies on a smart contract that allows the state of another chain to be stored and verified. The verification of blocks from a foreign blockchain on a native chain should leverage the underlying consensus mechanism of the chain whose state is being passed.
8 |
9 | In Phase 1 we intend to tackle two different aspects:
10 | * Storing and proving of state from a foreign chain
11 | * Validation of passed foreign state
12 |
13 | We assume that the validator set is an existing, known set of nodes that also engage in the consensus protocol of the foreign chain, so as to ensure the validity of the signed state.
14 |
15 | ## Nomenclature
16 | We define here a common set of terminology, used throughout this document, that has a specific meaning or context within the project.
17 |
18 | * Native Chain: Refers to the chain where active interaction is taking place. This is where state from the foreign chain will be sent for persistence and verified against.
19 | * Foreign Chain: Refers to the chain whose state is being persisted. Signed data about blocks from this chain will be passed to the native chain and stored.
20 |
21 | The above naming scheme applies in the context of describing interaction flow in a one-way setting where state is passed from one chain to another. In more complex flows where both chains are actively interacted with, this naming convention may be omitted to reduce confusion.
22 |
23 | * Proof: Refers to merkle proof-like mechanisms to assert the existence of an item or event in a block or root hash
24 | * Validation: Refers to the signing and verifying of signatures of a piece of data, usually the block hash/header of a foreign chain
25 | * State: Refers to the data captured and transferred between chains that facilitates the ability to prove a state transition of another chain. This will consist of another chain's block header and block hash.
26 |
27 | ## Targets
28 | State-proving is our fundamental goal, but we frame our use case around performing a cross-chain PvP payment. Any programmable contract can interface with the Relay contract, and through our use case we outline an initial example of how this would be achieved.
29 |
30 | In order to perform a cross-chain PvP payment we must:
31 | * Prove state of blockchain A on B
32 | * Verify the signatures of block signatories from a different blockchain
33 | * Settle a cross-chain transaction between two counterparties via the above mechanisms
34 | * Provide well-documented interfaces to allow users to easily interact with the project
35 |
36 | ### Assumptions
37 | Listed here are the assumptions with which the project is being developed:
38 | * Ethereum-based blockchain
39 | * IBFT Consensus Protocol or other immediate-finality algorithms
40 | * Permissioned network
41 | * Validator set is known and assumed as correct
42 |
43 | ## Project Planning
44 | Ion Stage 2 will be developed using agile methodologies, with fortnightly sprints. Note that the sprint objective will remain dynamic and should change as the project continues.
45 |
49 | ### Sprint 6 - Minimal Viable Ion Stage 2
50 | Date: 16.07.2018 - 23.07.2018
51 |
52 | Description:
53 | The various separate components should be integrated, with the required tooling and documentation.
54 |
55 | Goals:
56 | * Ability to submit and verify proofs against Ion Relay contract
57 | * Execute contract given state transition
58 |
59 | Achieved:
60 |
61 | ### Sprint 5 - Off-Chain Proof Generation
62 | Date: 02.07.2018 - 13.07.2018
63 |
64 | Description:
65 | We aim to be able to generate proofs _off-chain_, preferably using the Ion CLI. This is the key part to being able to make claims against state _on-chain_.
66 |
67 | Goals:
68 | * Research in depth the complexities of creating continually executing smart contracts
69 | * Update Ion specification
70 | * Generate off-chain proofs of state transition: Solidity, Golang, and Ion CLI Integration
71 | * Begin research into the outline of potential use cases i.e. PvP
72 | * Increase testing coverage of smart contracts and Ion CLI
73 |
74 | Achieved:
75 |
76 | ### Sprint 4 - User Flow Development
77 | Date: 25.06.2018 - 29.06.2018
78 |
79 | Description:
80 | Given the original user stories, the smart contract should now contain the minimum functions necessary to interact with the project. This should naturally be an extension of the previous week's work, smoothing out the integration and interaction flows of the stack.
81 |
82 | Goals:
83 | * Smart contract should now have protection for edge-cases
84 | * Addition of user permissioning
85 | * Automation of block generation
86 | * Tutorial CLI and Validation contract
87 | * CLI Golang
88 |
89 | Achieved:
90 | * Automation of block generation
91 | * Tutorial CLI and Validation contract
92 | * CLI Golang
93 |
94 | Notes:
95 | * Sprints changed to fortnightly from this point onwards
96 |
97 | ### Sprint 3 - Validation of Passed State
98 | Date: 18.06.2018 - 22.06.2018
99 |
100 | Description:
101 | The two separate problems of validation and proofs should be integrated, and a minimal smart contract that allows the immediate validation of a submitted block should be developed.
102 |
103 | Goals:
104 | * Single contract which allows state proof and block validation to be performed simultaneously
105 |
106 | Achieved:
107 |
108 | ### Sprint 2 - Skeleton Implementation
109 | Date: 11.06.2018 - 15.06.2018
110 |
111 | Description:
112 | It should be shown that it is indeed possible to prove the state of a foreign chain on a native chain and make assertions about that state. Separately, it should be shown that the validators from the foreign chain can be added to the native chain, and that blocks submitted and validated on the foreign chain can be validated on the native chain using the signatures of the foreign validator set.
113 |
114 | Goals:
115 | * Smart contract for state proof verification
116 | * Tests for state proofs
117 | * Smart contract for block validation
118 | * Tests for block validation
119 |
120 | Achieved:
121 |
122 |
123 | ### Sprint 1 - PoC Final Proposal Definition.
124 | Date: 04.06.2018 - 08.06.2018
125 |
126 | Description:
127 | We aim to describe fully how the Phase 1 PoC would work, detailing in its entirety the functionality of all smart contracts to be developed.
128 |
129 | Goals:
130 | * Project specification.
131 |
132 | Achieved:
133 | * Specification was released
134 |
135 |
136 |
137 |
--------------------------------------------------------------------------------
/migrations/1_initial_migration.js:
--------------------------------------------------------------------------------
1 | var Migrations = artifacts.require("./Migrations.sol");
2 |
3 | module.exports = function(deployer) {
4 | deployer.deploy(Migrations);
5 | };
6 |
--------------------------------------------------------------------------------
/migrations/2_deploy_contracts.js:
--------------------------------------------------------------------------------
1 | const Ion = artifacts.require("Ion");
2 | const Clique = artifacts.require("Clique");
3 | const EthereumStore = artifacts.require("EthereumStore");
4 | const EventFunction = artifacts.require("Function");
5 | const EventVerifier = artifacts.require("TriggerEventVerifier");
6 |
7 | module.exports = async (deployer) => {
8 | try {
9 | deployer.deploy(Ion, "0x6341fd3daf94b748c72ced5a5b26028f2474f5f00d824504e4fa37a75767e177")
10 | .then(() => Ion.deployed)
11 | .then(() => deployer.deploy(EthereumStore, Ion.address))
12 | .then(() => EthereumStore.deployed)
13 | .then(() => deployer.deploy(Clique, Ion.address))
14 | .then(() => Clique.deployed)
15 | .then(() => deployer.deploy(EventVerifier))
16 | .then(() => EventVerifier.deployed)
17 | .then(() => deployer.deploy(EventFunction, Ion.address, EventVerifier.address))
18 | .then(() => EventFunction.deployed)
19 | } catch(err) {
20 | console.log('ERROR on deploy:',err);
21 | }
22 |
23 | };
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "ion",
3 | "version": "1.0.0",
4 | "description": "Inter Operability Network",
5 | "main": "truffle.js",
6 | "repository": "https://github.com/clearmatics/ion.git",
7 | "author": "opensource@clearmatics.com",
8 | "license": "LGPL-3.0+",
9 | "dependencies": {
10 | "axios": "^0.21.1",
11 | "bignumber.js": "^8.0.1",
12 | "concat-stream": "^1.5.2",
13 | "ethereumjs-block": "^2.0.0",
14 | "ethereumjs-tx": "^1.3.5",
15 | "lodash.template": "^4.5.0",
16 | "merkle-patricia-tree": "^2.3.1",
17 | "node-gyp": "^3.8.0",
18 | "rlp": "^2.0.0",
19 | "solc": "^0.5.12",
20 | "solhint": "^1.1.10",
21 | "truffle-assertions": "^0.9.2",
22 | "underscore": "^1.12.1",
23 | "web3-eth-accounts": "^1.0.0-beta.34",
24 | "yargs-parser": "^5.0.1"
25 | },
26 | "devDependencies": {
27 | "chai": "^4.1.2",
28 | "chai-as-promised": "^7.1.1",
29 | "ganache-cli": "^6.7.0",
30 | "json-bigint-string": "^1.0.0",
31 | "lodash": "^4.17.21",
32 | "solidity-coverage": "^0.7.0",
33 | "truffle": "^5.3.6",
34 | "web3": "1.0.0-beta.33",
35 | "web3-eth-abi": "^1.3.5",
36 | "web3-utils": "1.0.0-beta.33"
37 | },
38 | "scripts": {
39 | "testrpc": "ganache-cli --port 8545 --gasLimit 0xFFFFFFFFFFFFF --gasPrice 0 --defaultBalanceEther 99999999999 --networkId 1234",
40 | "clirpc": "ganache-cli --port 8545 --gasLimit 0xFFFFFFFFFFFFF --gasPrice 0 --defaultBalanceEther 99999999999 --networkId 1234",
41 | "compile": "truffle compile",
42 | "deploy": "truffle deploy",
43 | "test": "truffle test",
44 | "debug": "truffle debug",
45 | "coverage": "solidity-coverage",
46 | "lint": "solhint contracts/**/*.sol",
47 | "genbin": "solc --overwrite --bin ./contracts/*.sol -o abi",
48 | "genabi": "solc --overwrite --abi ./contracts/*.sol -o abi"
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/test/clique.js:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2016-2018 Clearmatics Technologies Ltd
2 | // SPDX-License-Identifier: LGPL-3.0+
3 |
4 | /*
5 | Clique Validation contract test
6 |
7 | Tests here are standalone unit tests for clique module functionality.
8 | Other contracts have been mocked to simulate basic behaviour.
9 |
10 | Tests the clique scheme for block submission, validator signature verification and more.
11 | */
12 |
13 | const eth_util = require('ethereumjs-util');
14 | const utils = require('./helpers/utils.js');
15 | const encoder = require('./helpers/encoder.js');
16 | const Web3 = require('web3');
17 | const Web3Utils = require('web3-utils');
18 | const rlp = require('rlp');
19 | const truffleAssert = require('truffle-assertions');
20 | const sha3 = require('js-sha3').keccak_256
21 |
22 | const Clique = artifacts.require("Clique");
23 | const MockIon = artifacts.require("MockIon");
24 | const MockStorage = artifacts.require("MockStorage");
25 |
26 | const web3 = new Web3();
27 | const rinkeby = new Web3();
28 |
29 | web3.setProvider(new web3.providers.HttpProvider('http://localhost:8545'));
30 | rinkeby.setProvider(new web3.providers.HttpProvider('https://rinkeby.infura.io/v3/430e7d9d2b104879aee73ced56f0b8ba'));
31 |
32 | require('chai')
33 | .use(require('chai-as-promised'))
34 | .should();
35 |
36 | // Takes a header and private key returning the signed data
37 | // Needs extraData just to be sure of the final byte
38 | signHeader = (headerHash, privateKey, extraData) => {
39 | const sig = eth_util.ecsign(headerHash, privateKey)
40 | if (this._chainId > 0) {
41 | sig.v += this._chainId * 2 + 8
42 | }
43 |
44 | const pubKey = eth_util.ecrecover(headerHash, sig.v, sig.r, sig.s);
45 | const addrBuf = eth_util.pubToAddress(pubKey);
46 |
47 | const newSigBytes = Buffer.concat([sig.r, sig.s]);
48 | let newSig;
49 |
50 | const bytes = utils.hexToBytes(extraData)
51 | const finalByte = bytes.splice(bytes.length-1)
52 | if (finalByte.toString('hex')=="0") {
53 | newSig = newSigBytes.toString('hex') + '00';
54 | }
55 | if (finalByte.toString('hex')=="1") {
56 | newSig = newSigBytes.toString('hex') + '01';
57 | }
58 |
59 | return newSig;
60 | }
61 |
62 | const DEPLOYEDCHAINID = "0xab830ae0774cb20180c8b463202659184033a9f30a21550b89a2b406c3ac8075"
63 | const TESTCHAINID = "0x22b55e8a4f7c03e1689da845dd463b09299cb3a574e64c68eafc4e99077a7254"
64 |
65 | const VALIDATORS_START = ["0x42eb768f2244c8811c63729a21a3569731535f06", "0x7ffc57839b00206d1ad20c69a1981b489f772031", "0xb279182d99e65703f0076e4812653aab85fca0f0"];
66 | const VALIDATORS_FINISH = ["0x42eb768f2244c8811c63729a21a3569731535f06", "0x6635f83421bf059cd8111f180f0727128685bae4", "0x7ffc57839b00206d1ad20c69a1981b489f772031", "0xb279182d99e65703f0076e4812653aab85fca0f0"];
67 | const GENESIS_HASH = "0xf32b505a5ad95dfa88c2bd6904a1ba81a92a1db547dc17f4d7c0f64cf2cddbb1";
68 | const ADD_VALIDATORS_GENESIS_HASH = "0xf32b505a5ad95dfa88c2bd6904a1ba81a92a1db547dc17f4d7c0f64cf2cddbb1";
69 |
70 |
71 | contract('Clique.js', (accounts) => {
72 | const joinHex = arr => '0x' + arr.map(el => el.slice(2)).join('');
73 |
74 | const watchEvent = (eventObj) => new Promise((resolve,reject) => eventObj.watch((error,event) => error ? reject(error) : resolve(event)));
75 |
76 | // Fetch genesis from rinkeby
77 | let genesisBlock;
78 | let VALIDATORS;
79 | let GENESIS_HASH;
80 |
81 | let ion;
82 | let clique;
83 | let storage;
84 |
85 | beforeEach('setup contract for each test', async function () {
86 | ion = await MockIon.new(DEPLOYEDCHAINID);
87 | clique = await Clique.new(ion.address);
88 | storage = await MockStorage.new(ion.address);
89 |
90 | genesisBlock = await rinkeby.eth.getBlock(0);
91 | VALIDATORS = encoder.extractValidators(genesisBlock.extraData);
92 | GENESIS_HASH = genesisBlock.hash;
93 | })
94 |
95 | it('Deploy Contract', async () => {
96 | let chainId = await ion.chainId();
97 |
98 | assert.equal(chainId, DEPLOYEDCHAINID);
99 | })
100 |
101 | describe('Register Chain', () => {
102 | it('Successful Register Chain', async () => {
103 | // Successfully add id of another chain
104 | let tx = await clique.RegisterChain(TESTCHAINID, VALIDATORS, GENESIS_HASH, storage.address);
105 | console.log("\tGas used to register chain = " + tx.receipt.gasUsed.toString() + " gas");
106 | let chainExists = await clique.chains(TESTCHAINID);
107 |
108 | assert(chainExists);
109 |
110 | // Fail adding id of this chain
111 | await clique.RegisterChain(DEPLOYEDCHAINID, VALIDATORS, GENESIS_HASH, storage.address).should.be.rejected;
112 |
113 | // Fail adding id of chain already initialised
114 | await clique.RegisterChain(TESTCHAINID, VALIDATORS, GENESIS_HASH, storage.address).should.be.rejected;
115 | })
116 |
117 | it('Check Validators', async () => {
118 | // Successfully add id of another chain
119 | await clique.RegisterChain(TESTCHAINID, VALIDATORS, GENESIS_HASH, storage.address);
120 |
121 | let registeredValidators = await clique.getValidators.call(TESTCHAINID, GENESIS_HASH);
122 |
123 | for (let i = 0; i < VALIDATORS.length; i++) {
124 | let validatorExists = registeredValidators.map(v => v.toLowerCase()).some(v => { return v == VALIDATORS[i] });
125 | assert(validatorExists);
126 | }
127 | })
128 |
129 | it('Check Genesis Hash', async () => {
130 | // Successfully add id of another chain
131 | await clique.RegisterChain(TESTCHAINID, VALIDATORS, GENESIS_HASH, storage.address);
132 |
133 | let header = await clique.m_blockheaders(TESTCHAINID, GENESIS_HASH);
134 | let blockHeight = header[0];
135 |
136 | assert.equal(0, blockHeight);
137 | })
138 | })
139 |
140 | describe('Submit Block', () => {
141 | it('Authentic Submission Happy Path', async () => {
142 | await clique.RegisterChain(TESTCHAINID, VALIDATORS, GENESIS_HASH, storage.address);
143 |
144 | // Fetch block 1 from rinkeby
145 | const block = await rinkeby.eth.getBlock(1);
146 |
147 | const rlpHeaders = encoder.encodeBlockHeader(block);
148 | const signedHeaderHash = Web3Utils.sha3(rlpHeaders.signed);
149 | assert.equal(block.hash, signedHeaderHash);
150 |
151 | // Submit block should succeed
152 | const validationReceipt = await clique.SubmitBlock(TESTCHAINID, rlpHeaders.unsigned, rlpHeaders.signed, storage.address);
153 | let event = validationReceipt.receipt.rawLogs.some(l => { return l.topics[0] == '0x' + sha3("AddedBlock()") });
154 | assert.ok(event, "Stored event not emitted");
155 |
156 | const submittedEvent = validationReceipt.logs.find(l => { return l.event == 'BlockSubmitted' });
157 | assert.equal(signedHeaderHash, submittedEvent.args.blockHash);
158 |
159 | let blockHashExists = await clique.m_blockhashes(TESTCHAINID, block.hash);
160 | assert(blockHashExists);
161 |
162 | let header = await clique.m_blockheaders(TESTCHAINID, block.hash);
163 |
164 | // Separate fetched header info
165 | parentHash = header[2];
166 |
167 | // Assert that block was persisted correctly
168 | assert.equal(parentHash, block.parentHash);
169 | })
170 |
171 | // Here the block header is signed off chain but by a non-whitelisted validator
172 | it('Fail Submit Block unknown validator - SubmitBlock()', async () => {
173 | // Successfully add id of another chain
174 | await clique.RegisterChain(TESTCHAINID, VALIDATORS, GENESIS_HASH, storage.address);
175 |
176 | // Fetch block 1 from rinkeby
177 | const block = await rinkeby.eth.getBlock(1);
178 |
179 | // Encode the block header and separate its signed and unsigned forms
180 | const rlpHeaders = encoder.encodeBlockHeader(block);
181 | const signedHeader = rlpHeaders.signed;
182 | const unsignedHeader = rlpHeaders.unsigned;
183 |
184 | // Remove last 65 Bytes of extraData
185 | const extraBytesShort = rlpHeaders.extraBytesShort;
186 | const extraDataSignature = rlpHeaders.extraDataSignature;
187 | const extraDataShort = rlpHeaders.extraDataShort;
188 |
189 | const signedHeaderHash = Web3Utils.sha3(signedHeader);
190 | const unsignedHeaderHash = Web3Utils.sha3(unsignedHeader);
191 |
192 | // Encode and sign the new header
193 | const encodedExtraData = '0x' + rlp.encode(extraDataShort).toString('hex');
194 | const newSignedHeaderHash = eth_util.sha3(unsignedHeader);
195 |
196 | const privateKey = Buffer.from('4f35bad50b8b07fff875ec9d4dec6034b1cb0f7d283db4ce7df8fcfaa2030308', 'hex')
197 |
198 | let signature = await signHeader(newSignedHeaderHash, privateKey, block.extraData);
199 |
200 | // Append signature to the end of extraData
201 | const sigBytes = utils.hexToBytes(signature.toString('hex'));
202 | const newExtraDataBytes = extraBytesShort.concat(sigBytes);
203 | const newExtraData = '0x' + utils.bytesToHex(newExtraDataBytes);
204 |
205 | const newSignedHeader = [
206 | block.parentHash,
207 | block.sha3Uncles,
208 | block.miner,
209 | block.stateRoot,
210 | block.transactionsRoot,
211 | block.receiptsRoot,
212 | block.logsBloom,
213 | Web3Utils.toBN(block.difficulty),
214 | Web3Utils.toBN(block.number),
215 | block.gasLimit,
216 | block.gasUsed,
217 | Web3Utils.toBN(block.timestamp),
218 | newExtraData, // Off-chain signed block
219 | block.mixHash,
220 | block.nonce
221 | ];
222 |
223 | // Encode the offchain signed header
224 | const offchainSignedHeader = '0x' + rlp.encode(newSignedHeader).toString('hex');
225 | const offchainHeaderHash = Web3Utils.sha3(offchainSignedHeader);
226 |
227 | await clique.SubmitBlock(TESTCHAINID, unsignedHeader, offchainSignedHeader, storage.address).should.be.rejected;
228 |
229 | })
230 |
231 | it('Fail Submit Block from unknown chain - SubmitBlock()', async () => {
232 | await clique.RegisterChain(TESTCHAINID, VALIDATORS, GENESIS_HASH, storage.address);
233 |
234 | // Fetch block 1 from testrpc
235 | const block = await rinkeby.eth.getBlock(1);
236 |
237 | const rlpHeaders = encoder.encodeBlockHeader(block);
238 |
239 | // Submit block should fail
240 | await clique.SubmitBlock(TESTCHAINID.slice(0, -2) + "ff", rlpHeaders.unsigned, rlpHeaders.signed, storage.address).should.be.rejected;
241 |
242 | })
243 |
244 | it('Fail Submit Block with wrong unsigned header - SubmitBlock()', async () => {
245 | await clique.RegisterChain(TESTCHAINID, VALIDATORS, GENESIS_HASH, storage.address);
246 |
247 | // Fetch block 1 from testrpc
248 | const block = await rinkeby.eth.getBlock(1);
249 |
250 | const rlpHeaders = encoder.encodeBlockHeader(block);
251 |
252 | const signedHeaderHash = Web3Utils.sha3(rlpHeaders.signed);
253 | assert.equal(block.hash, signedHeaderHash);
254 |
255 | let unsignedHeader = rlpHeaders.rawunsigned;
256 | unsignedHeader[5] = unsignedHeader[5].slice(0, -2) + "fa";
257 | const encodedUnsignedHeader = '0x' + rlp.encode(unsignedHeader).toString('hex');
258 | const unsignedHeaderHash = Web3Utils.sha3(rlpHeaders.unsigned);
259 |
260 | // Submit block should fail
261 | await clique.SubmitBlock(TESTCHAINID, encodedUnsignedHeader, rlpHeaders.signed, storage.address).should.be.rejected;
262 |
263 | })
264 |
265 |
266 | // This test checks that new validators get added into the validator list as blocks are submitted to the contract.
267 | // Rinkeby adds its first non-genesis validator at block 873987, with the votes occurring at blocks 873983 and 873986.
268 | // We will start following the chain from 873982 and then add blocks until the vote threshold, n/2 + 1, is passed.
269 | it('Add Validators Through Block Submission', async () => {
270 | await clique.RegisterChain(TESTCHAINID, VALIDATORS_START, ADD_VALIDATORS_GENESIS_HASH, storage.address);
271 |
272 | let registeredValidators = await clique.getValidators.call(TESTCHAINID, ADD_VALIDATORS_GENESIS_HASH);
273 | let voteThreshold = Math.floor((registeredValidators.length/2) + 1);
274 | assert.equal(voteThreshold, 2);
275 |
276 | let voteProposal = await clique.getProposal.call(TESTCHAINID, ADD_VALIDATORS_GENESIS_HASH, VALIDATORS_FINISH[1]);
277 | assert.equal(voteProposal, 0);
278 |
279 | // Fetch block 873982 from rinkeby
280 | let block = await rinkeby.eth.getBlock(873982);
281 | let rlpHeaders = encoder.encodeBlockHeader(block);
282 |
283 | // Submit block should succeed
284 | let validationReceipt = await clique.SubmitBlock(TESTCHAINID, rlpHeaders.unsigned, rlpHeaders.signed, storage.address);
285 | console.log("\tGas used to submit block 873982 = " + validationReceipt.receipt.gasUsed.toString() + " gas");
286 |
287 | // Fetch block 873983 from rinkeby
288 | block = await rinkeby.eth.getBlock(873983);
289 | rlpHeaders = encoder.encodeBlockHeader(block);
290 |
291 | // Submit block should succeed
292 | validationReceipt = await clique.SubmitBlock(TESTCHAINID, rlpHeaders.unsigned, rlpHeaders.signed, storage.address);
293 | console.log("\tGas used to submit block 873983 = " + validationReceipt.receipt.gasUsed.toString() + " gas");
294 | let submittedEvent = validationReceipt.logs.find(l => { return l.event == 'BlockSubmitted' });
295 | let blockHash = submittedEvent.args.blockHash;
296 |
297 | // Check proposal is added
298 | voteProposal = await clique.getProposal.call(TESTCHAINID, blockHash, VALIDATORS_FINISH[1]);
299 | assert.equal(voteProposal, 1);
300 |
301 | // Fetch block 873984 from rinkeby
302 | block = await rinkeby.eth.getBlock(873984);
303 | rlpHeaders = encoder.encodeBlockHeader(block);
304 |
305 | // Submit block should succeed
306 | validationReceipt = await clique.SubmitBlock(TESTCHAINID, rlpHeaders.unsigned, rlpHeaders.signed, storage.address);
307 | console.log("\tGas used to submit block 873984 = " + validationReceipt.receipt.gasUsed.toString() + " gas");
308 |
309 | // Fetch block 873985 from rinkeby
310 | block = await rinkeby.eth.getBlock(873985);
311 | rlpHeaders = encoder.encodeBlockHeader(block);
312 |
313 | // Submit block should succeed
314 | validationReceipt = await clique.SubmitBlock(TESTCHAINID, rlpHeaders.unsigned, rlpHeaders.signed, storage.address);
315 | console.log("\tGas used to submit block 873985 = " + validationReceipt.receipt.gasUsed.toString() + " gas");
316 |
317 | // Fetch block 873986 from rinkeby
318 | block = await rinkeby.eth.getBlock(873986);
319 | rlpHeaders = encoder.encodeBlockHeader(block);
320 |
321 | // Submit block should succeed
322 | validationReceipt = await clique.SubmitBlock(TESTCHAINID, rlpHeaders.unsigned, rlpHeaders.signed, storage.address);
323 | console.log("\tGas used to submit block 873986 = " + validationReceipt.receipt.gasUsed.toString() + " gas");
324 | submittedEvent = validationReceipt.logs.find(l => { return l.event == 'BlockSubmitted' });
325 | blockHash = submittedEvent.args.blockHash;
326 |
327 | // Check proposal is added
328 | voteProposal = await clique.getProposal.call(TESTCHAINID, blockHash, VALIDATORS_FINISH[1]);
329 | assert.equal(voteProposal, 0);
330 |
331 | // Check all validators exist
332 | registeredValidators = await clique.getValidators.call(TESTCHAINID, blockHash);
333 | for (let i = 0; i < VALIDATORS_FINISH.length; i++) {
334 | let validatorExists = registeredValidators.map(v => v.toLowerCase()).some(v => { return v == VALIDATORS_FINISH[i] });
335 | assert(validatorExists);
336 | }
337 |
338 | // Check that the vote threshold has increased with validator set size
339 | voteThreshold = Math.floor((registeredValidators.length/2) + 1);
340 | assert.equal(voteThreshold, 3);
341 | })
342 | })
343 | });
344 |
--------------------------------------------------------------------------------
/test/helpers/encoder.js:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2016-2018 Clearmatics Technologies Ltd
2 | // SPDX-License-Identifier: LGPL-3.0+
3 |
4 | const Web3Utils = require('web3-utils');
5 | const rlp = require('rlp');
6 | const utils = require('./utils.js');
7 |
8 | const encoder = {};
9 |
10 | // Encodes the block headers from IBFT, returning the signed and unsigned instances plus the committed seals
11 | encoder.encodeIbftHeader = (block) => {
12 | let istExtraData = block.extraData.slice(66);
13 | let rlpExtraData = rlp.decode('0x' + istExtraData);
14 |
15 | let sig = '0x' + rlpExtraData[1].toString('hex');
16 |
17 | // Remove the committed seals
18 | committedSeals = rlpExtraData[2];
19 | rlpExtraData[2] = [];
20 |
21 | let rlpEncodedExtraDataSeal = rlp.encode(rlpExtraData);
22 |
23 | // Remove last 65 Bytes of extraData
24 | let extraBytes = utils.hexToBytes(block.extraData);
25 | let extraBytesShort = extraBytes.splice(1, 32);
26 | let extraDataShort = '0x' + utils.bytesToHex(extraBytesShort) + rlpEncodedExtraDataSeal.toString('hex');
27 |
28 | let header = [
29 | block.parentHash,
30 | block.sha3Uncles,
31 | block.miner,
32 | block.stateRoot,
33 | block.transactionsRoot,
34 | block.receiptsRoot,
35 | block.logsBloom,
36 | Web3Utils.toBN(block.difficulty),
37 | Web3Utils.toBN(block.number),
38 | block.gasLimit,
39 | block.gasUsed,
40 | Web3Utils.toBN(block.timestamp),
41 | extraDataShort,
42 | block.mixHash,
43 | block.nonce
44 | ];
45 |
46 | let encodedHeader = '0x' + rlp.encode(header).toString('hex');
47 | let encodedBlockHeaderHash = Web3Utils.sha3(encodedHeader);
48 |
49 | // Create the rlp encoded extra data
50 | rlpExtraData[1] = new Buffer([]);
51 | rlpExtraData[2] = [];
52 |
53 | rlpEncodedExtraDataSeal = rlp.encode(rlpExtraData);
54 |
55 | // Remove last 65 Bytes of extraData
56 | extraBytes = utils.hexToBytes(block.extraData);
57 | extraBytesShort = extraBytes.splice(1, 32);
58 | extraDataShort = '0x' + utils.bytesToHex(extraBytesShort) + rlpEncodedExtraDataSeal.toString('hex');
59 |
60 | header = [
61 | block.parentHash,
62 | block.sha3Uncles,
63 | block.miner,
64 | block.stateRoot,
65 | block.transactionsRoot,
66 | block.receiptsRoot,
67 | block.logsBloom,
68 | Web3Utils.toBN(block.difficulty),
69 | Web3Utils.toBN(block.number),
70 | block.gasLimit,
71 | block.gasUsed,
72 | Web3Utils.toBN(block.timestamp),
73 | extraDataShort,
74 | block.mixHash,
75 | block.nonce
76 | ];
77 |
78 | encodedUnsignedHeader = '0x' + rlp.encode(header).toString('hex');
79 | encodedUnsignedHeaderHash = Web3Utils.sha3(encodedUnsignedHeader);
80 |
81 | encodedCommittedSeals = '0x' + rlp.encode(committedSeals).toString('hex');
82 |
83 | return {
84 | unsigned: encodedUnsignedHeader,
85 | signed: encodedHeader,
86 | seal: encodedCommittedSeals
87 | };
88 | }
89 |
90 | // Encodes the block headers from clique returning the signed and unsigned instances
91 | encoder.encodeBlockHeader = (block) => {
92 | const signedHeader = [
93 | block.parentHash,
94 | block.sha3Uncles,
95 | block.miner,
96 | block.stateRoot,
97 | block.transactionsRoot,
98 | block.receiptsRoot,
99 | block.logsBloom,
100 | Web3Utils.toBN(block.difficulty),
101 | Web3Utils.toBN(block.number),
102 | block.gasLimit,
103 | block.gasUsed,
104 | Web3Utils.toBN(block.timestamp),
105 | block.extraData,
106 | block.mixHash,
107 | block.nonce
108 | ];
109 |
110 | // Remove last 65 Bytes of extraData
111 | const extraBytes = utils.hexToBytes(block.extraData);
112 | const extraBytesShort = extraBytes.splice(1, extraBytes.length-66);
113 | const extraDataSignature = '0x' + utils.bytesToHex(extraBytes.splice(extraBytes.length-65));
114 | const extraDataShort = '0x' + utils.bytesToHex(extraBytesShort);
115 |
116 | const unsignedHeader = [
117 | block.parentHash,
118 | block.sha3Uncles,
119 | block.miner,
120 | block.stateRoot,
121 | block.transactionsRoot,
122 | block.receiptsRoot,
123 | block.logsBloom,
124 | Web3Utils.toBN(block.difficulty),
125 | Web3Utils.toBN(block.number),
126 | block.gasLimit,
127 | block.gasUsed,
128 | Web3Utils.toBN(block.timestamp),
129 | extraDataShort, // extraData minus the signature
130 | block.mixHash,
131 | block.nonce
132 | ];
133 |
134 | const encodedSignedHeader = '0x' + rlp.encode(signedHeader).toString('hex');
135 | const signedHeaderHash = Web3Utils.sha3(encodedSignedHeader);
136 |
137 | const encodedUnsignedHeader = '0x' + rlp.encode(unsignedHeader).toString('hex');
138 | const unsignedHeaderHash = Web3Utils.sha3(encodedUnsignedHeader);
139 |
140 | return {
141 | unsigned: encodedUnsignedHeader,
142 | signed: encodedSignedHeader,
143 | rawunsigned: unsignedHeader,
144 | rawsigned: signedHeader,
145 | extraDataSignature: extraDataSignature,
146 | extraDataShort: extraDataShort,
147 | extraBytesShort: extraBytesShort
148 | };
149 | }
150 |
151 | // Takes the extraData field from a clique genesis block and finds the validators
152 | encoder.extractValidators = (extraData) => {
153 | genesisExtraData = utils.hexToBytes(extraData)
154 |
155 | // Remove the dressing: 32 bytes before the validators, 65 bytes after them, and an extra byte for the 0x prefix
156 | extraDataValidators = genesisExtraData.splice(33, genesisExtraData.length-32-65-1)
157 |
158 | // Check that the validators length is factor of 20
159 | assert.equal(extraDataValidators.length%20, 0);
160 | numValidators = extraDataValidators.length / 20;
161 |
162 | let validators = [];
163 |
164 | // Append each new validator to the array
165 | for (i = 0; i < numValidators; ++i) {
166 | validator = extraDataValidators.splice(0, 20);
167 | validators.push('0x' + utils.bytesToHex(validator));
168 | }
169 |
170 | return validators;
171 | }
172 |
173 | encoder.appendBlockHeaders = (signedHeaders, signedHeaderIndices, unsignedHeaders, unsignedHeaderIndices, rlpHeaders) => {
174 | // Start creating the long list of block headers
175 | signedHeaders.push(rlpHeaders.signed);
176 | unsignedHeaders.push(rlpHeaders.unsigned);
177 |
178 | // Need to append the cumulative length
179 | if (signedHeaderIndices.length==0) {
180 | signedHeaderIndices.push(utils.hexToBytes(rlpHeaders.signed).splice(1).length);
181 | unsignedHeaderIndices.push(utils.hexToBytes(rlpHeaders.unsigned).splice(1).length);
182 | } else {
183 | signedHeaderIndices.push(utils.hexToBytes(rlpHeaders.signed).splice(1).length + signedHeaderIndices[signedHeaderIndices.length - 1]);
184 | unsignedHeaderIndices.push(utils.hexToBytes(rlpHeaders.unsigned).splice(1).length + unsignedHeaderIndices[unsignedHeaderIndices.length - 1]);
185 | }
186 |
187 | }
188 |
189 | module.exports = encoder;
--------------------------------------------------------------------------------
/test/helpers/utils.js:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2016-2018 Clearmatics Technologies Ltd
2 | // SPDX-License-Identifier: LGPL-3.0+
3 |
4 | const crypto = require('crypto')
5 | const Web3 = require('web3');
6 | const eth_util = require('ethereumjs-util'); // used by utils.signHeader below
7 | var web3;
8 | const utils = {};
9 | // Format required for sending bytes through eth client:
10 | // - hex string representation
11 | // - prefixed with 0x
12 | utils.bufToStr = b => '0x' + b.toString('hex')
13 |
14 | utils.gasPrice = 100000000000 // truffle fixed gas price
15 | utils.joinHex = arr => '0x' + arr.map(el => el.slice(2)).join('')
16 |
17 | utils.hexToBytes = (hex) => {
18 | for (var bytes = [], c = 0; c < hex.length; c += 2)
19 | bytes.push(parseInt(hex.substr(c, 2), 16));
20 | return bytes;
21 | }
22 |
23 | utils.bytesToHex = (bytes) => {
24 | for (var hex = [], i = 0; i < bytes.length; i++) {
25 | hex.push((bytes[i] >>> 4).toString(16));
26 | hex.push((bytes[i] & 0xF).toString(16));
27 | }
28 | return hex.join("");
29 | }
30 |
31 | utils.sha256 = x =>
32 | crypto
33 | .createHash('sha256')
34 | .update(x)
35 | .digest()
36 |
37 | utils.random32 = () => crypto.randomBytes(32)
38 |
39 | utils.randomHex = () => crypto.randomBytes(32).toString('hex');
40 |
41 | utils.randomArr = () => {
42 | const result = []
43 | const size =(Math.floor(Math.random() * 10) + 1);
44 | for(let i = size; 0 < i; i-- )
45 | result.push(utils.randomHex())
46 | return result
47 | }
48 |
49 | utils.isSha256Hash = hashStr => /^0x[0-9a-f]{64}$/i.test(hashStr)
50 |
51 | const newSecretHashPair = () => {
52 | const secret = utils.random32()
53 | const hash = utils.sha256(secret)
54 | return {
55 | secret: utils.bufToStr(secret),
56 | hash: utils.bufToStr(hash),
57 | }
58 | }
59 |
60 | utils.sleep = ms => {
61 | return new Promise(resolve => setTimeout(resolve, ms));
62 | }
63 |
64 | utils.txGas = txReceipt => txReceipt.receipt.gasUsed * utils.gasPrice
65 | utils.txLoggedArgs = txReceipt => txReceipt.logs[0].args
66 | utils.txContractId = txReceipt => utils.txLoggedArgs(txReceipt).contractId
67 |
68 | // Takes a header and private key returning the signed data
69 | // Needs extraData just to be sure of the final byte
70 | utils.signHeader = (headerHash, privateKey, extraData) => {
71 | const sig = eth_util.ecsign(headerHash, privateKey)
72 | if (this._chainId > 0) {
73 | sig.v += this._chainId * 2 + 8
74 | }
75 |
76 | const pubKey = eth_util.ecrecover(headerHash, sig.v, sig.r, sig.s);
77 | const addrBuf = eth_util.pubToAddress(pubKey);
78 |
79 | const newSigBytes = Buffer.concat([sig.r, sig.s]);
80 | let newSig;
81 |
82 | const bytes = utils.hexToBytes(extraData)
83 | const finalByte = bytes.splice(bytes.length-1)
84 | if (finalByte.toString('hex')=="0") {
85 | newSig = newSigBytes.toString('hex') + '00';
86 | }
87 | if (finalByte.toString('hex')=="1") {
88 | newSig = newSigBytes.toString('hex') + '01';
89 | }
90 |
91 | return newSig;
92 | }
93 |
94 |
95 |
96 | utils.initWeb3 = (callback, provider) => {
97 | web3 = new Web3();
98 | var host = process.env.STANDARD_CONTRACTS_RPC_HOST || "localhost";
99 | if (provider == null) {
100 | web3.setProvider(new web3.providers.HttpProvider('http://' + host + ':8545'));
101 | } else {
102 | web3.setProvider(provider);
103 | }
104 | web3.eth.getAccounts(function (err, accs) {
105 | if (err)
106 | return callback(err);
107 | web3.eth.defaultAccount = accs[0];
108 | callback();
109 | });
110 | }
111 |
112 | utils.deploy = (ABI, bytecode, callback) => {
113 | new web3.eth.Contract(ABI, {data: bytecode, gas: "0xFFFFFFFFFFFF"}, function (err, contract) {
114 | if (err) {
115 | callback(err);
116 | // callback fires twice, we only want the second call when the contract is deployed
117 | } else if (contract.address) {
118 | callback(null, contract);
119 | }
120 | });
121 | }
122 |
123 | module.exports = utils;
--------------------------------------------------------------------------------
/test/ibft.js:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2016-2019 Clearmatics Technologies Ltd
2 | // SPDX-License-Identifier: LGPL-3.0+
3 |
4 | /*
5 | IBFT Validation contract test
6 |
7 | Tests here are standalone unit tests for IBFT module functionality.
8 | Other contracts have been mocked to simulate basic behaviour.
9 |
10 | Tests the IBFT scheme for block submission, validator signature verification and more.
11 | */
12 |
13 | const eth_util = require('ethereumjs-util');
14 | const utils = require('./helpers/utils.js');
15 | const encoder = require('./helpers/encoder.js');
16 | const Web3 = require('web3');
17 | const Web3Utils = require('web3-utils');
18 | const rlp = require('rlp');
19 | const sha3 = require('js-sha3').keccak_256
20 |
21 |
22 | const Ibft = artifacts.require("IBFT");
23 | const MockIon = artifacts.require("MockIon");
24 | const MockStorage = artifacts.require("MockStorage");
25 |
26 | const web3 = new Web3();
27 |
28 | web3.setProvider(new web3.providers.HttpProvider('http://localhost:8545'));
29 |
30 | require('chai')
31 | .use(require('chai-as-promised'))
32 | .should();
33 |
34 | function pad(n, width, z) {
35 | z = z || '0';
36 | n = n + '';
37 | return n.length >= width ? n : new Array(width - n.length + 1).join(z) + n;
38 | }
39 |
40 | const DEPLOYEDCHAINID = "0xab830ae0774cb20180c8b463202659184033a9f30a21550b89a2b406c3ac8075"
41 | const TESTCHAINID = "0x22b55e8a4f7c03e1689da845dd463b09299cb3a574e64c68eafc4e99077a7254"
42 |
43 |
44 | const VALIDATORS_BEFORE = [
45 | '0x4335d75841d8b85187cf651ed130774143927c79',
46 | '0x61d7d88dbc76259fcf1f26bc0b5763aebd67aead',
47 | '0x955425273ef777d6430d910f9a8b10adbe95fff6',
48 | '0xf00d3c728929e42000c8d92d1a7e6a666f12e6ed',
49 | '0xd42d697aa23f7b3e209259002b456c57af26edd6'
50 | ];
51 |
52 | const VALIDATORS_AFTER = [
53 | '0x4335d75841d8b85187cf651ed130774143927c79',
54 | '0x61d7d88dbc76259fcf1f26bc0b5763aebd67aead',
55 | '0x955425273ef777d6430d910f9a8b10adbe95fff6',
56 | '0xf00d3c728929e42000c8d92d1a7e6a666f12e6ed'
57 | ];
58 |
59 | const GENESIS_HASH = "0x6893c6fe9270461992e748db2f30aa1359babbd74d0392eb4c3476ef942eb5ec";
60 |
61 | const block = {
62 | difficulty: 1,
63 | extraData: "0xdc83010000886175746f6e69747988676f312e31302e34856c696e7578000000f90164f854944335d75841d8b85187cf651ed130774143927c799461d7d88dbc76259fcf1f26bc0b5763aebd67aead94955425273ef777d6430d910f9a8b10adbe95fff694f00d3c728929e42000c8d92d1a7e6a666f12e6edb8410c11022a97fcb2248a2d757a845b4804755702125f8b7ec6c06503ae0277ad996dc22f81431e8036b6cf9ef7d3c1ff1b65a255c9cb70dd2f4925951503a6fdbf01f8c9b8412d3849c86c8ba3ed9a79cdd71b1684364c4c4efb1f01e83ca8cf663f3c95f7ac64b711cd297527d42fb3111b8f78d5227182f38ccc442be5ac4dcb52efede89a01b84135de3661d0191247c7f835c8eb6d7939052c0da8ae234baf8bd208c00225e706112df9bad5bf773120ba4bbc55f6d18e478de43712c0cd3de7a3e2bfd65abb7c01b841735f482a051e6ad7fb76a815907e68d903b73eff4e472006e56fdeca8155cb575f4c1d3e98cf3a4b013331c1bd171d0d500243ac0e073a5fd382294c4fe996f000",
64 | gasLimit: 4877543,
65 | gasUsed: 0,
66 | hash: "0xed607d816f792bff503fc01bf8903b50aae5bbc6d00293350e38bba92cde40ab",
67 | logsBloom: "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
68 | miner: "0x955425273ef777d6430d910f9a8b10adbe95fff6",
69 | mixHash: "0x63746963616c2062797a616e74696e65206661756c7420746f6c6572616e6365",
70 | nonce: "0x0000000000000000",
71 | number: 38,
72 | parentHash: "0x6893c6fe9270461992e748db2f30aa1359babbd74d0392eb4c3476ef942eb5ec",
73 | receiptsRoot: "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421",
74 | sha3Uncles: "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347",
75 | size: 901,
76 | stateRoot: "0x4e64a3b5ab9c561f72836209e376d035a0aa23a1fc7251e5d21c3c8437fef58e",
77 | timestamp: 1549897775,
78 | totalDifficulty: 39,
79 | transactions: [],
80 | transactionsRoot: "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421",
81 | uncles: []
82 | };
83 |
84 | const block_add = {
85 | difficulty: 1,
86 | extraData: "0xdc83010000886175746f6e69747988676f312e31302e34856c696e7578000000f90179f869944335d75841d8b85187cf651ed130774143927c799461d7d88dbc76259fcf1f26bc0b5763aebd67aead94955425273ef777d6430d910f9a8b10adbe95fff694f00d3c728929e42000c8d92d1a7e6a666f12e6ed94d42d697aa23f7b3e209259002b456c57af26edd6b841a01291465dfa2b138d48f0f819c31ae9e707a2ee2f3bb93d1341371ab315c9473a4b93b6ccb2b9b29462da66c1a95b27e9254cdf9fcac731e84c7183772f091200f8c9b841ce258c674a9b7ec8bacd5386313c976cbf3dd3f63dd704f93b5e71155c3ce11f124bcf430e1c285e0bce060172930a2c8c15054a14b5629b5dcec069c87e570400b841640736f30ef4ee4baf68448d87020366da4ce6ad2d3872027bbcba8cbbad58e01f2e4e057075dad411f958753615e4141bce861f2780e0499a485741154c707601b841490aa29598b1a7ee0830799bc781b47bfb22c884e2ed2aedd6e9c7ca648e1b547cb469e92e5f375bc1bc3abc191cb180abc93bf3cb67009c75d397a1ab4717d901",
87 | gasLimit: 4882305,
88 | gasUsed: 52254,
89 | hash: "0xd9944319153421ebe524ad3648fbb733f8d8b4aaa75bca8e406fc3b8c171e568",
90 | logsBloom: "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
91 | miner: "0xf00d3c728929e42000c8d92d1a7e6a666f12e6ed",
92 | mixHash: "0x63746963616c2062797a616e74696e65206661756c7420746f6c6572616e6365",
93 | nonce: "0x0000000000000000",
94 | number: 39,
95 | parentHash: "0xed607d816f792bff503fc01bf8903b50aae5bbc6d00293350e38bba92cde40ab",
96 | receiptsRoot: "0x5340517c0dcd60ef9d9735035fcd4a55607eff320684f48796ff57b0a28c8933",
97 | sha3Uncles: "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347",
98 | size: 1066,
99 | stateRoot: "0x68ebd003e05d477e02be898089958e509ca2bff03fe4a9ca1bef2b24aefda03d",
100 | timestamp: 1549897776,
101 | totalDifficulty: 40,
102 | transactions: ["0x8c0faa1990b8b4e0ec8129cd8e2ccf5578be92ee9540361efad993b51179594c"],
103 | transactionsRoot: "0xd21dcc8688b5b3ab638474485516cda326615f0e8a9853e97589d198b01916b9",
104 | uncles: []
105 | };
106 |
107 |
108 |
109 | function hexToBytes(hex) {
110 | for (var bytes = [], c = 0; c < hex.length; c += 2)
111 | bytes.push(parseInt(hex.substr(c, 2), 16));
112 | return bytes;
113 | }
114 |
115 | function bytesToHex(bytes) {
116 | for (var hex = [], i = 0; i < bytes.length; i++) {
117 | hex.push((bytes[i] >>> 4).toString(16));
118 | hex.push((bytes[i] & 0xF).toString(16));
119 | }
120 | return hex.join("");
121 | }
122 |
123 |
124 | contract('Ibft.js', (accounts) => {
125 | const joinHex = arr => '0x' + arr.map(el => el.slice(2)).join('');
126 |
127 | const watchEvent = (eventObj) => new Promise((resolve,reject) => eventObj.watch((error,event) => error ? reject(error) : resolve(event)));
128 |
129 | let ion;
130 | let ibft;
131 | let storage;
132 |
133 | beforeEach('setup contract for each test', async function () {
134 | ion = await MockIon.new(DEPLOYEDCHAINID);
135 | ibft = await Ibft.new(ion.address);
136 | storage = await MockStorage.new(ion.address);
137 |
138 | })
139 |
140 | describe('Register Chain', () => {
141 | it('Successful Register Chain', async () => {
142 | // Successfully add id of another chain
143 | let tx = await ibft.RegisterChain(TESTCHAINID, VALIDATORS_BEFORE, GENESIS_HASH, storage.address);
144 | console.log("\tGas used to register chain = " + tx.receipt.gasUsed.toString() + " gas");
145 | let chainExists = await ibft.chains(TESTCHAINID);
146 |
147 | assert(chainExists);
148 |
149 | let chainHead = await ibft.m_chainHeads(TESTCHAINID);
150 | assert.equal(chainHead, GENESIS_HASH);
151 | })
152 |
153 | it('Fail Register Chain Twice', async () => {
154 | // Successfully add id of another chain
155 | let tx = await ibft.RegisterChain(TESTCHAINID, VALIDATORS_BEFORE, GENESIS_HASH, storage.address);
156 | console.log("\tGas used to register chain = " + tx.receipt.gasUsed.toString() + " gas");
157 | let chainExists = await ibft.chains(TESTCHAINID);
158 |
159 | assert(chainExists);
160 |
161 | let chainHead = await ibft.m_chainHeads(TESTCHAINID);
162 | assert.equal(chainHead, GENESIS_HASH);
163 |
164 | // Fail adding id of this chain
165 | await ibft.RegisterChain(DEPLOYEDCHAINID, VALIDATORS_BEFORE, GENESIS_HASH, storage.address).should.be.rejected;
166 |
167 | // Fail adding id of chain already initialised
168 | await ibft.RegisterChain(TESTCHAINID, VALIDATORS_BEFORE, GENESIS_HASH, storage.address).should.be.rejected;
169 | })
170 |
171 | it('Check Validators', async () => {
172 | // Successfully add id of another chain
173 | await ibft.RegisterChain(TESTCHAINID, VALIDATORS_BEFORE, GENESIS_HASH, storage.address);
174 |
175 | let registeredValidators = await ibft.getValidators.call(TESTCHAINID);
176 |
177 | for (let i = 0; i < VALIDATORS_BEFORE.length; i++) {
178 | let validatorExists = registeredValidators.map(v => v.toLowerCase()).some(v => { return v == VALIDATORS_BEFORE[i] });
179 | assert(validatorExists);
180 | }
181 | })
182 |
183 | it('Check Genesis Hash', async () => {
184 | // Successfully add id of another chain
185 | await ibft.RegisterChain(TESTCHAINID, VALIDATORS_BEFORE, GENESIS_HASH, storage.address);
186 |
187 | let chainHead = await ibft.m_chainHeads(TESTCHAINID);
188 | assert.equal(chainHead, GENESIS_HASH);
189 | })
190 | })
191 |
192 | describe('Submit Block', () => {
193 | it('Successful Submit block', async () => {
194 | await ibft.RegisterChain(TESTCHAINID, VALIDATORS_BEFORE, GENESIS_HASH, storage.address);
195 |
196 | let chainHead = await ibft.m_chainHeads(TESTCHAINID);
197 | assert.equal(chainHead, GENESIS_HASH);
198 |
199 |             let rlpHeader = encoder.encodeIbftHeader(block);
200 |
201 | // Submit block should succeed
202 | const validationReceipt = await ibft.SubmitBlock(TESTCHAINID, rlpHeader.unsigned, rlpHeader.signed, rlpHeader.seal, storage.address);
203 | console.log("\tGas used to submit block = " + validationReceipt.receipt.gasUsed.toString() + " gas");
204 |
205 | let event = validationReceipt.receipt.rawLogs.some(l => { return l.topics[0] == '0x' + sha3("AddedBlock()") });
206 | assert.ok(event, "Stored event not emitted");
207 |
208 | const submittedEvent = validationReceipt.logs.find(l => { return l.event == 'BlockSubmitted' });
209 | assert.equal(Web3Utils.sha3(rlpHeader.signed), submittedEvent.args.blockHash);
210 |
211 | let addedBlockHash = await ibft.m_chainHeads.call(TESTCHAINID);
212 | assert.equal(addedBlockHash, block.hash);
213 |
214 | let header = await ibft.m_blockheaders(TESTCHAINID, block.hash);
215 |
216 |             // m_blockheaders returns the stored header as a tuple; index 2 is the parent hash
217 |             let parentHash = header[2];
218 |
219 | // Assert that block was persisted correctly
220 | assert.equal(parentHash, block.parentHash);
221 |
222 | chainHead = await ibft.m_chainHeads(TESTCHAINID);
223 | assert.equal(chainHead, block.hash);
224 | })
225 |
226 | it('Submit Sequential Blocks with Additional Validator', async () => {
227 | await ibft.RegisterChain(TESTCHAINID, VALIDATORS_BEFORE, GENESIS_HASH, storage.address);
228 |
229 |             let rlpHeader = encoder.encodeIbftHeader(block);
230 |
231 | // Submit block should succeed
232 | let validationReceipt = await ibft.SubmitBlock(TESTCHAINID, rlpHeader.unsigned, rlpHeader.signed, rlpHeader.seal, storage.address);
233 | console.log("\tGas used to submit block = " + validationReceipt.receipt.gasUsed.toString() + " gas");
234 | let event = validationReceipt.receipt.rawLogs.some(l => { return l.topics[0] == '0x' + sha3("AddedBlock()") });
235 | assert.ok(event, "Stored event not emitted");
236 |
237 | rlpHeader = encoder.encodeIbftHeader(block_add);
238 |
239 | validationReceipt = await ibft.SubmitBlock(TESTCHAINID, rlpHeader.unsigned, rlpHeader.signed, rlpHeader.seal, storage.address);
240 | event = validationReceipt.receipt.rawLogs.some(l => { return l.topics[0] == '0x' + sha3("AddedBlock()") });
241 | assert.ok(event, "Stored event not emitted");
242 |
243 | const submittedEvent = validationReceipt.logs.find(l => { return l.event == 'BlockSubmitted' });
244 | assert.equal(Web3Utils.sha3(rlpHeader.signed), submittedEvent.args.blockHash);
245 |
246 | let addedBlockHash = await ibft.m_chainHeads.call(TESTCHAINID);
247 | assert.equal(addedBlockHash, block_add.hash);
248 |
249 | let header = await ibft.m_blockheaders(TESTCHAINID, block_add.hash);
250 |
251 |             // m_blockheaders returns the stored header as a tuple; index 2 is the parent hash
252 |             let parentHash = header[2];
253 |
254 | // Assert that block was persisted correctly
255 | assert.equal(parentHash, block_add.parentHash);
256 |
257 | // Check new validators
258 | let registeredValidators = await ibft.getValidators.call(TESTCHAINID);
259 | for (let i = 0; i < VALIDATORS_AFTER.length; i++) {
260 |                 let validatorExists = registeredValidators.map(v => v.toLowerCase()).some(v => { return v == VALIDATORS_AFTER[i] });
261 | assert(validatorExists);
262 | }
263 | })
264 |
265 | it('Fail Submit Block with Unknown Validator', async () => {
266 | await ibft.RegisterChain(TESTCHAINID, VALIDATORS_BEFORE, GENESIS_HASH, storage.address);
267 |
268 | block.extraData = "0xdc83010000886175746f6e69747988676f312e31302e34856c696e7578000000f90164f854941cb62855cd70774634c85c9acb7c3070ce692936946b2f468af3d0ba2f3a09712faea4d379c2e891a194a667ea98809a69724c6672018bd7db799cd3fefc94c2054df3acfdbe5b221866b25e09026734ca5572b841012edd2e5936deaf4c0ee17698dc0fda832bb51a81d929ae3156d73e5475123c19d162cf1e434637c16811d63d1d3b587906933d75e25cedf7bef59e8fa8375d01f8c9b841719c5bc521721e71ff7fafff09fdff4037e678a77a816b08d45b89d55f35edc94b5c51cc3eeba79d3de291c3c46fbf04faec4952e7d0836be9ad5d855f525c9301b841a7c9eed0337f92a5d4caf6f57b3b59ba10a14ea615c6264fc82fcf5b2e4b626f701fd3596cd1f8639b37a41cb4f3a7582bb530790441de73e6e3449284127b4d00b841210db6ef89906ef1c77538426d29b8440a1c987d508e396776e63515df2a345767c195dc540cfabdf86d696c73b4a24632445565d322d8e45fa2668ec5e6c0e000";
269 |
270 |             let rlpHeader = encoder.encodeIbftHeader(block);
271 |
272 | // Submit block should not succeed
273 | await ibft.SubmitBlock(TESTCHAINID, rlpHeader.unsigned, rlpHeader.signed, rlpHeader.seal, storage.address).should.be.rejected;
274 |
275 | })
276 |
277 | it('Fail Submit Block with Insufficient Seals', async () => {
278 | await ibft.RegisterChain(TESTCHAINID, VALIDATORS_BEFORE, GENESIS_HASH, storage.address);
279 |
280 | let badExtraData = "0xf90164f854944335d75841d8b85187cf651ed130774143927c799461d7d88dbc76259fcf1f26bc0b5763aebd67aead94955425273ef777d6430d910f9a8b10adbe95fff694f00d3c728929e42000c8d92d1a7e6a666f12e6edb8410c11022a97fcb2248a2d757a845b4804755702125f8b7ec6c06503ae0277ad996dc22f81431e8036b6cf9ef7d3c1ff1b65a255c9cb70dd2f4925951503a6fdbf01f8c9b8412d3849c86c8ba3ed9a79cdd71b1684364c4c4efb1f01e83ca8cf663f3c95f7ac64b711cd297527d42fb3111b8f78d5227182f38ccc442be5ac4dcb52efede89a01b84135de3661d0191247c7f835c8eb6d7939052c0da8ae234baf8bd208c00225e706112df9bad5bf773120ba4bbc55f6d18e478de43712c0cd3de7a3e2bfd65abb7c01b841735f482a051e6ad7fb76a815907e68d903b73eff4e472006e56fdeca8155cb575f4c1d3e98cf3a4b013331c1bd171d0d500243ac0e073a5fd382294c4fe996f000";
281 |
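            // The RLP payload after the 32-byte vanity prefix of the extraData decodes to
            // [validators, proposerSeal, committedSeals] (Istanbul extra-data layout), so
            // index 2 below is taken to be the committed-seal list; dropping one entry is
            // intended to leave fewer seals than the validation contract requires.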
282 | // Remove seal from extradata
283 | const decodedExtraData = rlp.decode(badExtraData);
284 | decodedExtraData[2].pop()
285 |
286 | // Reapply the rlp encoded istanbul extra minus single seal
287 |             let encodedExtraData = rlp.encode(decodedExtraData).toString('hex');
288 | block.extraData = "0xdc83010000886175746f6e69747988676f312e31302e34856c696e7578000000" + encodedExtraData;
289 |             let rlpHeader = encoder.encodeIbftHeader(block);
290 |
291 | // Submit block should not succeed
292 | await ibft.SubmitBlock(TESTCHAINID, rlpHeader.unsigned, rlpHeader.signed, rlpHeader.seal, storage.address).should.be.rejected;
293 |
294 | })
295 |
296 | })
297 |
298 | });
299 |
--------------------------------------------------------------------------------
/test/ion.js:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2016-2018 Clearmatics Technologies Ltd
2 | // SPDX-License-Identifier: LGPL-3.0+
3 |
4 | /*
5 | Ion Mediator contract test
6 |
7 | Tests here are standalone unit tests for Ion functionality.
8 | Other contracts have been mocked to simulate basic behaviour.
9 |
10 | Tests the central mediator for block passing and validation registering.
11 | */
12 |
13 | const Web3Utils = require('web3-utils');
14 | const utils = require('./helpers/utils.js');
15 | const rlp = require('rlp');
16 | const async = require('async')
17 | const sha3 = require('js-sha3').keccak_256
18 |
19 | // Connect to the Test RPC running
20 | const Web3 = require('web3');
21 | const web3 = new Web3();
22 | web3.setProvider(new web3.providers.HttpProvider('http://localhost:8545'));
23 |
24 | const Ion = artifacts.require("Ion");
25 | const MockValidation = artifacts.require("MockValidation");
26 | const MockStorage = artifacts.require("MockStorage");
27 |
28 | require('chai')
29 | .use(require('chai-as-promised'))
30 | .should();
31 |
32 | const DEPLOYEDCHAINID = "0xab830ae0774cb20180c8b463202659184033a9f30a21550b89a2b406c3ac8075"
33 |
34 | const TESTCHAINID = "0x22b55e8a4f7c03e1689da845dd463b09299cb3a574e64c68eafc4e99077a7254"
35 |
36 | /*
37 | TESTRPC TEST DATA
38 | */
39 |
40 | const TESTBLOCK = {
41 | difficulty: 2,
42 | extraData: '0xd68301080d846765746886676f312e3130856c696e7578000000000000000000583a78dd245604e57368cb2688e42816ebc86eff73ee219dd96b8a56ea6392f75507e703203bc2cc624ce6820987cf9e8324dd1f9f67575502fe6060d723d0e100',
43 | gasLimit: 7509409,
44 | gasUsed: 2883490,
45 | hash: '0x694752333dd1bd0f806cc6ef1063162f4f330c88f9dcd9e61174fcf5e4927eb7',
46 | logsBloom: '0x22440000020000090000000000000000041000080000008000088000080000000200000400000800000000000000400000000000000000000010000008020102000000000000080000000008800000000000022000000004000000010000000000080000000620400440100010200400082000000000000080040010000100020020000000000000080080000001000000000100000400480000000002000000002000080018000008108000100000000000000000020000050010001004000000000102000040004000000000000000000000004400000000000000000000000208000000000400008200020000004022400000000004000200848000000000',
47 | miner: '0x0000000000000000000000000000000000000000',
48 | mixHash: '0x0000000000000000000000000000000000000000000000000000000000000000',
49 | nonce: '0x0000000000000000',
50 | number: 2657422,
51 | parentHash: '0x3471555ab9a99528f02f9cdd8f0017fe2f56e01116acc4fe7f78aee900442f35',
52 | receiptsRoot: '0x907121bec78b40e8256fac47867d955c560b321e93fc9f046f919ffb5e3823ff',
53 | sha3Uncles: '0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347',
54 | size: 4848,
55 | stateRoot: '0xf526f481ffb6c3c56956d596f2b23e1f7ff17c810ba59efb579d9334a1765444',
56 | timestamp: 1531931421,
57 | totalDifficulty: 5023706,
58 | transactions:
59 | [ '0x7adbc5ee3712552a1e85962c3ea3d82394cfed7960d60c12d60ebafe67445450',
60 | '0x6be870e6dfb11894b64371560ec39e563cef91642afd193bfa67874f3508a282',
61 | '0x5ba6422455cb7127958df15c453bfe60d92921b647879864b531fd6589e36af4',
62 | '0xa2597e6fe6882626e12055b1378025aa64a85a03dd23f5dc66034f2ef3746810',
63 | '0x7ffb940740050ae3604f99a4eef07c83de5d75076cae42cb1561c370cba3a0a3',
64 | '0x4d6326a6d4cf606c7e44a4ae6710acd3876363bcaabd1b1b59d29fff4da223c5',
65 | '0x10b3360ef00cd7c4faf826365fddbd33938292c98c55a4cdb37194a142626f63',
66 | '0x655290cb44be2e64d3b1825a86d5647579015c5cffb03ede7f67eb34cea6b97f',
67 | '0x6b5e025ea558f4872112a39539ce9a819bfbb795b04eefcc45e1cf5ea947614c',
68 | '0xefd68b516babcf8a4ca74a358cfca925d9d2d5177ef7b859f3d9183ff522efe8',
69 | '0xa056eeeeb098fd5adb283e12e77a239797c96860c21712963f183937613d3391',
70 | '0xa5d1adf694e3442975a13685a9c7d9013c05a4fdcea5bc827566a331b2fead2b',
71 | '0x95a47360f89c48f0b1a484cbeee8816b6a0e2fc321bdb9db48082bd7272b4ebc',
72 | '0x896d29a87393c6607844fa545d38eb96056d5310a6b4e056dc00adde67c24be2',
73 | '0xef3ce2ad9259920094f7fd5ad00453b35888662696ae9b85a393e55cde3ec28d',
74 | '0x2de8af9b4e84b3ac93adfce81964cc69bafd0a2dbcac3a5f7628ee9e56fd1c8a',
75 | '0x2790cdb3377f556e8f5bc8eaaf9c6c0d36d0f242c2e4226af2aac0203f43019b',
76 | '0x98ae65246249785bd1ac8157900f7e1a2c69d5c3b3ffc97d55b9eacab3e212f0',
77 | '0x7d4f090c58880761eaaab1399864d4a52631db8f0b21bfb7051f9a214ad07993',
78 | '0xafc3ab60059ed38e71c7f6bea036822abe16b2c02fcf770a4f4b5fffcbfe6e7e',
79 | '0x2af8f6c49d1123077f1efd13764cb2a50ff922fbaf49327efc44c6048c38c968',
80 | '0x6d5e1753dc91dae7d528ab9b02350e726e006a5591a5d315a34a46e2a951b3fb',
81 | '0xdc864827159c7fde6bbd1672ed9a90ce5d69f5d0c81761bf689775d19a90387e',
82 | '0x22fb4d90a7125988b2857c50709e544483f898cb1e8036477f9ddd94b177bf93',
83 | '0x999c2e2ba342bed4ccedea01d638db3bbd1abd6d10784c317843880841db6dec',
84 | '0x11355abb5fe745ed458b2a78e116f4a8c2fe046a131eafe08f30d23bd9d10394' ],
85 | transactionsRoot: '0x07f36c7ad26564fa65daebda75a23dfa95d660199092510743f6c8527dd72586',
86 | uncles: []
87 | }
88 |
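// The fields below follow the canonical Ethereum block-header order (parentHash,
// sha3Uncles, miner, stateRoot, transactionsRoot, receiptsRoot, logsBloom, difficulty,
// number, gasLimit, gasUsed, timestamp, extraData, mixHash, nonce); RLP-encoding this
// list yields the raw header bytes passed to SubmitBlock in the tests below.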
89 | const signedHeader = [
90 | TESTBLOCK.parentHash,
91 | TESTBLOCK.sha3Uncles,
92 | TESTBLOCK.miner,
93 | TESTBLOCK.stateRoot,
94 | TESTBLOCK.transactionsRoot,
95 | TESTBLOCK.receiptsRoot,
96 | TESTBLOCK.logsBloom,
97 | Web3Utils.toBN(TESTBLOCK.difficulty),
98 | Web3Utils.toBN(TESTBLOCK.number),
99 | TESTBLOCK.gasLimit,
100 | TESTBLOCK.gasUsed,
101 | Web3Utils.toBN(TESTBLOCK.timestamp),
102 | TESTBLOCK.extraData,
103 | TESTBLOCK.mixHash,
104 | TESTBLOCK.nonce
105 | ];
106 |
107 | const TEST_SIGNED_HEADER = '0x' + rlp.encode(signedHeader).toString('hex');
108 |
109 | contract('Ion.js', (accounts) => {
110 | let ion;
111 | let validation;
112 | let storage;
113 |
114 | beforeEach('setup contract for each test', async function () {
115 | ion = await Ion.new(DEPLOYEDCHAINID);
116 | validation = await MockValidation.new(ion.address);
117 | storage = await MockStorage.new(ion.address);
118 | })
119 |
120 | it('Deploy Ion', async () => {
121 | let chainId = await ion.chainId();
122 |
123 | assert.equal(chainId, DEPLOYEDCHAINID);
124 | })
125 |
126 | describe('Register Validation', () => {
127 | it('Successful registration', async () => {
128 | // Successfully add id of another chain
129 | let registered = await validation.register.call();
130 | await validation.register();
131 |
132 | assert(registered);
133 | })
134 |
135 | it('Fail second registration', async () => {
136 | // Successfully add id of another chain
137 | let registered = await validation.register.call();
138 | await validation.register();
139 |
140 | assert(registered);
141 |
142 | // Fail second attempt to register validation
143 | await validation.register.call().should.be.rejected;
144 | })
145 |
146 | it('Fail registration by non-contract', async () => {
147 | await ion.registerValidationModule().should.be.rejected;
148 | })
149 | })
150 |
151 | describe('Store Block', () => {
152 | it('Successful Store Block', async () => {
153 | await validation.register();
154 |
155 | const tx = await validation.SubmitBlock(storage.address, TESTCHAINID, TEST_SIGNED_HEADER);
156 | let event = tx.receipt.rawLogs.some(l => { return l.topics[0] == '0x' + sha3("AddedBlock()") });
157 | assert.ok(event, "Block not stored");
158 | })
159 |
160 | it('Fail Store Block by unregistered validation', async () => {
161 | await validation.SubmitBlock(storage.address, TESTCHAINID, TEST_SIGNED_HEADER).should.be.rejected;
162 | })
163 |
164 | it('Fail Store Block by non-contract', async () => {
165 | await ion.storeBlock(storage.address, TESTCHAINID, TEST_SIGNED_HEADER).should.be.rejected;
166 | })
167 |
168 | it('Fail Store Block with non contract storage address', async () => {
169 | await ion.storeBlock(accounts[0], TESTCHAINID, TEST_SIGNED_HEADER).should.be.rejected;
170 | })
171 | })
172 | })
--------------------------------------------------------------------------------
/test/patricia_trie_test.js:
--------------------------------------------------------------------------------
1 | const assert = require('assert');
2 | const path = require('path');
3 | const utils = require('./helpers/utils.js');
4 | const async = require('async');
5 |
6 | const PatriciaTrieTest = artifacts.require("PatriciaTrieTest");
7 |
8 | contract('Patricia Trie', (accounts) => {
9 | describe('VerifyProof', async function () {
10 | it('should successfully verify all proofs', async function () {
11 |
12 | let patriciatrietest = await PatriciaTrieTest.new();
13 |
14 | for( let i = 0; i < testData['success'].length; i++) {
15 |                 let data = testData['success'][i];
16 | let result = await patriciatrietest.testVerify.call(data.value, data.nodes, data.path, data.rootHash);
17 | assert.equal(result, true);
18 | }
19 | });
20 |
21 | it('should fail verifying all proofs with incompatible data', async function () {
22 |
23 | let patriciatrietest = await PatriciaTrieTest.new();
24 |
25 | for( let i = 0; i < testData['fail'].length; i++) {
26 |                 let data = testData['fail'][i];
27 | let result = await patriciatrietest.testVerify.call(data.value, data.nodes, data.path, data.rootHash);
28 | assert.equal(result, false);
29 | }
30 | });
31 |
32 | });
33 | });
34 |
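// Each fixture below is one Merkle-Patricia proof passed to PatriciaTrieTest.testVerify:
// rootHash is the trie root, path the RLP-encoded key, value the RLP-encoded expected
// value at that key, and nodes the concatenated RLP nodes along the lookup path
// (field roles inferred from the testVerify calls above, so treat this as a reading aid).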
35 | const testData = {
36 | "success": [{
37 | "rootHash": "0xda2e968e25198a0a41e4dcdc6fcb03b9d49274b3d44cb35d921e4ebe3fb5c54c",
38 | "path": "0x61",
39 | "value": "0x857465737431",
40 | "nodes": "0xf83bf839808080808080c8318685746573743180a0207947cf85c03bd3d9f9ff5119267616318dcef0e12de2f8ca02ff2cdc720a978080808080808080"
41 | }, {
42 | "rootHash": "0xda2e968e25198a0a41e4dcdc6fcb03b9d49274b3d44cb35d921e4ebe3fb5c54c",
43 | "path": "0x826162",
44 | "value": "0x74",
45 | "nodes": "0xf87ff839808080808080c8318685746573743180a0207947cf85c03bd3d9f9ff5119267616318dcef0e12de2f8ca02ff2cdc720a978080808080808080f8428080c58320616274cc842061626386857465737433a05d495bd9e35ab0dab60dec18b21acc860829508e7df1064fce1f0b8fa4c0e8b2808080808080808080808080"
46 | }, {
47 | "rootHash": "0xda2e968e25198a0a41e4dcdc6fcb03b9d49274b3d44cb35d921e4ebe3fb5c54c",
48 | "path": "0x83616263",
49 | "value": "0x857465737433",
50 | "nodes": "0xf87ff839808080808080c8318685746573743180a0207947cf85c03bd3d9f9ff5119267616318dcef0e12de2f8ca02ff2cdc720a978080808080808080f8428080c58320616274cc842061626386857465737433a05d495bd9e35ab0dab60dec18b21acc860829508e7df1064fce1f0b8fa4c0e8b2808080808080808080808080"
51 | }, {
52 | "rootHash": "0xda2e968e25198a0a41e4dcdc6fcb03b9d49274b3d44cb35d921e4ebe3fb5c54c",
53 | "path": "0x8461626564",
54 | "value": "0x857465737435",
55 | "nodes": "0xf8cbf839808080808080c8318685746573743180a0207947cf85c03bd3d9f9ff5119267616318dcef0e12de2f8ca02ff2cdc720a978080808080808080f8428080c58320616274cc842061626386857465737433a05d495bd9e35ab0dab60dec18b21acc860829508e7df1064fce1f0b8fa4c0e8b2808080808080808080808080e583161626a06b1a1127b4c489762c8259381ff9ecf51b7ef0c2879b89e72c993edc944f1ccce5808080ca8220648685746573743480ca822064868574657374358080808080808080808080"
56 | }, {
57 | "rootHash": "0xda2e968e25198a0a41e4dcdc6fcb03b9d49274b3d44cb35d921e4ebe3fb5c54c",
58 | "path": "0x8461626364",
59 | "value": "0x857465737434",
60 | "nodes": "0xf8cbf839808080808080c8318685746573743180a0207947cf85c03bd3d9f9ff5119267616318dcef0e12de2f8ca02ff2cdc720a978080808080808080f8428080c58320616274cc842061626386857465737433a05d495bd9e35ab0dab60dec18b21acc860829508e7df1064fce1f0b8fa4c0e8b2808080808080808080808080e583161626a06b1a1127b4c489762c8259381ff9ecf51b7ef0c2879b89e72c993edc944f1ccce5808080ca8220648685746573743480ca822064868574657374358080808080808080808080"
61 | }],
62 | "fail": [{
63 | "rootHash": "0xda2e968e25198a0a41e4dcdc6fcb03b9d49274b3d44cb35d921e4ebe3fb5c54c",
64 | "path": "0x61",
65 | "value": "0x857465737432",
66 | "nodes": "0xf83bf839808080808080c8318685746573743180a0207947cf85c03bd3d9f9ff5119267616318dcef0e12de2f8ca02ff2cdc720a978080808080808080"
67 | }, {
68 | "rootHash": "0xda2e968e25198a0a41e4dcdc6fcb03b9d49274b3d44cb35d921e4ebe3fb5c54c",
69 | "path": "0x826163",
70 | "value": "0x75",
71 | "nodes": "0xf87ff839808080808080c8318685746573743180a0207947cf85c03bd3d9f9ff5119267616318dcef0e12de2f8ca02ff2cdc720a978080808080808080f8428080c58320616274cc842061626386857465737433a05d495bd9e35ab0dab60dec18b21acc860829508e7df1064fce1f0b8fa4c0e8b2808080808080808080808080"
72 | }, {
73 | "rootHash": "0xda2e968e25198a0a41e4dcdc6fcb03b9d49274b3d44cb35d921e4ebe3fb5c54c",
74 | "path": "0x83616263",
75 | "value": "0x857465737434",
76 | "nodes": "0xf87ff839808080808080c8318685746573743180a0207947cf85c03bd3d9f9ff5119267616318dcef0e12de2f8ca02ff2cdc720a978080808080808080f8428080c58320616274cc842061626386857465737433a05d495bd9e35ab0dab60dec18b21acc860829508e7df1064fce1f0b8fa4c0e8b2808080808080808080808080"
77 | }, {
78 | "rootHash": "0xda2e968e25198a0a41e4dcdc6fcb03b9d49274b3d44cb35d921e4ebe3fb5c54c",
79 | "path": "0x8461626564",
80 | "value": "0x857465737435",
81 | "nodes": "0xf8cbf839808080808080c8318685746573743180a0207947cf85c03bd3d9f9ff5119267616318dcef0e12de2f8ca02ff2cdc720a978080808080808080f8428080c58320616274cc842061626386857465737433a05d495bd9e35ab0dab60dec18b21acc860829508e7df1064fce1f0b8fa4c0e8b2808080808080808080808080e583161626a06b1a1127b4c489762c8259381ff9ecf51b7ef0c2879b89e72c993edc944f1ccce5808080ca8220648685746573743480ca822064868574657374358080808080808080808085"
82 | }, {
83 | "rootHash": "0xda2e968e25198a0a41e4dcdc6fcb03b9d49274b3d44cb35d921e4ebe3fb5c54c",
84 | "path": "0x8461626364",
85 | "value": "0x857465737435",
86 | "nodes": "0xf8cbf839808080808080c8318685746573743180a0207947cf85c03bd3d9f9ff5119267616318dcef0e12de2f8ca02ff2cdc720a978080808080808080f8428080c58320616274cc842061626386857465737433a05d495bd9e35ab0dab60dec18b21acc860829508e7df1064fce1f0b8fa4c0e8b2808080808080808080808080e583161626a06b1a1127b4c489762c8259381ff9ecf51b7ef0c2879b89e72c993edc944f1ccce5808080ca8220648685746573743480ca822064868574657374358080808080808080808080"
87 | }]
88 | }
--------------------------------------------------------------------------------
/test/storage-fabric.js:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2016-2018 Clearmatics Technologies Ltd
2 | // SPDX-License-Identifier: LGPL-3.0+
3 |
4 | /*
5 | Fabric Storage contract test
6 |
7 | Tests here are standalone unit tests for Ion's Fabric storage functionality.
8 | Other contracts have been mocked to simulate basic behaviour.
9 |
10 | Tests Fabric block structure decoding and verification of state transitions.
11 | */
12 |
13 | const rlp = require('rlp');
14 | const async = require('async')
15 | const util = require('util');
16 |
17 | // Connect to the Test RPC running
18 | const Web3 = require('web3');
19 | const web3 = new Web3();
20 | web3.setProvider(new web3.providers.HttpProvider('http://localhost:8545'));
21 |
22 | const MockIon = artifacts.require("MockIon");
23 | const FabricStore = artifacts.require("FabricStore");
24 |
25 | require('chai')
26 | .use(require('chai-as-promised'))
27 | .should();
28 |
29 | const DEPLOYEDCHAINID = "0xab830ae0774cb20180c8b463202659184033a9f30a21550b89a2b406c3ac8075"
30 | const TESTCHAINID = "0x22b55e8a4f7c03e1689da845dd463b09299cb3a574e64c68eafc4e99077a7254"
31 |
32 | const TESTDATA = [{
33 | channelId: "orgchannel",
34 | blocks: [{
35 | hash: "vBmkcC8xbLMAUK-wkLMYGDz9qFdu1n8SbsHp62Of_-o",
36 | number: 4,
37 | prevHash: "hnw1EQE3SXA_LCUsRWXAj5nZ_JjLPm6DGiRn-g7g4Pc",
38 | dataHash: "_xuKFW3Po3gNBXjXac11M39a-o-_92_PC6DWBUWnk4I",
39 | timestampS: 1548756504,
40 | timestampN: 121452526,
41 | transactions: [{
42 | txId: "d4a03d5b71ac3fab92b90bae047c9c5e6ccf0b4396be6807c1724fc0139f999b",
43 | nsrw: [{
44 | namespace: "ExampleCC",
45 | readsets: [{
46 | key: "A",
47 | version: {
48 | blockNumber: 3,
49 | txNumber: 0
50 | }
51 | }, {
52 | key: "B",
53 | version: {
54 | blockNumber: 3,
55 | txNumber: 0
56 | }
57 | }],
58 | writesets: [{
59 | key: "A",
60 | isDelete: "false",
61 | value: "0"
62 | }, {
63 | key: "B",
64 | isDelete: "false",
65 | value: "3"
66 | }]
67 | }, {
68 | namespace: "lscc",
69 | readsets: [{
70 | key: "ExampleCC",
71 | version: {
72 | blockNumber: 3,
73 | txNumber: 0
74 | }
75 | }],
76 | writesets: []
77 | }]
78 | }]
79 | }]
80 | }]
81 |
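// formattedData flattens TESTDATA into the nested-array shape the RLP encoder expects:
// [[channelId, [hash, number, prevHash, dataHash, timestampS, timestampN,
//   [[txId, [[namespace, [readsets...], [writesets...]], ...]], ...]]], ...]
// so that rlp.encode(formattedData) reproduces the CLI-generated block checked below.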
82 | const formattedData = [[
83 | TESTDATA[0].channelId,
84 | [
85 | TESTDATA[0].blocks[0].hash,
86 | TESTDATA[0].blocks[0].number,
87 | TESTDATA[0].blocks[0].prevHash,
88 | TESTDATA[0].blocks[0].dataHash,
89 | TESTDATA[0].blocks[0].timestampS,
90 | TESTDATA[0].blocks[0].timestampN,
91 | [[
92 | TESTDATA[0].blocks[0].transactions[0].txId,
93 | [[
94 | TESTDATA[0].blocks[0].transactions[0].nsrw[0].namespace,
95 | [[
96 | TESTDATA[0].blocks[0].transactions[0].nsrw[0].readsets[0].key,
97 | [
98 | TESTDATA[0].blocks[0].transactions[0].nsrw[0].readsets[0].version.blockNumber,
99 | TESTDATA[0].blocks[0].transactions[0].nsrw[0].readsets[0].version.txNumber
100 | ]
101 | ], [
102 | TESTDATA[0].blocks[0].transactions[0].nsrw[0].readsets[1].key,
103 | [
104 | TESTDATA[0].blocks[0].transactions[0].nsrw[0].readsets[1].version.blockNumber,
105 | TESTDATA[0].blocks[0].transactions[0].nsrw[0].readsets[1].version.txNumber
106 | ]
107 | ]],
108 | [[
109 | TESTDATA[0].blocks[0].transactions[0].nsrw[0].writesets[0].key,
110 | TESTDATA[0].blocks[0].transactions[0].nsrw[0].writesets[0].isDelete,
111 | TESTDATA[0].blocks[0].transactions[0].nsrw[0].writesets[0].value
112 | ],[
113 | TESTDATA[0].blocks[0].transactions[0].nsrw[0].writesets[1].key,
114 | TESTDATA[0].blocks[0].transactions[0].nsrw[0].writesets[1].isDelete,
115 | TESTDATA[0].blocks[0].transactions[0].nsrw[0].writesets[1].value
116 | ]]
117 | ], [
118 | TESTDATA[0].blocks[0].transactions[0].nsrw[1].namespace,
119 | [[
120 | TESTDATA[0].blocks[0].transactions[0].nsrw[1].readsets[0].key,
121 | [
122 | TESTDATA[0].blocks[0].transactions[0].nsrw[1].readsets[0].version.blockNumber,
123 | TESTDATA[0].blocks[0].transactions[0].nsrw[1].readsets[0].version.txNumber
124 | ]
125 | ]],
126 | []
127 | ]]
128 | ]]
129 | ]
130 | ]];
131 |
132 |
133 | contract('FabricStore.sol', (accounts) => {
134 | let ion;
135 | let storage;
136 | let rlpEncodedBlock_Gen = "0x" + rlp.encode(formattedData).toString('hex');
137 | // Generated by CLI https://github.com/Shirikatsu/fabric-examples/blob/fc3ff7243f282a4edf21c6667e71ab02e759c3c5/fabric-cli/cmd/fabric-cli/printer/encoder.go#L40
138 | let rlpEncodedBlock = "0xf90127f901248a6f72676368616e6e656cf90116ab76426d6b63433878624c4d41554b2d776b4c4d5947447a3971466475316e38536273487036324f665f2d6f04ab686e7731455145335358415f4c435573525758416a356e5a5f4a6a4c506d36444769526e2d673767345063ab5f78754b465733506f33674e42586a58616331314d3339612d6f2d5f39325f50433644574255576e6b3449845c50261884073d37eef885f883b84064346130336435623731616333666162393262393062616530343763396335653663636630623433393662653638303763313732346663303133396639393962f83fe8894578616d706c654343cac441c20380c442c20380d2c8418566616c736530c8428566616c736533d5846c736363cecd894578616d706c654343c20380c0";
139 |
140 | beforeEach('setup contract for each test', async function () {
141 | ion = await MockIon.new(DEPLOYEDCHAINID);
142 | storage = await FabricStore.new(ion.address);
143 | })
144 |
145 | describe('Block Encode', () => {
146 | it('Correct Encoding', async () => {
147 | assert.equal(rlpEncodedBlock_Gen, rlpEncodedBlock);
148 | })
149 | })
150 |
151 | describe('Register Chain', () => {
152 | it('Successful Register Chain', async () => {
153 | // Successfully add id of another chain
154 | await ion.addChain(storage.address, TESTCHAINID);
155 |
156 | let chainRegistered = await storage.m_chains(TESTCHAINID);
157 | assert(chainRegistered);
158 |
159 | let chainId = await storage.m_networks.call(TESTCHAINID);
160 | assert.equal(TESTCHAINID, chainId);
161 | })
162 |
163 | it('Fail Register Current Chain', async () => {
164 | // Fail adding deployment chain id
165 | await ion.addChain(storage.address, DEPLOYEDCHAINID).should.be.rejected;
166 | })
167 |
168 | it('Fail Register Chain Twice', async () => {
169 | // Successfully add id of another chain
170 | await ion.addChain(storage.address, TESTCHAINID);
171 |
172 |             let chainRegistered = await storage.m_chains(TESTCHAINID);
173 | assert(chainRegistered);
174 |
175 | await ion.addChain(storage.address, TESTCHAINID).should.be.rejected;
176 | })
177 | })
178 |
179 | describe('Add Block', () => {
180 | it('Successful Add Block', async () => {
181 | // Successfully add id of another chain
182 | await ion.addChain(storage.address, TESTCHAINID);
183 |
184 | let receipt = await ion.storeBlock(storage.address, TESTCHAINID, rlpEncodedBlock);
185 | console.log("\tGas used to store fabric block: %d", receipt.receipt.gasUsed);
186 |
187 | let block = await storage.getBlock.call(TESTCHAINID, TESTDATA[0].channelId, TESTDATA[0].blocks[0].hash);
188 |
189 | assert.equal(block[0], TESTDATA[0].blocks[0].number);
190 | assert.equal(block[1], TESTDATA[0].blocks[0].hash);
191 | assert.equal(block[2], TESTDATA[0].blocks[0].prevHash);
192 | assert.equal(block[3], TESTDATA[0].blocks[0].dataHash);
193 | assert.equal(block[4], TESTDATA[0].blocks[0].timestampS);
194 | assert.equal(block[5], TESTDATA[0].blocks[0].timestampN);
195 | assert.equal(block[6], TESTDATA[0].blocks[0].transactions[0].txId);
196 |
197 | let tx = await storage.getTransaction.call(TESTCHAINID, TESTDATA[0].channelId, TESTDATA[0].blocks[0].transactions[0].txId);
198 |
199 | assert.equal(tx[0], TESTDATA[0].blocks[0].hash);
200 | assert.equal(tx[1], TESTDATA[0].blocks[0].transactions[0].nsrw[0].namespace + "," + TESTDATA[0].blocks[0].transactions[0].nsrw[1].namespace);
201 |
202 | let txExists = await storage.isTransactionExists.call(TESTCHAINID, TESTDATA[0].channelId, TESTDATA[0].blocks[0].transactions[0].txId);
203 |
204 | assert(txExists);
205 |
206 | let nsrw = await storage.getNSRW.call(TESTCHAINID, TESTDATA[0].channelId, TESTDATA[0].blocks[0].transactions[0].txId, TESTDATA[0].blocks[0].transactions[0].nsrw[0].namespace);
207 |
208 | let expectedReadset = util.format("{ key: %s, version: { blockNo: %d, txNo: %d } } { key: %s, version: { blockNo: %d, txNo: %d } } ",
209 | TESTDATA[0].blocks[0].transactions[0].nsrw[0].readsets[0].key,
210 | TESTDATA[0].blocks[0].transactions[0].nsrw[0].readsets[0].version.blockNumber,
211 | TESTDATA[0].blocks[0].transactions[0].nsrw[0].readsets[0].version.txNumber,
212 | TESTDATA[0].blocks[0].transactions[0].nsrw[0].readsets[1].key,
213 | TESTDATA[0].blocks[0].transactions[0].nsrw[0].readsets[1].version.blockNumber,
214 | TESTDATA[0].blocks[0].transactions[0].nsrw[0].readsets[1].version.txNumber
215 | )
216 |
217 | assert.equal(expectedReadset, nsrw[0]);
218 |
219 | let expectedWriteset = util.format("{ key: %s, isDelete: %s, value: %s } { key: %s, isDelete: %s, value: %s } ",
220 | TESTDATA[0].blocks[0].transactions[0].nsrw[0].writesets[0].key,
221 | TESTDATA[0].blocks[0].transactions[0].nsrw[0].writesets[0].isDelete,
222 | TESTDATA[0].blocks[0].transactions[0].nsrw[0].writesets[0].value,
223 | TESTDATA[0].blocks[0].transactions[0].nsrw[0].writesets[1].key,
224 | TESTDATA[0].blocks[0].transactions[0].nsrw[0].writesets[1].isDelete,
225 | TESTDATA[0].blocks[0].transactions[0].nsrw[0].writesets[1].value
226 | )
227 |
228 | assert.equal(expectedWriteset, nsrw[1]);
229 |
230 | nsrw = await storage.getNSRW.call(TESTCHAINID, TESTDATA[0].channelId, TESTDATA[0].blocks[0].transactions[0].txId, TESTDATA[0].blocks[0].transactions[0].nsrw[1].namespace);
231 |
232 | expectedReadset = util.format("{ key: %s, version: { blockNo: %d, txNo: %d } } ",
233 | TESTDATA[0].blocks[0].transactions[0].nsrw[1].readsets[0].key,
234 | TESTDATA[0].blocks[0].transactions[0].nsrw[1].readsets[0].version.blockNumber,
235 | TESTDATA[0].blocks[0].transactions[0].nsrw[1].readsets[0].version.txNumber
236 | )
237 |
238 | assert.equal(expectedReadset, nsrw[0]);
239 |
240 | expectedWriteset = "";
241 | assert.equal(expectedWriteset, nsrw[1]);
242 |
243 | let state = await storage.getState.call(TESTCHAINID, TESTDATA[0].channelId, TESTDATA[0].blocks[0].transactions[0].nsrw[0].writesets[0].key);
244 |
245 | assert.equal(state[0], TESTDATA[0].blocks[0].number);
246 | assert.equal(state[1], 0);
247 | assert.equal(state[2], TESTDATA[0].blocks[0].transactions[0].nsrw[0].writesets[0].value);
248 |
249 | state = await storage.getState.call(TESTCHAINID, TESTDATA[0].channelId, TESTDATA[0].blocks[0].transactions[0].nsrw[0].writesets[1].key);
250 |
251 | assert.equal(state[0], TESTDATA[0].blocks[0].number);
252 | assert.equal(state[1], 0);
253 | assert.equal(state[2], TESTDATA[0].blocks[0].transactions[0].nsrw[0].writesets[1].value);
254 | })
255 |
256 | it('Fail Add Block from unregistered chain', async () => {
257 | await ion.storeBlock(storage.address, TESTCHAINID, rlpEncodedBlock).should.be.rejected;
258 | })
259 |
260 | it('Fail Add Block from non-ion', async () => {
261 | await ion.addChain(storage.address, TESTCHAINID);
262 |
263 | await storage.addBlock(TESTCHAINID, rlpEncodedBlock).should.be.rejected;
264 | })
265 |
266 | it('Fail Add Block with malformed data', async () => {
267 | // Successfully add id of another chain
268 | await ion.addChain(storage.address, TESTCHAINID);
269 |
270 | await ion.storeBlock(storage.address, TESTCHAINID, "0xf86707843b9aca008257c39461621bcf02914668f8404c1f860e92fc1893f74c8084457094cc1ba07e2ebe15f4ece2fd8ffc9a49d7e9e4e71a30534023ca6b24ab4000567709ad53a013a61e910eb7145aa93e865664c54846f26e09a74bd577eaf66b5dd00d334288").should.be.rejected;
271 | })
272 |
273 | it('Fail Add Same Block Twice', async () => {
274 | // Successfully add id of another chain
275 | await ion.addChain(storage.address, TESTCHAINID);
276 |
277 | await ion.storeBlock(storage.address, TESTCHAINID, rlpEncodedBlock);
278 |
279 | await ion.storeBlock(storage.address, TESTCHAINID, rlpEncodedBlock).should.be.rejected;
280 | })
281 | })
282 | })
--------------------------------------------------------------------------------
/truffle.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | networks: {
3 | development: {
4 | host: "localhost",
5 | port: 8545,
6 | gas: 0xFFFFFFFFFFFFF,
7 | network_id: "*"
8 | },
9 | clique: {
10 | host: "localhost",
11 | port: 8501,
12 | network_id: "*"
13 | },
14 | coverage: {
15 | host: "localhost",
16 | port: 8555,
17 | network_id: "*", // Match any network id
18 | gas: 0xFFFFFFF,
19 |         gasPrice: 0x1
20 | },
21 | },
22 | mocha: {
23 | useColors: true,
24 | enableTimeouts: false
25 | },
26 | solc: {
27 | optimizer: {
28 | enabled: true,
29 | runs: 200
30 | }
31 | }
32 | };
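// Select a network at runtime with Truffle's --network flag, e.g.
//   npx truffle test --network clique
// (assumes a Clique node listening on localhost:8501, as configured above; the default
// "development" network expects a local test RPC such as ganache on port 8545).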
--------------------------------------------------------------------------------