├── pallets ├── .gitignore ├── account-linker │ ├── src │ │ ├── tests │ │ │ ├── mod.rs │ │ │ ├── polkadot.rs │ │ │ ├── btc.rs │ │ │ └── eth.rs │ │ ├── btc │ │ │ ├── mod.rs │ │ │ ├── legacy.rs │ │ │ ├── base58.rs │ │ │ └── witness.rs │ │ ├── benchmarking.rs │ │ ├── mock.rs │ │ ├── weights.rs │ │ ├── util_eth.rs │ │ └── lib.rs │ └── Cargo.toml ├── nft │ ├── src │ │ ├── merkle_proof.rs │ │ ├── impl_nonfungibles.rs │ │ ├── weights.rs │ │ ├── benchmarking.rs │ │ ├── mock.rs │ │ ├── lib.rs │ │ └── tests.rs │ └── Cargo.toml └── offchain-worker │ ├── src │ ├── utils.rs │ ├── benchmarking.rs │ ├── ocw_manual_test.md │ ├── test_data.md │ ├── weights.rs │ ├── tests.rs │ ├── urls.rs │ └── lib.rs │ └── Cargo.toml ├── .github ├── ISSUE_TEMPLATE │ └── new-story-template.md └── workflows │ ├── pull_request_trigger_dev.yml │ ├── pull_request_trigger_rococo.yml │ └── build_test.yml ├── .gitignore ├── README.md ├── merkle-distributor ├── package.json ├── distributor.ts ├── balance-tree.ts └── merkle-tree.ts ├── rustfmt.toml ├── Cargo.toml └── LICENSE /pallets/.gitignore: -------------------------------------------------------------------------------- 1 | **/target/ -------------------------------------------------------------------------------- /pallets/account-linker/src/tests/mod.rs: -------------------------------------------------------------------------------- 1 | mod btc; 2 | mod eth; 3 | mod polkadot; 4 | -------------------------------------------------------------------------------- /pallets/account-linker/src/btc/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod base58; 2 | pub mod legacy; 3 | pub mod witness; 4 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/new-story-template.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: New Story Template 3 | about: Create a new story 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Description** 11 | A clear and concise description of what the story is about. Leave it empty if the title already explains everything. 12 | 13 | **Acceptance Criteria** 14 | - [ ] Add xxx functionality. 15 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | /target/ 4 | 5 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 6 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 7 | Cargo.lock 8 | 9 | # These are backup files generated by rustfmt 10 | **/*.rs.bk 11 | 12 | **/node_modules/ 13 | 14 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # litentry-pallets 2 | [![Actions Status](https://github.com/litentry/litentry-pallets/workflows/Rust/badge.svg)](https://github.com/litentry/litentry-pallets/actions) 3 | 4 | This repository collets all Litentry pallets. 
Currently it includes: 5 | 6 | * account-linker 7 | * nft 8 | * offchain-worker 9 | 10 | Documentation can be found under [Litentry Pallets Rust Documentation](https://litentry.github.io/litentry-pallets/) 11 | -------------------------------------------------------------------------------- /merkle-distributor/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "merkle-distributor", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.ts", 6 | "scripts": { 7 | "test": "ts-node distributor.ts" 8 | }, 9 | "author": "Han Zhao - Litentry", 10 | "license": "ISC", 11 | "devDependencies": { 12 | "@polkadot/api": "^2.10.1", 13 | "ethereumjs-util": "^7.0.4", 14 | "prettier": "^2.0.5", 15 | "ts-node": "^8.5.4", 16 | "typescript": "^3.7.3" 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /rustfmt.toml: -------------------------------------------------------------------------------- 1 | # Basic 2 | hard_tabs = true 3 | max_width = 100 4 | use_small_heuristics = "Max" 5 | # Imports 6 | imports_granularity = "Crate" 7 | reorder_imports = true 8 | # Consistency 9 | newline_style = "Unix" 10 | # Misc 11 | chain_width = 80 12 | spaces_around_ranges = false 13 | binop_separator = "Back" 14 | reorder_impl_items = false 15 | match_arm_leading_pipes = "Preserve" 16 | match_arm_blocks = false 17 | match_block_trailing_comma = true 18 | trailing_comma = "Vertical" 19 | trailing_semicolon = false 20 | use_field_init_shorthand = true -------------------------------------------------------------------------------- /.github/workflows/pull_request_trigger_dev.yml: -------------------------------------------------------------------------------- 1 | name: Remote PR Action Initiator Litentry Node 2 | 3 | on: 4 | push: 5 | branches: dev 6 | 7 | jobs: 8 | pull-request-trigger-node: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - name: Dispatch pull request trigger event 12 | run: | 13 | curl -X POST https://api.github.com/repos/litentry/litentry-node/dispatches \ 14 | -H 'Accept: application/vnd.github.everest-preview+json' \ 15 | -u ${{ secrets.ACCESS_TOKEN }} \ 16 | --data '{"event_type": "integration-test-trigger", "client_payload": { "repository": "'"$GITHUB_REPOSITORY"'", "ref": "${{ github.ref }}", "sha": "${{ github.sha }}" }}' 17 | -------------------------------------------------------------------------------- /.github/workflows/pull_request_trigger_rococo.yml: -------------------------------------------------------------------------------- 1 | name: Remote PR Action Initiator Litentry Parachain 2 | 3 | on: 4 | push: 5 | branches: rococo 6 | 7 | jobs: 8 | pull-request-trigger-parachain: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - name: Dispatch pull request trigger event 12 | run: | 13 | curl -X POST https://api.github.com/repos/litentry/litentry-parachain/dispatches \ 14 | -H 'Accept: application/vnd.github.everest-preview+json' \ 15 | -u ${{ secrets.ACCESS_TOKEN }} \ 16 | --data '{"event_type": "integration-test-trigger", "client_payload": { "repository": "'"$GITHUB_REPOSITORY"'", "ref": "${{ github.ref }}", "sha": "${{ github.sha }}" }}' 17 | -------------------------------------------------------------------------------- /pallets/nft/src/merkle_proof.rs: -------------------------------------------------------------------------------- 1 | use crate::HashByte32; 2 | use sp_core::hash::H256; 3 | use sp_io::hashing::keccak_256; 4 | use sp_std::vec::Vec; 5 | 6 | /// Verify the given Merkle 
proof and Merkle root 7 | /// - Each pair of leaves and each pair of pre-images are assumed to be sorted. 8 | /// - With reference of https://docs.openzeppelin.com/contracts/4.x/api/utils#MerkleProof 9 | pub fn proof_verify( 10 | computed_hash: &HashByte32, 11 | proof: &Vec, 12 | root: &HashByte32, 13 | ) -> bool { 14 | let mut next_hash = computed_hash.clone(); 15 | 16 | for iter in proof { 17 | let iter_hash = H256::from_slice(iter); 18 | 19 | if iter_hash < H256::from_slice(&next_hash) { 20 | next_hash = keccak_256(&[iter_hash.as_bytes(), &next_hash].concat()); 21 | } else { 22 | next_hash = keccak_256(&[&next_hash, iter_hash.as_bytes()].concat()); 23 | } 24 | } 25 | 26 | next_hash == *root 27 | } 28 | -------------------------------------------------------------------------------- /merkle-distributor/distributor.ts: -------------------------------------------------------------------------------- 1 | import BalanceTree from "./balance-tree"; 2 | import { Keyring } from "@polkadot/api"; 3 | import { cryptoWaitReady } from "@polkadot/util-crypto"; 4 | 5 | //await cryptoWaitReady() 6 | 7 | //const keyring = new Keyring({ type: "sr25519" }) 8 | 9 | const alice = "5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY"; //keyring.addFromUri("//Alice", { name: "Alice default" }); 10 | const bob = "5FHneW46xGXgs5mUiveU4sbTyGBzmstUspZC92UhjJM694ty"; //keyring.addFromUri("//Bob", { name: "Bob default" }); 11 | //const dave = '5DAAnrj7VHTznn2AWBemMuyBwZWs6FNFjdyVXUeYum3PTXFy'; 12 | 13 | console.log(`Start building a merkle tree ...`); 14 | 15 | let tree = new BalanceTree([alice, bob]); 16 | 17 | console.log(`Build a tree from ${tree.getMekleTree().getLeaves()}`); 18 | 19 | console.log(`Merkle tree built successfully!!!`); 20 | 21 | console.log(`Root is ${tree.getHexRoot()}`); 22 | 23 | console.log(`Proof of Alice is ${tree.getProof(0, alice)}`); 24 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = [ 3 | 'pallets/account-linker', 4 | 'pallets/nft', 5 | 'pallets/offchain-worker', 6 | ] 7 | 8 | [profile.dev] 9 | opt-level = 0 10 | debug = true 11 | debug-assertions = true 12 | overflow-checks = true 13 | lto = false 14 | panic = 'unwind' 15 | incremental = true 16 | codegen-units = 256 17 | rpath = false 18 | 19 | [profile.test] 20 | opt-level = 0 21 | debug = 2 22 | debug-assertions = true 23 | overflow-checks = true 24 | lto = false 25 | panic = 'unwind' # This setting is always ignored. 26 | incremental = true 27 | codegen-units = 256 28 | rpath = false 29 | 30 | [profile.bench] 31 | opt-level = 3 32 | debug = false 33 | debug-assertions = false 34 | overflow-checks = false 35 | lto = false 36 | panic = 'unwind' # This setting is always ignored. 
37 | incremental = false 38 | codegen-units = 16 39 | rpath = false 40 | 41 | [profile.release] 42 | opt-level = 3 43 | debug = false 44 | debug-assertions = false 45 | overflow-checks = false 46 | lto = false 47 | panic = 'unwind' 48 | incremental = false 49 | codegen-units = 16 50 | rpath = false 51 | -------------------------------------------------------------------------------- /.github/workflows/build_test.yml: -------------------------------------------------------------------------------- 1 | name: Rust 2 | 3 | on: 4 | push: 5 | branches: [ dev, stable ] 6 | pull_request: 7 | branches: [ dev, stable ] 8 | 9 | env: 10 | CARGO_TERM_COLOR: always 11 | 12 | jobs: 13 | 14 | build: 15 | 16 | runs-on: ubuntu-latest 17 | 18 | steps: 19 | - uses: actions/checkout@v2 20 | - name: Add wasm toolchain 21 | uses: actions-rs/toolchain@v1 22 | with: 23 | toolchain: stable-2021-06-17 24 | target: wasm32-unknown-unknown 25 | override: true 26 | - name: Build 27 | run: cargo build 28 | - name: Run unit tests 29 | run: cargo test --verbose --features runtime-benchmarks 30 | 31 | deploy: 32 | 33 | runs-on: ubuntu-latest 34 | needs: build 35 | if: github.ref == 'refs/heads/dev' 36 | 37 | steps: 38 | - uses: actions/checkout@v2 39 | - name: Docs 40 | run: | 41 | cargo doc --no-deps --all-features 42 | echo "" > target/doc/index.html 43 | - name: Deploy 44 | uses: JamesIves/github-pages-deploy-action@4.1.4 45 | with: 46 | branch: gh-pages 47 | folder: target/doc 48 | -------------------------------------------------------------------------------- /pallets/account-linker/src/tests/polkadot.rs: -------------------------------------------------------------------------------- 1 | use crate::mock::*; 2 | 3 | use frame_support::{assert_noop, assert_ok}; 4 | use sp_runtime::AccountId32; 5 | 6 | #[test] 7 | fn test_insert_polkadot_address() { 8 | new_test_ext().execute_with(|| { 9 | run_to_block(1); 10 | 11 | let origin: AccountId32 = AccountId32::from([0u8; 32]); 12 | let target: AccountId32 = AccountId32::from([1u8; 32]); 13 | 14 | assert_ok!(AccountLinker::link_polkadot(Origin::signed(origin.clone()), target.clone(), 0)); 15 | 16 | assert_eq!(AccountLinker::polkadot_pending(&origin), (target.clone(), 0)); 17 | 18 | assert_ok!(AccountLinker::accept_polkadot(Origin::signed(target.clone()), origin.clone())); 19 | 20 | assert_eq!( 21 | events(), 22 | [Event::AccountLinker(crate::Event::PolkadotAddressLinked( 23 | origin.clone(), 24 | target.clone() 25 | )),] 26 | ); 27 | 28 | assert_eq!(AccountLinker::polkadot_addresses(&origin), vec![target]); 29 | }); 30 | } 31 | 32 | #[test] 33 | fn test_no_polkadot_pending_address() { 34 | new_test_ext().execute_with(|| { 35 | run_to_block(1); 36 | 37 | let origin: AccountId32 = AccountId32::from([0u8; 32]); 38 | let target: AccountId32 = AccountId32::from([1u8; 32]); 39 | 40 | assert_eq!( 41 | AccountLinker::polkadot_pending(&origin), 42 | (AccountId32::default(), u32::default()) 43 | ); 44 | 45 | assert_noop!( 46 | AccountLinker::accept_polkadot(Origin::signed(target.clone()), origin.clone()), 47 | AccountLinkerError::WrongPendingRequest 48 | ); 49 | }); 50 | } 51 | -------------------------------------------------------------------------------- /pallets/offchain-worker/src/utils.rs: -------------------------------------------------------------------------------- 1 | use sp_std::prelude::*; 2 | 3 | // u128 number string to u128 4 | pub fn chars_to_u128(vec: &Vec) -> Result { 5 | // Check if the number string is decimal or hexadecimal (whether starting with 0x or not) 6 | let 
base = if vec.len() >= 2 && vec[0] == '0' && vec[1] == 'x' { 7 | // This is a hexadecimal number 8 | 16 9 | } else { 10 | // This is a decimal number 11 | 10 12 | }; 13 | 14 | let mut result: u128 = 0; 15 | for (i, item) in vec.iter().enumerate() { 16 | // Skip the 0 and x digit for hex. 17 | // Using skip here instead of a new vec build to avoid an unnecessary copy operation 18 | if base == 16 && i < 2 { 19 | continue 20 | } 21 | 22 | let n = item.to_digit(base); 23 | match n { 24 | Some(i) => { 25 | let i_64 = i as u128; 26 | result = result * base as u128 + i_64; 27 | if result < i_64 { 28 | return Err("Wrong u128 balance data format") 29 | } 30 | }, 31 | None => return Err("Wrong u128 balance data format"), 32 | } 33 | } 34 | return Ok(result) 35 | } 36 | 37 | // number byte to string byte 38 | pub fn u8_to_str_byte(a: u8) -> u8 { 39 | if a < 10 { 40 | return a + 48 as u8 41 | } else { 42 | return a + 87 as u8 43 | } 44 | } 45 | 46 | // address to string bytes 47 | pub fn address_to_string(address: &[u8; 20]) -> Vec { 48 | let mut vec_result: Vec = Vec::new(); 49 | for item in address { 50 | let a: u8 = item & 0x0F; 51 | let b: u8 = item >> 4; 52 | vec_result.push(u8_to_str_byte(b)); 53 | vec_result.push(u8_to_str_byte(a)); 54 | } 55 | return vec_result 56 | } 57 | -------------------------------------------------------------------------------- /pallets/offchain-worker/src/benchmarking.rs: -------------------------------------------------------------------------------- 1 | #![cfg(feature = "runtime-benchmarks")] 2 | 3 | use super::*; 4 | 5 | use frame_benchmarking::{account, benchmarks, impl_benchmark_test_suite}; 6 | use frame_support::{ensure, traits::OnFinalize}; 7 | 8 | use frame_system::RawOrigin; 9 | use sp_std::prelude::*; 10 | 11 | use crate::Pallet as OCW; 12 | 13 | benchmarks! 
{ 14 | 15 | asset_claim { 16 | let caller = account("caller", 0, 0); 17 | }: asset_claim(RawOrigin::Signed(caller)) 18 | 19 | submit_balance { 20 | let caller = account("caller", 0, 0); 21 | let account_id = account("Alice", 0, 0); 22 | >::insert(&account_id, Some(0_u32)); 23 | let block_number = 1_u32; 24 | let data_source = urls::DataSource::EthEtherScan; 25 | let balance = 0_u128; 26 | 27 | }: submit_balance(RawOrigin::Signed(caller), account_id, block_number.into(), data_source.into(), balance) 28 | 29 | on_finalize { 30 | let caller: T::AccountId = account("caller", 0, 0); 31 | 32 | let block_number = 4_u32; 33 | let data_source = crate::urls::DataSource::EthEtherScan; 34 | 35 | for index in 1..100 { 36 | let sender: T::AccountId = account("sender", index, index); 37 | let balance = index as u128; 38 | let query_key: crate::QueryKey:: = crate::QueryKey{ 39 | account: sender.clone(), 40 | data_source: data_source}; 41 | 42 | CommitAccountBalance::::insert(&sender.clone(), &query_key, Some(balance)); 43 | } 44 | 45 | }: { OCW::::on_finalize(block_number.into()); } 46 | } 47 | -------------------------------------------------------------------------------- /merkle-distributor/balance-tree.ts: -------------------------------------------------------------------------------- 1 | import { UInt } from "@polkadot/types/codec"; 2 | import { TypeRegistry } from "@polkadot/types/create"; 3 | import { decodeAddress, keccakAsU8a } from "@polkadot/util-crypto"; 4 | import MerkleTree from "./merkle-tree"; 5 | 6 | export default class BalanceTree { 7 | private readonly tree: MerkleTree; 8 | constructor(accounts: string[]) { 9 | this.tree = new MerkleTree( 10 | accounts.map((account, index) => { 11 | return BalanceTree.toNode(index, account); 12 | }) 13 | ); 14 | } 15 | 16 | public getMekleTree(): MerkleTree { 17 | return this.tree; 18 | } 19 | 20 | public static verifyProof( 21 | index: number, 22 | account: string, 23 | proof: Buffer[], 24 | root: Buffer 25 | ): boolean { 26 | let pair = BalanceTree.toNode(index, account); 27 | for (const item of proof) { 28 | pair = MerkleTree.combinedHash(pair, item); 29 | } 30 | return pair.equals(root); 31 | } 32 | 33 | // keccak256(SCALE.encode(index, account)) 34 | public static toNode(index: number, account: string): Buffer { 35 | const registry = new TypeRegistry(); 36 | 37 | // encode index as u16 SCALE 38 | let encodedIndex = new UInt(registry, index, 16).toU8a(); 39 | 40 | // decode address from ss58 to u8a 41 | // TODO consider error handling 42 | let encodedAcc = decodeAddress(account); 43 | 44 | let encodedMsg = new Uint8Array(encodedIndex.length + encodedAcc.length); 45 | 46 | encodedMsg.set(encodedIndex); 47 | encodedMsg.set(encodedAcc, encodedIndex.length); 48 | 49 | console.log(`Encoded bytes is ${encodedMsg}`); 50 | 51 | let buf = Buffer.from(keccakAsU8a(encodedMsg)); 52 | return buf; 53 | } 54 | 55 | public getHexRoot(): string { 56 | return this.tree.getHexRoot(); 57 | } 58 | 59 | // returns the hex bytes32 values of the proof 60 | public getProof(index: number, account: string): string[] { 61 | return this.tree.getHexProof(BalanceTree.toNode(index, account)); 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /pallets/offchain-worker/src/ocw_manual_test.md: -------------------------------------------------------------------------------- 1 | # ocw manual test instruction 2 | 1. 
start the local token server
3 | cp token-server/.env.example .env
4 | update the API tokens in .env
5 | source .env
6 | target/release/litentry-token-server
7 | 2. start the litentry node
8 | target/release/litentry-node --dev
9 | 3. create an ocw session account and send it to the ocw module via a curl command
10 | bash-5.0$ subkey generate
11 | 
12 | Secret phrase `loop high amazing chat tennis auto denial attend type quit liquid tonight` is account:
13 | Secret seed: 0xad9e7d8233eff5b32ebdf1cfd6d2007f0bfa7c73f7d2d7e60f95dbd642a8af54
14 | Public key (hex): 0x8c35b97c56099cf3b5c631d1f296abbb11289857e74a8f60936290080d56da6d
15 | Account ID: 0x8c35b97c56099cf3b5c631d1f296abbb11289857e74a8f60936290080d56da6d
16 | SS58 Address: 5FEYX9NES9mAJt1Xg4WebmHWywxyeGQK8G3oEBXtyfZrRePX
17 | 
18 | $ curl http://localhost:9933 -H "Content-Type:application/json;charset=utf-8" -d \
19 | '{
20 | "jsonrpc":"2.0",
21 | "id":1,
22 | "method":"author_insertKey",
23 | "params": [
24 | "ocw!",
25 | "loop high amazing chat tennis auto denial attend type quit liquid tonight",
26 | "0x8c35b97c56099cf3b5c631d1f296abbb11289857e74a8f60936290080d56da6d"
27 | ]
28 | }'
29 | 4. transfer some tokens to the ocw account in the UI
30 | transaction -> balances -> transfer -> from Alice to 5FEYX9NES9mAJt1Xg4WebmHWywxyeGQK8G3oEBXtyfZrRePX
31 | 5. link an eth account to Alice
32 | transaction -> AccountLinkerModule -> linkEth
33 | 
34 | eth address 0x4d88dc5d528a33e4b8be579e9476715f60060582
35 | block number 10000
36 | r 0x318400f0f9bd15f0d8842870b510e996dffc944b77111ded03a4255c66e82d42
37 | s 0x7132e765d5e6bb21ba046dbb98e28bb28cb2bebe0c8aced2c547aca60a554892
38 | v 0x1c
39 | 6. call asset claim in the ocw module
40 | transaction -> OffchainWorker -> AssetClaim
41 | 7. after 5 blocks, check the balance
42 | state -> OffchainWorker -> accountBalance
43 | its eth balance should be 0.5 eth.
44 | 8. check the balance of the ocw account
45 | its balance should be increased by 1 dot.
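9. (optional) query the claimed balance programmatically
The same check can be scripted with @polkadot/api, which this repo already uses in merkle-distributor. This is only a sketch: the storage path `api.query.offchainWorker.accountBalance` and the `ws://127.0.0.1:9944` endpoint are assumed from the module/storage names above and the default dev-node settings, so adjust them if your runtime or node differs.

import { ApiPromise, WsProvider } from "@polkadot/api";

async function checkClaimedBalance() {
  // connect to the local dev node started in step 2
  const api = await ApiPromise.create({ provider: new WsProvider("ws://127.0.0.1:9944") });

  // Alice is the account that linked the eth address and called assetClaim
  const alice = "5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY";

  // NOTE: storage path assumed from "state -> OffchainWorker -> accountBalance" above;
  // adjust the key(s) if the storage map is keyed differently in your runtime
  const balance = await api.query.offchainWorker.accountBalance(alice);
  console.log(`accountBalance(Alice) = ${balance.toString()}`);

  await api.disconnect();
}

checkClaimedBalance().catch(console.error);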
-------------------------------------------------------------------------------- /pallets/nft/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | authors = ['Litentry Dev'] 3 | description = 'FRAME nft pallet' 4 | edition = '2018' 5 | homepage = 'https://litentry.com' 6 | license = 'Unlicense' 7 | name = 'pallet-nft' 8 | repository = 'https://github.com/litentry/litentry-pallets' 9 | version = '0.0.1' 10 | 11 | [dependencies] 12 | serde = { version = "1.0.119", optional = true } 13 | codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } 14 | enumflags2 = { version = "0.6.3" } 15 | 16 | frame-benchmarking = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8", default-features = false, optional = true} 17 | frame-support = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8", default-features = false } 18 | frame-system = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8", default-features = false } 19 | sp-runtime = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8", default-features = false } 20 | sp-std = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8", default-features = false } 21 | sp-core = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8", default-features = false } 22 | sp-io = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8", default-features = false } 23 | 24 | orml-nft = { git = "https://github.com/open-web3-stack/open-runtime-module-library", default-features = false, rev = "ca054d3078a8d157d0c176ed319d6ff49f38d349" } 25 | orml-traits = { git = "https://github.com/open-web3-stack/open-runtime-module-library", default-features = false, rev = "ca054d3078a8d157d0c176ed319d6ff49f38d349" } 26 | 27 | [dev-dependencies] 28 | sp-core = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8" } 29 | pallet-balances = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8" } 30 | pallet-utility = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8" } 31 | 32 | [features] 33 | default = ["std"] 34 | std = [ 35 | "serde", 36 | "codec/std", 37 | "enumflags2/serde", 38 | "sp-std/std", 39 | "sp-io/std", 40 | "sp-runtime/std", 41 | "frame-support/std", 42 | "frame-system/std", 43 | "orml-traits/std", 44 | "orml-nft/std", 45 | "enumflags2/serde", 46 | ] 47 | runtime-benchmarks = ["frame-benchmarking"] 48 | -------------------------------------------------------------------------------- /pallets/account-linker/src/benchmarking.rs: -------------------------------------------------------------------------------- 1 | #![cfg(feature = "runtime-benchmarks")] 2 | 3 | use super::*; 4 | use crate::Pallet as AccountLinker; 5 | use frame_benchmarking::{account, benchmarks, impl_benchmark_test_suite}; 6 | use frame_system::RawOrigin; 7 | 8 | use sp_std::prelude::*; 9 | 10 | const SEED: u32 = 0; 11 | 12 | benchmarks! 
{ 13 | link_eth { 14 | let caller = account("Alice", 0, SEED); 15 | let index: u32 = 0; 16 | let addr_expected: EthAddress = [16, 146, 71, 235, 177, 95, 237, 92, 255, 45, 73, 190, 133, 132, 185, 41, 14, 77, 9, 207]; 17 | let expiring_block_number: u32 = 10000; 18 | let sig: Signature = [133, 13, 66, 20, 141, 102, 233, 186, 153, 38, 81, 149, 29, 16, 191, 87, 206, 103, 230, 184, 32, 165, 174, 40, 221, 54, 212, 61, 132, 38, 254, 39, 19, 118, 77, 20, 241, 238, 52, 206, 124, 232, 254, 37, 109, 69, 191, 253, 242, 19, 48, 32, 92, 134, 123, 2, 6, 223, 233, 225, 129, 41, 235, 116, 28]; 19 | }: link_eth(RawOrigin::Signed(caller), index, addr_expected, expiring_block_number.into(), sig) 20 | 21 | link_btc { 22 | let caller = account("caller", 0, 0); 23 | let account_id: T::AccountId = account("Alice", 0, SEED); 24 | let index: u32 = 0; 25 | let addr_expected = vec![49, 51, 121, 55, 106, 72, 52, 85, 57, 113, 68, 112, 69, 77, 77, 119, 87, 90, 117, 52, 99, 122, 52, 107, 55, 67, 81, 107, 90, 72, 100, 101, 113, 71]; 26 | let expiring_block_number: u32 = 10000; 27 | let sig: Signature = [250, 57, 156, 18, 181, 153, 186, 77, 81, 242, 31, 146, 82, 115, 85, 163, 136, 220, 104, 194, 98, 88, 28, 109, 163, 113, 12, 47, 193, 183, 189, 106, 41, 163, 172, 76, 129, 83, 66, 195, 126, 213, 207, 91, 186, 70, 255, 125, 111, 38, 123, 240, 178, 101, 22, 192, 133, 22, 245, 109, 50, 175, 225, 208, 0]; 28 | }: link_btc(RawOrigin::Signed(caller), account_id.clone(), index, addr_expected, expiring_block_number.into(), sig) 29 | 30 | link_polkadot { 31 | let caller = account("caller", 0, 0); 32 | let linked_account: T::AccountId = account("Alice", 0, SEED); 33 | let index: u32 = 0; 34 | }: _(RawOrigin::Signed(caller), linked_account, index) 35 | 36 | accept_polkadot { 37 | let caller: T::AccountId = account("caller", 0, 0); 38 | let linked_account: T::AccountId = account("Alice", 0, SEED); 39 | let index: u32 = 0; 40 | crate::Pallet::::link_polkadot(RawOrigin::Signed(caller.clone()).into(), linked_account.clone(), index)?; 41 | }: _(RawOrigin::Signed(linked_account), caller) 42 | } 43 | 44 | impl_benchmark_test_suite!(AccountLinker, crate::mock::new_test_ext(), crate::mock::Test,); 45 | -------------------------------------------------------------------------------- /pallets/offchain-worker/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | authors = ['Litentry Dev'] 3 | description = 'FRAME pallet template for defining custom runtime logic.' 
4 | edition = '2018' 5 | homepage = 'https://litentry.com' 6 | license = 'Unlicense' 7 | name = 'pallet-offchain-worker' 8 | repository = 'https://github.com/litentry/litentry-offchain-worker' 9 | version = '0.0.1' 10 | 11 | [package.metadata.docs.rs] 12 | targets = ['x86_64-unknown-linux-gnu'] 13 | 14 | [dependencies] 15 | codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } 16 | serde = { version = "1.0.119", optional = true } 17 | sp-core = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8", default-features = false } 18 | sp-io = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8", default-features = false } 19 | sp-std = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8", default-features = false } 20 | sp-runtime = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8", default-features = false } 21 | frame-support = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8", default-features = false } 22 | frame-system = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8", default-features = false } 23 | sp-arithmetic = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8", default-features = false } 24 | frame-benchmarking = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8", default-features = false, optional = true } 25 | 26 | alt_serde = { version = "=1.0.104", default-features = false, features = ["derive"] } 27 | serde_json = { version = "1", default-features = false, git = "https://github.com/Xanewok/json", branch = "no-std", features = ["alloc"] } 28 | parking_lot = "0.10.0" 29 | log = { version = "0.4.14", default-features = false } 30 | 31 | account-linker = {path = "../account-linker", package = "pallet-account-linker", default-features = false} 32 | 33 | [dev-dependencies] 34 | sp-core = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8" } 35 | pallet-balances = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8" } 36 | 37 | hex = "0.4.2" 38 | 39 | [features] 40 | default = ['std'] 41 | std = [ 42 | "codec/std", 43 | "frame-support/std", 44 | "frame-system/std", 45 | "serde", 46 | "sp-core/std", 47 | "sp-io/std", 48 | "sp-runtime/std", 49 | "sp-std/std", 50 | "account-linker/std", 51 | "frame-benchmarking/std" 52 | ] 53 | runtime-benchmarks = ["frame-benchmarking"] -------------------------------------------------------------------------------- /pallets/account-linker/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | authors = ['Litentry Dev'] 3 | description = 'FRAME pallet template for defining custom runtime logic.' 
4 | edition = '2018' 5 | homepage = 'https://litentry.com' 6 | license = 'Unlicense' 7 | name = 'pallet-account-linker' 8 | repository = 'https://github.com/litentry/litentry-account-linker' 9 | version = '0.1.0' 10 | 11 | [package.metadata.docs.rs] 12 | targets = ['x86_64-unknown-linux-gnu'] 13 | 14 | # alias "parity-scale-code" to "codec" 15 | [dependencies.codec] 16 | default-features = false 17 | features = ['derive'] 18 | package = 'parity-scale-codec' 19 | version = '2.0.0' 20 | 21 | [dependencies] 22 | 23 | frame-support = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8", default-features = false } 24 | frame-system = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8", default-features = false } 25 | sp-io = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8", default-features = false } 26 | sp-std = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8", default-features = false } 27 | sp-runtime = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8", default-features = false, optional = true } 28 | frame-benchmarking = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8", default-features = false, optional = true } 29 | sp-core = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8", default-features = false, optional = true } 30 | 31 | log = { version = "0.4.14", default-features = false } 32 | sha2 = { default-features = false, version = "0.9.5" } 33 | ripemd160 = {default-features = false, version = "0.9.1" } 34 | 35 | [dev-dependencies] 36 | sp-core = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8", default-features = false } 37 | sp-runtime = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8", default-features = false } 38 | frame-benchmarking = { git = "https://github.com/paritytech/substrate", branch = "polkadot-v0.9.8" } 39 | 40 | parity-crypto = {version = "0.8.0", features = ["publickey"] } 41 | bitcoin = { version = "0.25.2", features = ["rand"] } 42 | serde = { version = "1.0.119" } 43 | hex = "0.4.2" 44 | 45 | [features] 46 | default = ['std'] 47 | std = [ 48 | 'codec/std', 49 | 'frame-support/std', 50 | 'frame-system/std', 51 | "sp-io/std", 52 | "sp-std/std", 53 | "sha2/std", 54 | "ripemd160/std", 55 | "log/std", 56 | "frame-benchmarking/std", 57 | ] 58 | runtime-benchmarks = ["sp-runtime", "frame-benchmarking", "sp-core"] 59 | -------------------------------------------------------------------------------- /pallets/account-linker/src/mock.rs: -------------------------------------------------------------------------------- 1 | use crate as account_linker; 2 | use frame_support::{ 3 | parameter_types, 4 | traits::{OnFinalize, OnInitialize}, 5 | }; 6 | use frame_system as system; 7 | use sp_core::H256; 8 | use sp_runtime::{ 9 | generic, 10 | traits::{BlakeTwo256, IdentityLookup}, 11 | AccountId32, 12 | }; 13 | 14 | pub use crate::MAX_ETH_LINKS; 15 | 16 | type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; 17 | type Block = frame_system::mocking::MockBlock; 18 | 19 | // Configure a mock runtime to test the pallet. 
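// It wires frame_system and the account-linker pallet into a minimal construct_runtime! so the
// tests can dispatch extrinsics, advance blocks with `run_to_block`, and inspect emitted events via `events()`.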
20 | frame_support::construct_runtime!(
21 | 	pub enum Test where
22 | 		Block = Block,
23 | 		NodeBlock = Block,
24 | 		UncheckedExtrinsic = UncheckedExtrinsic,
25 | 	{
26 | 		System: frame_system::{Pallet, Call, Config, Storage, Event<T>},
27 | 		AccountLinker: account_linker::{Pallet, Call, Storage, Event<T>},
28 | 	}
29 | );
30 | 
31 | parameter_types! {
32 | 	pub const BlockHashCount: u32 = 250;
33 | 	pub const SS58Prefix: u8 = 42;
34 | }
35 | 
36 | impl system::Config for Test {
37 | 	type BaseCallFilter = ();
38 | 	type Origin = Origin;
39 | 	type BlockWeights = ();
40 | 	type BlockLength = ();
41 | 	type DbWeight = ();
42 | 	type Call = Call;
43 | 	type Index = u32;
44 | 	type BlockNumber = u32;
45 | 	type Hash = H256;
46 | 	type Hashing = BlakeTwo256;
47 | 	type AccountId = AccountId32;
48 | 	type Lookup = IdentityLookup<Self::AccountId>;
49 | 	type Header = generic::Header<u32, BlakeTwo256>;
50 | 	type Event = Event;
51 | 	type BlockHashCount = BlockHashCount;
52 | 	type Version = ();
53 | 	type PalletInfo = PalletInfo;
54 | 	type AccountData = ();
55 | 	type OnNewAccount = ();
56 | 	type OnKilledAccount = ();
57 | 	type SystemWeightInfo = ();
58 | 	type SS58Prefix = SS58Prefix;
59 | 	type OnSetCode = ();
60 | }
61 | 
62 | impl account_linker::Config for Test {
63 | 	type Event = Event;
64 | 	type WeightInfo = ();
65 | }
66 | 
67 | pub type AccountLinkerError = account_linker::Error<Test>;
68 | 
69 | // Build genesis storage according to the mock runtime.
70 | pub fn new_test_ext() -> sp_io::TestExternalities {
71 | 	system::GenesisConfig::default().build_storage::<Test>().unwrap().into()
72 | }
73 | 
74 | pub fn run_to_block(n: u32) {
75 | 	while System::block_number() < n {
76 | 		AccountLinker::on_finalize(System::block_number());
77 | 		System::on_finalize(System::block_number());
78 | 		System::set_block_number(System::block_number() + 1);
79 | 		System::on_initialize(System::block_number());
80 | 		AccountLinker::on_initialize(System::block_number());
81 | 	}
82 | }
83 | 
84 | pub fn events() -> Vec<Event> {
85 | 	let evt = System::events().into_iter().map(|evt| evt.event).collect::<Vec<_>>();
86 | 
87 | 	System::reset_events();
88 | 
89 | 	evt
90 | }
91 | 
-------------------------------------------------------------------------------- /pallets/nft/src/impl_nonfungibles.rs: --------------------------------------------------------------------------------
1 | //! Implementations for `nonfungibles` traits.
2 | 
3 | use super::*;
4 | use frame_support::traits::tokens::nonfungibles::{Inspect, Transfer};
5 | use sp_runtime::DispatchResult;
6 | 
7 | impl<T: Config> Inspect<<T as frame_system::Config>::AccountId> for Pallet<T> {
8 | 	type InstanceId = T::TokenId;
9 | 	type ClassId = T::ClassId;
10 | 
11 | 	fn owner(
12 | 		class: &Self::ClassId,
13 | 		instance: &Self::InstanceId,
14 | 	) -> Option<<T as frame_system::Config>::AccountId> {
15 | 		orml_nft::Pallet::<T>::tokens(class, instance).map(|a| a.owner)
16 | 	}
17 | 
18 | 	fn class_owner(class: &Self::ClassId) -> Option<<T as frame_system::Config>::AccountId> {
19 | 		orml_nft::Pallet::<T>::classes(class).map(|a| a.owner)
20 | 	}
21 | 
22 | 	/// Returns the attribute value of `instance` of `class` corresponding to `key`.
23 | 	///
24 | 	/// When `key` is empty, we return the instance metadata value.
25 | 	///
26 | 	/// By default this is `None`; no attributes are defined.
27 | 	fn attribute(
28 | 		class: &Self::ClassId,
29 | 		instance: &Self::InstanceId,
30 | 		key: &[u8],
31 | 	) -> Option<Vec<u8>> {
32 | 		if key.is_empty() {
33 | 			// We make the empty key map to the instance metadata value.
34 | 			orml_nft::Pallet::<T>::tokens(class, instance).map(|a| a.metadata.into())
35 | 		} else {
36 | 			return None;
37 | 		}
38 | 	}
39 | 
40 | 	/// Returns the attribute value of `instance` of `class` corresponding to `key`.
41 | 	///
42 | 	/// When `key` is empty, we return the instance metadata value.
43 | 	///
44 | 	/// By default this is `None`; no attributes are defined.
45 | 	fn class_attribute(class: &Self::ClassId, key: &[u8]) -> Option<Vec<u8>> {
46 | 		if key.is_empty() {
47 | 			// We make the empty key map to the instance metadata value.
48 | 			orml_nft::Pallet::<T>::classes(class).map(|a| a.metadata.into())
49 | 		} else {
50 | 			return None;
51 | 		}
52 | 	}
53 | 
54 | 	/// Returns `true` if the asset `instance` of `class` may be transferred.
55 | 	///
56 | 	/// Default implementation is that all assets are transferable.
57 | 	fn can_transfer(class: &Self::ClassId, instance: &Self::InstanceId) -> bool {
58 | 		match orml_nft::Pallet::<T>::classes(class) {
59 | 			Some(class) => class.data.properties.0.contains(ClassProperty::Transferable),
60 | 			_ => false,
61 | 		}
62 | 	}
63 | }
64 | 
65 | impl<T: Config> Transfer<T::AccountId> for Pallet<T> {
66 | 	fn transfer(
67 | 		class: &Self::ClassId,
68 | 		instance: &Self::InstanceId,
69 | 		destination: &T::AccountId,
70 | 	) -> DispatchResult {
71 | 		let from = orml_nft::Pallet::<T>::tokens(class, instance)
72 | 			.map(|a| a.owner)
73 | 			.ok_or(Error::<T>::TokenNotFound)?;
74 | 		Self::do_transfer(&from, &destination, (*class, *instance))?;
75 | 		Ok(())
76 | 	}
77 | }
78 | 
-------------------------------------------------------------------------------- /pallets/account-linker/src/btc/legacy.rs: --------------------------------------------------------------------------------
1 | use ripemd160::Ripemd160;
2 | use sha2::{Digest, Sha256};
3 | 
4 | pub fn btc_addr_from_pk(pk: &[u8]) -> [u8; 25] {
5 | 	let mut result = [0u8; 25];
6 | 
7 | 	// Now only support P2PKH (Mainnet) prefix = 0
8 | 	result[0] = 0;
9 | 	result[1..21].copy_from_slice(&hash160(pk));
10 | 	let cs = checksum(&result[0..21]);
11 | 	result[21..25].copy_from_slice(&cs);
12 | 	result
13 | }
14 | 
15 | pub fn hash160(bytes: &[u8]) -> [u8; 20] {
16 | 	let mut hasher_sha256 = Sha256::new();
17 | 	hasher_sha256.update(bytes);
18 | 	let digest = hasher_sha256.finalize();
19 | 
20 | 	let mut hasher_ripemd = Ripemd160::new();
21 | 	hasher_ripemd.update(digest);
22 | 
23 | 	let mut ret = [0; 20];
24 | 	ret.copy_from_slice(&hasher_ripemd.finalize()[..]);
25 | 	ret
26 | }
27 | 
28 | fn checksum(input: &[u8]) -> [u8; 4] {
29 | 	let mut result = [0u8; 4];
30 | 	result.copy_from_slice(&dsha256(input)[0..4]);
31 | 	result
32 | }
33 | 
34 | /// Computes Bitcoin's double SHA256 hash over a LE byte encoded input
35 | ///
36 | /// # Arguments
37 | /// * data: LE bytes encoded input
38 | ///
39 | /// # Returns
40 | /// * The double SHA256 hash encoded as LE bytes from data
41 | fn dsha256(bytes: &[u8]) -> [u8; 32] {
42 | 	let mut hasher = Sha256::new();
43 | 	hasher.update(bytes);
44 | 	let digest = hasher.finalize();
45 | 
46 | 	let mut second_hasher = Sha256::new();
47 | 	second_hasher.update(digest);
48 | 
49 | 	let mut ret = [0; 32];
50 | 	ret.copy_from_slice(&second_hasher.finalize()[..]);
51 | 	ret
52 | }
53 | 
54 | // test data can be obtained from here http://gobittest.appspot.com/Address
55 | #[cfg(test)]
56 | mod tests {
57 | 	use super::*;
58 | 	use hex::decode;
59 | 
60 | 	#[test]
61 | 	fn correct_dhash160() {
62 | 		let pk = decode("0450863AD64A87AE8A2FE83C1AF1A8403CB53F53E486D8511DAD8A04887E5B23522CD470243453A299FA9E77237716103ABC11A1DF38855ED6F2EE187E9C582BA6").unwrap();
63 | 
64 | 		let hash = hash160(&pk);
65 | 
66 | 		let
result = decode("010966776006953D5567439E5E39F86A0D273BEE").unwrap(); 67 | let mut hash_expected = [0u8; 20]; 68 | hash_expected[0..20].copy_from_slice(&result[0..20]); 69 | 70 | assert_eq!(hash, hash_expected); 71 | } 72 | 73 | #[test] 74 | fn correct_btc_addr_from_pk() { 75 | let pk = decode("0450863AD64A87AE8A2FE83C1AF1A8403CB53F53E486D8511DAD8A04887E5B23522CD470243453A299FA9E77237716103ABC11A1DF38855ED6F2EE187E9C582BA6").unwrap(); 76 | let mut pk_input = [0u8; 65]; 77 | pk_input[0..65].copy_from_slice(&pk[0..65]); 78 | 79 | let addr = btc_addr_from_pk(&pk_input); 80 | 81 | let addr_expected_hex = 82 | decode("00010966776006953D5567439E5E39F86A0D273BEED61967F6").unwrap(); 83 | let mut addr_expected = [0u8; 25]; 84 | addr_expected[0..25].copy_from_slice(&addr_expected_hex[0..25]); 85 | assert_eq!(addr, addr_expected); 86 | } 87 | } 88 | -------------------------------------------------------------------------------- /pallets/offchain-worker/src/test_data.md: -------------------------------------------------------------------------------- 1 | # Ethereum signature for test 2 | eth address 0x4d88dc5d528a33e4b8be579e9476715f60060582 3 | [77, 136, 220, 93, 82, 138, 51, 228, 184, 190, 87, 158, 148, 118, 113, 95, 96, 6, 5, 130] 4 | block number 10000 5 | signature 6 | r 0x318400f0f9bd15f0d8842870b510e996dffc944b77111ded03a4255c66e82d42 7 | [49, 132, 0, 240, 249, 189, 21, 240, 216, 132, 40, 112, 181, 16, 233, 150, 223, 252, 148, 75, 119, 17, 29, 237, 3, 164, 37, 92, 102, 232, 45, 66] 8 | s 0x7132e765d5e6bb21ba046dbb98e28bb28cb2bebe0c8aced2c547aca60a554892 9 | [113, 50, 231, 101, 213, 230, 187, 33, 186, 4, 109, 187, 152, 226, 139, 178, 140, 178, 190, 190, 12, 138, 206, 210, 197, 71, 172, 166, 10, 85, 72, 146] 10 | v 0x1c 11 | 12 | https://api-ropsten.etherscan.io/api?module=account&action=balancemulti&address=0x167453188a05082e3b347c1c518f3dd55d37fbbf&tag=latest&apikey=RF71W4Z2RDA7XQD6EN19NGB66C2QD9UPHB 13 | 14 | # Bitcoin 15 | 167453188a05082e3b347c1c518f3dd55d37fbbf 16 | 17 | ## bitcoin private key 18 | 84cf749059a129fb57a6070411234a4fe58e7f04c959b66fa7c8b2d2dd609749 19 | 20 | ## full public key 21 | 04a097026e876544a0e40f9ca836435560af4470e161bf60c23465dcb3151c947d1cbe052875211972107e25fca8dd939f1c6e749a43862673ec5cf7a8567f2d95 22 | 23 | ## compressed public key 24 | 03a097026e876544a0e40f9ca836435560af4470e161bf60c23465dcb3151c947d 25 | 26 | ## hash160 27 | 00828a58e0b8731f81e9f0f4e789a6ddeef62ef108 28 | 29 | ## bs58 address 30 | 1CuEaUAvhm8D9SkwEVLgqvFShHAxKmbW19 31 | 32 | 33 | ## bitcoin signature 34 | r = 0x7ab3d2376eda8d43f0d09cc3bbfaf84131db254adbd44b2366ed537ddac3c230 35 | [122, 179, 210, 55, 110, 218, 141, 67, 240, 208, 156, 195, 187, 250, 248, 65, 49, 219, 37, 74, 219, 212, 75, 35, 102, 237, 83, 125, 218, 195, 194, 48] 36 | 37 | s = 0x27cbdc60995ead5268c61a3b1f86042c030548e9377aec95909c87dc625dcb2a 38 | [39, 203, 220, 96, 153, 94, 173, 82, 104, 198, 26, 59, 31, 134, 4, 44, 3, 5, 72, 233, 55, 122, 236, 149, 144, 156, 135, 220, 98, 93, 203, 42] 39 | 40 | v = 0 41 | 42 | [u8; 25] prefix 0 + hash160 + checksum 4 bytes. 
43 | 0x001db22bfeb133761132194c1c4688194dd58fc6373672a95d 44 | address 1MnqFoXqLWDGByG94fvvvWAPWE6tEUMQET 45 | 46 | 47 | ## Insert key off-chain worker to sign tx 48 | ### Generate a new account 49 | bash-5.0$ subkey generate 50 | 51 | Secret phrase `loop high amazing chat tennis auto denial attend type quit liquid tonight` is account: 52 | Secret seed: 0xad9e7d8233eff5b32ebdf1cfd6d2007f0bfa7c73f7d2d7e60f95dbd642a8af54 53 | Public key (hex): 0x8c35b97c56099cf3b5c631d1f296abbb11289857e74a8f60936290080d56da6d 54 | Account ID: 0x8c35b97c56099cf3b5c631d1f296abbb11289857e74a8f60936290080d56da6d 55 | SS58 Address: 5FEYX9NES9mAJt1Xg4WebmHWywxyeGQK8G3oEBXtyfZrRePX 56 | 57 | ### Submit a new key via RPC 58 | $ curl http://localhost:9933 -H "Content-Type:application/json;charset=utf-8" -d \ 59 | '{ 60 | "jsonrpc":"2.0", 61 | "id":1, 62 | "method":"author_insertKey", 63 | "params": [ 64 | "ocw!", 65 | "loop high amazing chat tennis auto denial attend type quit liquid tonight", 66 | "0x8c35b97c56099cf3b5c631d1f296abbb11289857e74a8f60936290080d56da6d" 67 | ] 68 | }' 69 | 70 | ### transfer token from alice to 5FEYX9NES9mAJt1Xg4WebmHWywxyeGQK8G3oEBXtyfZrRePX 71 | 72 | -------------------------------------------------------------------------------- /pallets/offchain-worker/src/weights.rs: -------------------------------------------------------------------------------- 1 | // This file is part of Substrate. 2 | 3 | // Copyright (C) 2021 Parity Technologies (UK) Ltd. 4 | // SPDX-License-Identifier: Apache-2.0 5 | 6 | // Licensed under the Apache License, Version 2.0 (the "License"); 7 | // you may not use this file except in compliance with the License. 8 | // You may obtain a copy of the License at 9 | // 10 | // http://www.apache.org/licenses/LICENSE-2.0 11 | // 12 | // Unless required by applicable law or agreed to in writing, software 13 | // distributed under the License is distributed on an "AS IS" BASIS, 14 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | // See the License for the specific language governing permissions and 16 | // limitations under the License. 17 | 18 | //! Autogenerated weights for pallet_offchain_worker 19 | //! 20 | //! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 3.0.0 21 | //! DATE: 2021-08-11, STEPS: [20, ], REPEAT: 50, LOW RANGE: [], HIGH RANGE: [] 22 | //! EXECUTION: Some(Wasm), WASM-EXECUTION: Compiled, CHAIN: Some("./source/local.json"), DB CACHE: 20 23 | 24 | // Executed Command: 25 | // target/release/litentry-collator 26 | // benchmark 27 | // --chain=./source/local.json 28 | // --execution=wasm 29 | // --db-cache=20 30 | // --wasm-execution=compiled 31 | // --pallet=pallet-offchain-worker 32 | // --extrinsic=* 33 | // --heap-pages=4096 34 | // --steps=20 35 | // --repeat=50 36 | // --output=./source/weights.rs 37 | // --template=./.maintain/frame-weight-template.hbs 38 | 39 | 40 | #![allow(unused_parens)] 41 | #![allow(unused_imports)] 42 | 43 | use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; 44 | use sp_std::marker::PhantomData; 45 | 46 | /// Weight functions needed for pallet_offchain_worker. 47 | pub trait WeightInfo { 48 | fn asset_claim() -> Weight; 49 | fn submit_balance() -> Weight; 50 | fn on_finalize() -> Weight; 51 | } 52 | 53 | /// Weights for pallet_offchain_worker using the Substrate node and recommended hardware. 
54 | pub struct SubstrateWeight(PhantomData); 55 | impl WeightInfo for SubstrateWeight { 56 | fn asset_claim() -> Weight { 57 | (8_577_000 as Weight) 58 | .saturating_add(T::DbWeight::get().reads(1 as Weight)) 59 | .saturating_add(T::DbWeight::get().writes(1 as Weight)) 60 | } 61 | fn submit_balance() -> Weight { 62 | (34_248_000 as Weight) 63 | .saturating_add(T::DbWeight::get().reads(4 as Weight)) 64 | .saturating_add(T::DbWeight::get().writes(2 as Weight)) 65 | } 66 | fn on_finalize() -> Weight { 67 | (3_882_140_000 as Weight) 68 | .saturating_add(T::DbWeight::get().reads(398 as Weight)) 69 | .saturating_add(T::DbWeight::get().writes(298 as Weight)) 70 | } 71 | } 72 | 73 | // For backwards compatibility and tests 74 | impl WeightInfo for () { 75 | fn asset_claim() -> Weight { 76 | (8_577_000 as Weight) 77 | .saturating_add(RocksDbWeight::get().reads(1 as Weight)) 78 | .saturating_add(RocksDbWeight::get().writes(1 as Weight)) 79 | } 80 | fn submit_balance() -> Weight { 81 | (34_248_000 as Weight) 82 | .saturating_add(RocksDbWeight::get().reads(4 as Weight)) 83 | .saturating_add(RocksDbWeight::get().writes(2 as Weight)) 84 | } 85 | fn on_finalize() -> Weight { 86 | (3_882_140_000 as Weight) 87 | .saturating_add(RocksDbWeight::get().reads(398 as Weight)) 88 | .saturating_add(RocksDbWeight::get().writes(298 as Weight)) 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /pallets/account-linker/src/weights.rs: -------------------------------------------------------------------------------- 1 | // This file is part of Substrate. 2 | 3 | // Copyright (C) 2021 Parity Technologies (UK) Ltd. 4 | // SPDX-License-Identifier: Apache-2.0 5 | 6 | // Licensed under the Apache License, Version 2.0 (the "License"); 7 | // you may not use this file except in compliance with the License. 8 | // You may obtain a copy of the License at 9 | // 10 | // http://www.apache.org/licenses/LICENSE-2.0 11 | // 12 | // Unless required by applicable law or agreed to in writing, software 13 | // distributed under the License is distributed on an "AS IS" BASIS, 14 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | // See the License for the specific language governing permissions and 16 | // limitations under the License. 17 | 18 | //! Autogenerated weights for pallet_account_linker 19 | //! 20 | //! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 3.0.0 21 | //! DATE: 2021-02-25, STEPS: [20, ], REPEAT: 50, LOW RANGE: [], HIGH RANGE: [] 22 | //! EXECUTION: Some(Wasm), WASM-EXECUTION: Compiled, CHAIN: Some("dev"), DB CACHE: 128 23 | 24 | // Executed Command: 25 | // target/release/litentry-node 26 | // benchmark 27 | // --chain=dev 28 | // --execution=wasm 29 | // --wasm-execution=compiled 30 | // --pallet=pallet_account_linker 31 | // --extrinsic=* 32 | // --heap-pages=4096 33 | // --steps=20 34 | // --repeat=50 35 | // --output=./pallets/account-linker/src/weights.rs 36 | // --template=./.maintain/frame-weight-template.hbs 37 | 38 | #![allow(unused_parens)] 39 | #![allow(unused_imports)] 40 | 41 | use frame_support::{ 42 | traits::Get, 43 | weights::{constants::RocksDbWeight, Weight}, 44 | }; 45 | use sp_std::marker::PhantomData; 46 | 47 | /// Weight functions needed for pallet_account_linker. 
48 | pub trait WeightInfo { 49 | fn link_eth() -> Weight; 50 | fn link_btc() -> Weight; 51 | fn link_polkadot() -> Weight; 52 | fn accept_polkadot() -> Weight; 53 | } 54 | 55 | /// Weights for pallet_account_linker using the Substrate node and recommended hardware. 56 | pub struct SubstrateWeight(PhantomData); 57 | impl WeightInfo for SubstrateWeight { 58 | fn link_eth() -> Weight { 59 | (324_000_000 as Weight) 60 | .saturating_add(T::DbWeight::get().reads(1 as Weight)) 61 | .saturating_add(T::DbWeight::get().writes(1 as Weight)) 62 | } 63 | fn link_btc() -> Weight { 64 | (335_000_000 as Weight) 65 | .saturating_add(T::DbWeight::get().reads(1 as Weight)) 66 | .saturating_add(T::DbWeight::get().writes(1 as Weight)) 67 | } 68 | fn link_polkadot() -> Weight { 69 | (335_000_000 as Weight).saturating_add(T::DbWeight::get().reads(1 as Weight)) 70 | } 71 | fn accept_polkadot() -> Weight { 72 | (335_000_000 as Weight) 73 | .saturating_add(T::DbWeight::get().reads(1 as Weight)) 74 | .saturating_add(T::DbWeight::get().writes(1 as Weight)) 75 | } 76 | } 77 | 78 | // For backwards compatibility and tests 79 | impl WeightInfo for () { 80 | fn link_eth() -> Weight { 81 | (324_000_000 as Weight) 82 | .saturating_add(RocksDbWeight::get().reads(1 as Weight)) 83 | .saturating_add(RocksDbWeight::get().writes(1 as Weight)) 84 | } 85 | fn link_btc() -> Weight { 86 | (335_000_000 as Weight) 87 | .saturating_add(RocksDbWeight::get().reads(1 as Weight)) 88 | .saturating_add(RocksDbWeight::get().writes(1 as Weight)) 89 | } 90 | fn link_polkadot() -> Weight { 91 | (335_000_000 as Weight).saturating_add(RocksDbWeight::get().reads(1 as Weight)) 92 | } 93 | fn accept_polkadot() -> Weight { 94 | (335_000_000 as Weight) 95 | .saturating_add(RocksDbWeight::get().reads(1 as Weight)) 96 | .saturating_add(RocksDbWeight::get().writes(1 as Weight)) 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /merkle-distributor/merkle-tree.ts: -------------------------------------------------------------------------------- 1 | import { bufferToHex, keccak256 } from "ethereumjs-util"; 2 | 3 | export default class MerkleTree { 4 | private readonly elements: Buffer[]; 5 | private readonly bufferElementPositionIndex: { [hexElement: string]: number }; 6 | private readonly layers: Buffer[][]; 7 | 8 | constructor(elements: Buffer[]) { 9 | this.elements = [...elements]; 10 | // Sort elements 11 | this.elements.sort(Buffer.compare); 12 | // Deduplicate elements 13 | this.elements = MerkleTree.bufDedup(this.elements); 14 | 15 | this.bufferElementPositionIndex = this.elements.reduce<{ 16 | [hexElement: string]: number; 17 | }>((memo, el, index) => { 18 | memo[bufferToHex(el)] = index; 19 | return memo; 20 | }, {}); 21 | 22 | // Create layers 23 | this.layers = this.getLayers(this.elements); 24 | } 25 | 26 | getLayers(elements: Buffer[]): Buffer[][] { 27 | if (elements.length === 0) { 28 | throw new Error("empty tree"); 29 | } 30 | 31 | const layers = []; 32 | layers.push(elements); 33 | 34 | // Get next layer until we reach the root 35 | while (layers[layers.length - 1].length > 1) { 36 | layers.push(this.getNextLayer(layers[layers.length - 1])); 37 | } 38 | return layers; 39 | } 40 | 41 | getNextLayer(elements: Buffer[]): Buffer[] { 42 | return elements.reduce((layer, el, idx, arr) => { 43 | if (idx % 2 === 0) { 44 | // Hash the current element with its pair element 45 | layer.push(MerkleTree.combinedHash(el, arr[idx + 1])); 46 | } 47 | 48 | return layer; 49 | }, []); 50 | } 51 | 52 | static 
combinedHash(first: Buffer, second: Buffer): Buffer { 53 | if (!first) { 54 | return second; 55 | } 56 | if (!second) { 57 | return first; 58 | } 59 | 60 | return keccak256(MerkleTree.sortAndConcat(first, second)); 61 | } 62 | 63 | getRoot(): Buffer { 64 | return this.layers[this.layers.length - 1][0]; 65 | } 66 | 67 | getHexRoot(): string { 68 | return bufferToHex(this.getRoot()); 69 | } 70 | 71 | getProof(el: Buffer) { 72 | let idx = this.bufferElementPositionIndex[bufferToHex(el)]; 73 | 74 | if (typeof idx !== "number") { 75 | throw new Error("Element does not exist in Merkle tree"); 76 | } 77 | 78 | return this.layers.reduce((proof, layer) => { 79 | const pairElement = MerkleTree.getPairElement(idx, layer); 80 | 81 | if (pairElement) { 82 | proof.push(pairElement); 83 | } 84 | 85 | idx = Math.floor(idx / 2); 86 | 87 | return proof; 88 | }, []); 89 | } 90 | 91 | getHexProof(el: Buffer): string[] { 92 | const proof = this.getProof(el); 93 | 94 | return MerkleTree.bufArrToHexArr(proof); 95 | } 96 | 97 | getLeaves(): string[] { 98 | return MerkleTree.bufArrToHexArr(this.elements); 99 | } 100 | 101 | private static getPairElement(idx: number, layer: Buffer[]): Buffer | null { 102 | const pairIdx = idx % 2 === 0 ? idx + 1 : idx - 1; 103 | 104 | if (pairIdx < layer.length) { 105 | return layer[pairIdx]; 106 | } else { 107 | return null; 108 | } 109 | } 110 | 111 | private static bufDedup(elements: Buffer[]): Buffer[] { 112 | return elements.filter((el, idx) => { 113 | return idx === 0 || !elements[idx - 1].equals(el); 114 | }); 115 | } 116 | 117 | private static bufArrToHexArr(arr: Buffer[]): string[] { 118 | if (arr.some((el) => !Buffer.isBuffer(el))) { 119 | throw new Error("Array is not an array of buffers"); 120 | } 121 | 122 | return arr.map((el) => "0x" + el.toString("hex")); 123 | } 124 | 125 | private static sortAndConcat(...args: Buffer[]): Buffer { 126 | return Buffer.concat([...args].sort(Buffer.compare)); 127 | } 128 | } 129 | -------------------------------------------------------------------------------- /pallets/nft/src/weights.rs: -------------------------------------------------------------------------------- 1 | // This file is part of Substrate. 2 | 3 | // Copyright (C) 2021 Parity Technologies (UK) Ltd. 4 | // SPDX-License-Identifier: Apache-2.0 5 | 6 | // Licensed under the Apache License, Version 2.0 (the "License"); 7 | // you may not use this file except in compliance with the License. 8 | // You may obtain a copy of the License at 9 | // 10 | // http://www.apache.org/licenses/LICENSE-2.0 11 | // 12 | // Unless required by applicable law or agreed to in writing, software 13 | // distributed under the License is distributed on an "AS IS" BASIS, 14 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | // See the License for the specific language governing permissions and 16 | // limitations under the License. 17 | 18 | //! Autogenerated weights for pallet_account_linker 19 | //! 20 | //! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 3.0.0 21 | //! DATE: 2021-02-25, STEPS: [20, ], REPEAT: 50, LOW RANGE: [], HIGH RANGE: [] 22 | //! 
EXECUTION: Some(Wasm), WASM-EXECUTION: Compiled, CHAIN: Some("dev"), DB CACHE: 128 23 | 24 | // Executed Command: 25 | // target/release/litentry-node 26 | // benchmark 27 | // --chain=dev 28 | // --execution=wasm 29 | // --wasm-execution=compiled 30 | // --pallet=pallet_account_linker 31 | // --extrinsic=* 32 | // --heap-pages=4096 33 | // --steps=20 34 | // --repeat=50 35 | // --output=./pallets/account-linker/src/weights.rs 36 | // --template=./.maintain/frame-weight-template.hbs 37 | 38 | 39 | #![cfg_attr(rustfmt, rustfmt_skip)] 40 | #![allow(unused_parens)] 41 | #![allow(unused_imports)] 42 | #![allow(clippy::unnecessary_cast)] 43 | 44 | use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; 45 | use sp_std::marker::PhantomData; 46 | 47 | /// Weight functions needed for module_nft. 48 | pub trait WeightInfo { 49 | fn create_class() -> Weight; 50 | fn mint(i: u32, ) -> Weight; 51 | fn transfer() -> Weight; 52 | fn burn() -> Weight; 53 | fn burn_with_remark(b: u32, ) -> Weight; 54 | } 55 | 56 | /// Weights for module_nft using the Acala node and recommended hardware. 57 | pub struct SubstrateWeight(PhantomData); 58 | impl WeightInfo for SubstrateWeight { 59 | fn create_class() -> Weight { 60 | (200_357_000 as Weight) 61 | .saturating_add(T::DbWeight::get().reads(3 as Weight)) 62 | .saturating_add(T::DbWeight::get().writes(4 as Weight)) 63 | } 64 | fn mint(i: u32, ) -> Weight { 65 | (0 as Weight) 66 | // Standard Error: 5_000 67 | .saturating_add((17_893_000 as Weight).saturating_mul(i as Weight)) 68 | .saturating_add(T::DbWeight::get().reads(3 as Weight)) 69 | .saturating_add(T::DbWeight::get().writes(3 as Weight)) 70 | .saturating_add(T::DbWeight::get().writes((2 as Weight).saturating_mul(i as Weight))) 71 | } 72 | fn transfer() -> Weight { 73 | (54_749_000 as Weight) 74 | .saturating_add(T::DbWeight::get().reads(2 as Weight)) 75 | .saturating_add(T::DbWeight::get().writes(3 as Weight)) 76 | } 77 | fn burn() -> Weight { 78 | (154_177_000 as Weight) 79 | .saturating_add(T::DbWeight::get().reads(4 as Weight)) 80 | .saturating_add(T::DbWeight::get().writes(5 as Weight)) 81 | } 82 | fn burn_with_remark(b: u32, ) -> Weight { 83 | (154_177_000 as Weight) 84 | .saturating_add(T::DbWeight::get().reads(4 as Weight)) 85 | .saturating_add(T::DbWeight::get().writes(5 as Weight)) 86 | .saturating_add((1_000 as Weight).saturating_mul(b as Weight)) 87 | } 88 | } 89 | 90 | // For backwards compatibility and tests 91 | impl WeightInfo for () { 92 | fn create_class() -> Weight { 93 | (200_357_000 as Weight) 94 | .saturating_add(RocksDbWeight::get().reads(3 as Weight)) 95 | .saturating_add(RocksDbWeight::get().writes(4 as Weight)) 96 | } 97 | fn mint(i: u32, ) -> Weight { 98 | (0 as Weight) 99 | // Standard Error: 5_000 100 | .saturating_add((17_893_000 as Weight).saturating_mul(i as Weight)) 101 | .saturating_add(RocksDbWeight::get().reads(3 as Weight)) 102 | .saturating_add(RocksDbWeight::get().writes(3 as Weight)) 103 | .saturating_add(RocksDbWeight::get().writes((2 as Weight).saturating_mul(i as Weight))) 104 | } 105 | fn transfer() -> Weight { 106 | (54_749_000 as Weight) 107 | .saturating_add(RocksDbWeight::get().reads(2 as Weight)) 108 | .saturating_add(RocksDbWeight::get().writes(3 as Weight)) 109 | } 110 | fn burn() -> Weight { 111 | (154_177_000 as Weight) 112 | .saturating_add(RocksDbWeight::get().reads(4 as Weight)) 113 | .saturating_add(RocksDbWeight::get().writes(5 as Weight)) 114 | } 115 | fn burn_with_remark(b: u32, ) -> Weight { 116 | (154_177_000 as Weight) 117 | 
.saturating_add(RocksDbWeight::get().reads(4 as Weight)) 118 | .saturating_add(RocksDbWeight::get().writes(5 as Weight)) 119 | .saturating_add((1_000 as Weight).saturating_mul(b as Weight)) 120 | } 121 | } 122 | -------------------------------------------------------------------------------- /pallets/account-linker/src/btc/base58.rs: -------------------------------------------------------------------------------- 1 | //! Based on https://github.com/debris/base58/blob/master/src/lib.rs 2 | //! works only up to 128 bytes 3 | use sp_std::prelude::*; 4 | 5 | const ALPHABET: &'static [u8] = b"123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"; 6 | 7 | /// A trait for converting a value to base58 encoded string. 8 | pub trait ToBase58 { 9 | /// Converts a value of `self` to a base58 value, returning the owned string. 10 | fn to_base58(&self) -> Vec; 11 | } 12 | 13 | impl ToBase58 for [u8] { 14 | fn to_base58(&self) -> Vec { 15 | let zcount = self.iter().take_while(|x| **x == 0).count(); 16 | let size = (self.len() - zcount) * 138 / 100 + 1; 17 | let mut buffer = vec![0u8; size]; 18 | 19 | let mut i = zcount; 20 | let mut high = size - 1; 21 | 22 | while i < self.len() { 23 | let mut carry = self[i] as u32; 24 | let mut j = size - 1; 25 | 26 | while j > high || carry != 0 { 27 | carry += 256 * buffer[j] as u32; 28 | buffer[j] = (carry % 58) as u8; 29 | carry /= 58; 30 | 31 | // in original trezor implementation it was underflowing 32 | if j > 0 { 33 | j -= 1; 34 | } 35 | } 36 | 37 | i += 1; 38 | high = j; 39 | } 40 | 41 | let mut j = buffer.iter().take_while(|x| **x == 0).count(); 42 | 43 | let mut result = Vec::new(); 44 | for _ in 0..zcount { 45 | result.push(b'1'); 46 | } 47 | 48 | while j < size { 49 | result.push(ALPHABET[buffer[j] as usize]); 50 | j += 1; 51 | } 52 | 53 | result 54 | } 55 | } 56 | 57 | #[cfg(test)] 58 | mod tests { 59 | use super::ToBase58; 60 | use hex::decode; 61 | use std::str::from_utf8; 62 | 63 | #[test] 64 | fn test_to_base58_basic() { 65 | assert_eq!(from_utf8(&b"".to_base58()).unwrap(), ""); 66 | assert_eq!(from_utf8(&[32].to_base58()).unwrap(), "Z"); 67 | assert_eq!(from_utf8(&[45].to_base58()).unwrap(), "n"); 68 | assert_eq!(from_utf8(&[48].to_base58()).unwrap(), "q"); 69 | assert_eq!(from_utf8(&[49].to_base58()).unwrap(), "r"); 70 | assert_eq!(from_utf8(&[57].to_base58()).unwrap(), "z"); 71 | assert_eq!(from_utf8(&[45, 49].to_base58()).unwrap(), "4SU"); 72 | assert_eq!(from_utf8(&[49, 49].to_base58()).unwrap(), "4k8"); 73 | assert_eq!(from_utf8(&b"abc".to_base58()).unwrap(), "ZiCa"); 74 | assert_eq!(from_utf8(&b"1234598760".to_base58()).unwrap(), "3mJr7AoUXx2Wqd"); 75 | assert_eq!( 76 | from_utf8(&b"abcdefghijklmnopqrstuvwxyz".to_base58()).unwrap(), 77 | "3yxU3u1igY8WkgtjK92fbJQCd4BZiiT1v25f" 78 | ); 79 | } 80 | 81 | #[test] 82 | fn test_to_base58_initial_zeros() { 83 | assert_eq!(from_utf8(&b"\0abc".to_base58()).unwrap(), "1ZiCa"); 84 | assert_eq!(from_utf8(&b"\0\0abc".to_base58()).unwrap(), "11ZiCa"); 85 | assert_eq!(from_utf8(&b"\0\0\0abc".to_base58()).unwrap(), "111ZiCa"); 86 | assert_eq!(from_utf8(&b"\0\0\0\0abc".to_base58()).unwrap(), "1111ZiCa"); 87 | } 88 | 89 | /// https://github.com/bitcoin/bitcoin/blob/master/src/test/data/base58_encode_decode.json 90 | /// NB: left is hex data 91 | #[test] 92 | fn test_to_base58_bitcoin_repo_cases() { 93 | let test_cases = vec![ 94 | ("", ""), 95 | ("61", "2g"), 96 | ("626262", "a3gV"), 97 | ("636363", "aPEr"), 98 | ("73696d706c792061206c6f6e6720737472696e67", "2cFupjhnEsSn59qHXstmK2ffpLv2"), 99 | 
("00eb15231dfceb60925886b67d065299925915aeb172c06647", "1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L"), 100 | ("516b6fcd0f", "ABnLTmg"), 101 | ("bf4f89001e670274dd", "3SEo3LWLoPntC"), 102 | ("572e4794", "3EFU7m"), 103 | ("ecac89cad93923c02321", "EJDM8drfXA6uyA"), 104 | ("10c8511e", "Rt5zm"), 105 | ("00000000000000000000", "1111111111"), 106 | ("000111d38e5fc9071ffcd20b4a763cc9ae4f252bb4e48fd66a835e252ada93ff480d6dd43dc62a641155a5", "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"), 107 | ("000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff", "1cWB5HCBdLjAuqGGReWE3R3CguuwSjw6RHn39s2yuDRTS5NsBgNiFpWgAnEx6VQi8csexkgYw3mdYrMHr8x9i7aEwP8kZ7vccXWqKDvGv3u1GxFKPuAkn8JCPPGDMf3vMMnbzm6Nh9zh1gcNsMvH3ZNLmP5fSG6DGbbi2tuwMWPthr4boWwCxf7ewSgNQeacyozhKDDQQ1qL5fQFUW52QKUZDZ5fw3KXNQJMcNTcaB723LchjeKun7MuGW5qyCBZYzA1KjofN1gYBV3NqyhQJ3Ns746GNuf9N2pQPmHz4xpnSrrfCvy6TVVz5d4PdrjeshsWQwpZsZGzvbdAdN8MKV5QsBDY") 108 | ]; 109 | 110 | for test_case in test_cases.into_iter() { 111 | let (input, output) = test_case; 112 | let input = decode(input).unwrap(); 113 | assert_eq!(from_utf8(&input.to_base58()).unwrap(), output); 114 | } 115 | } 116 | } 117 | -------------------------------------------------------------------------------- /pallets/account-linker/src/tests/btc.rs: -------------------------------------------------------------------------------- 1 | use crate::mock::*; 2 | 3 | use codec::Encode; 4 | use frame_support::{assert_noop, assert_ok}; 5 | use parity_crypto::Keccak256; 6 | use sp_runtime::AccountId32; 7 | 8 | use bitcoin::{ 9 | network::constants::Network, 10 | secp256k1::{rand::thread_rng, Message as BTCMessage, Secp256k1}, 11 | util::{address::Address, key}, 12 | }; 13 | 14 | #[test] 15 | fn test_invalid_expiring_block_number_btc() { 16 | new_test_ext().execute_with(|| { 17 | // Generate random key pair 18 | let s = Secp256k1::new(); 19 | let pair = s.generate_keypair(&mut thread_rng()); 20 | let public_key = key::PublicKey { compressed: true, key: pair.1 }; 21 | 22 | // Generate pay-to-pubkey-hash address 23 | let address = Address::p2pkh(&public_key, Network::Bitcoin); 24 | 25 | let account: AccountId32 = AccountId32::from([255u8; 32]); 26 | let block_number: u32 = crate::EXPIRING_BLOCK_NUMBER_MAX + 1; 27 | 28 | let mut bytes = b"Link Litentry: ".encode(); 29 | let mut account_vec = account.encode(); 30 | let mut expiring_block_number_vec = block_number.encode(); 31 | 32 | bytes.append(&mut account_vec); 33 | bytes.append(&mut expiring_block_number_vec); 34 | 35 | let message = BTCMessage::from_slice(&bytes.keccak256()).unwrap(); 36 | 37 | let (v, rs) = s.sign_recoverable(&message, &pair.0).serialize_compact(); 38 | 39 | let mut sig = [0u8; 65]; 40 | sig[..64].copy_from_slice(&rs[..]); 41 | sig[64] = v.to_i32() as u8; 42 | 43 | assert_noop!( 44 | AccountLinker::link_btc( 45 | Origin::signed(account.clone()), 46 | account.clone(), 47 | 0, 48 | address.clone().to_string().as_bytes().to_vec(), 49 | block_number, 50 | sig 51 | ), 52 | AccountLinkerError::InvalidExpiringBlockNumber 53 | ); 54 | }); 55 | } 56 | 57 | #[test] 58 | fn 
test_btc_link_p2pkh() { 59 | new_test_ext().execute_with(|| { 60 | run_to_block(1); 61 | 62 | // Generate random key pair 63 | let s = Secp256k1::new(); 64 | let pair = s.generate_keypair(&mut thread_rng()); 65 | let public_key = key::PublicKey { compressed: true, key: pair.1 }; 66 | 67 | // Generate pay-to-pubkey-hash address 68 | let address = Address::p2pkh(&public_key, Network::Bitcoin); 69 | 70 | let account: AccountId32 = AccountId32::from([255u8; 32]); 71 | let block_number: u32 = 99999; 72 | 73 | let mut bytes = b"Link Litentry: ".encode(); 74 | let mut account_vec = account.encode(); 75 | let mut expiring_block_number_vec = block_number.encode(); 76 | 77 | bytes.append(&mut account_vec); 78 | bytes.append(&mut expiring_block_number_vec); 79 | 80 | let message = BTCMessage::from_slice(&bytes.keccak256()).unwrap(); 81 | 82 | let (v, rs) = s.sign_recoverable(&message, &pair.0).serialize_compact(); 83 | 84 | let mut sig = [0u8; 65]; 85 | sig[..64].copy_from_slice(&rs[..]); 86 | sig[64] = v.to_i32() as u8; 87 | 88 | let addr_expected = address.clone().to_string().as_bytes().to_vec(); 89 | 90 | assert_ok!(AccountLinker::link_btc( 91 | Origin::signed(account.clone()), 92 | account.clone(), 93 | 0, 94 | addr_expected.clone(), 95 | block_number, 96 | sig 97 | )); 98 | 99 | let addr_stored = 100 | String::from_utf8(AccountLinker::btc_addresses(&account)[0].clone()).unwrap(); 101 | 102 | assert_eq!(addr_stored, address.to_string()); 103 | 104 | assert_eq!( 105 | events(), 106 | [Event::AccountLinker(crate::Event::BtcAddressLinked(account.clone(), addr_expected)),] 107 | ); 108 | }); 109 | } 110 | 111 | #[test] 112 | fn test_btc_link_p2wpkh() { 113 | new_test_ext().execute_with(|| { 114 | run_to_block(1); 115 | 116 | // Generate random key pair 117 | let s = Secp256k1::new(); 118 | let pair = s.generate_keypair(&mut thread_rng()); 119 | let public_key = key::PublicKey { compressed: true, key: pair.1 }; 120 | 121 | // Generate pay-to-pubkey-hash address 122 | let address = Address::p2wpkh(&public_key, Network::Bitcoin).unwrap(); 123 | 124 | println!("{}", address); 125 | let account: AccountId32 = AccountId32::from([255u8; 32]); 126 | let block_number: u32 = 99999; 127 | 128 | let mut bytes = b"Link Litentry: ".encode(); 129 | let mut account_vec = account.encode(); 130 | let mut expiring_block_number_vec = block_number.encode(); 131 | 132 | bytes.append(&mut account_vec); 133 | bytes.append(&mut expiring_block_number_vec); 134 | 135 | let message = BTCMessage::from_slice(&bytes.keccak256()).unwrap(); 136 | 137 | let (v, rs) = s.sign_recoverable(&message, &pair.0).serialize_compact(); 138 | 139 | let mut sig = [0u8; 65]; 140 | sig[..64].copy_from_slice(&rs[..]); 141 | sig[64] = v.to_i32() as u8; 142 | 143 | let addr_expected = address.clone().to_string().as_bytes().to_vec(); 144 | 145 | assert_ok!(AccountLinker::link_btc( 146 | Origin::signed(account.clone()), 147 | account.clone(), 148 | 0, 149 | addr_expected.clone(), 150 | block_number, 151 | sig 152 | )); 153 | 154 | let addr_stored = 155 | String::from_utf8(AccountLinker::btc_addresses(&account)[0].clone()).unwrap(); 156 | 157 | assert_eq!(addr_stored, address.to_string()); 158 | 159 | assert_eq!( 160 | events(), 161 | [Event::AccountLinker(crate::Event::BtcAddressLinked(account.clone(), addr_expected)),] 162 | ); 163 | }); 164 | } 165 | -------------------------------------------------------------------------------- /pallets/account-linker/src/util_eth.rs: -------------------------------------------------------------------------------- 1 
| use codec::Encode; 2 | use sp_std::prelude::*; 3 | 4 | pub fn addr_from_sig(msg: [u8; 32], sig: [u8; 65]) -> Result<[u8; 20], sp_io::EcdsaVerifyError> { 5 | let pubkey = sp_io::crypto::secp256k1_ecdsa_recover(&sig, &msg)?; 6 | let hashed_pk = sp_io::hashing::keccak_256(&pubkey); 7 | 8 | let mut addr = [0u8; 20]; 9 | addr[..20].copy_from_slice(&hashed_pk[12..32]); 10 | Ok(addr) 11 | } 12 | 13 | /// Returns an eth_sign-compatible hash of the data to sign. 14 | /// The data is prefixed with a special message to prevent 15 | /// malicious DApps from using the function to sign forged transactions. 16 | pub fn eth_data_hash(mut data: Vec<u8>) -> Result<[u8; 32], &'static str> { 17 | const MSG_LEN: usize = 51; 18 | if data.len() != MSG_LEN { 19 | log::error!( 20 | "Ethereum message has an unexpected length {} !!! Expected is {}.", 21 | data.len(), 22 | MSG_LEN 23 | ); 24 | return Err("Unexpected ethereum message length!") 25 | } 26 | let mut length_bytes = usize_to_u8_array(data.len())?; 27 | let mut eth_data = b"\x19Ethereum Signed Message:\n".encode(); 28 | eth_data.append(&mut length_bytes); 29 | eth_data.append(&mut data); 30 | Ok(sp_io::hashing::keccak_256(&eth_data)) 31 | } 32 | 33 | /// Convert a usize type to a u8 array. 34 | /// The input is first converted to a string in decimal representation, 35 | /// and then this string is converted to a byte array with UTF8 encoding. 36 | /// To avoid unnecessary complexity, the current function supports up to 37 | /// 2-digit unsigned decimals (range 0 - 99) 38 | fn usize_to_u8_array(length: usize) -> Result<Vec<u8>, &'static str> { 39 | if length >= 100 { 40 | Err("Unexpected ethereum message length!") 41 | } else { 42 | let digits = b"0123456789".encode(); 43 | let tens = length / 10; 44 | let ones = length % 10; 45 | 46 | let mut vec_res: Vec<u8> = Vec::new(); 47 | if tens != 0 { 48 | vec_res.push(digits[tens]); 49 | } 50 | vec_res.push(digits[ones]); 51 | Ok(vec_res) 52 | } 53 | } 54 | 55 | #[cfg(test)] 56 | mod tests { 57 | use super::*; 58 | use hex::decode; 59 | 60 | // A test helper function to add the Ethereum prefix before message hashing 61 | pub fn eth_data_hash_test_helper(mut data: Vec<u8>) -> [u8; 32] { 62 | let mut message_data = format!("\x19Ethereum Signed Message:\n{}", data.len()).into_bytes(); 63 | message_data.append(&mut data); 64 | sp_io::hashing::keccak_256(&message_data) 65 | } 66 | 67 | #[test] 68 | fn correct_recover() { 69 | let msg = decode("61626364656667").unwrap(); 70 | let msg = eth_data_hash_test_helper(msg); 71 | 72 | let sig_bytes = decode("5900a81f236e27be7ee2c796e0de9b383aadcd8b3c53fd881dd378f4c2bc1a54406be632a464c197131c668432f32a966a19354920686a8f8fdd9c9ab0a0dd011b").unwrap(); 73 | let mut sig = [0u8; 65]; 74 | sig[0..65].copy_from_slice(&sig_bytes[0..65]); 75 | 76 | let addr_expected_bytes = decode("Fe7cef4F3A7eF57Ac2401122fB51590bfDf9350a").unwrap(); 77 | let mut addr_expected = [0u8; 20]; 78 | addr_expected[0..20].copy_from_slice(&addr_expected_bytes[0..20]); 79 | 80 | let addr = addr_from_sig(msg, sig).ok().unwrap(); 81 | assert_eq!(addr, addr_expected); 82 | } 83 | 84 | #[test] 85 | fn wrong_msg() { 86 | let msg = decode("626364656667").unwrap(); 87 | let msg = eth_data_hash_test_helper(msg); 88 | 89 | let sig_bytes = decode("5900a81f236e27be7ee2c796e0de9b383aadcd8b3c53fd881dd378f4c2bc1a54406be632a464c197131c668432f32a966a19354920686a8f8fdd9c9ab0a0dd011b").unwrap(); 90 | let mut sig = [0u8; 65]; 91 | sig[0..65].copy_from_slice(&sig_bytes[0..65]); 92 | 93 | let addr_expected_bytes =
decode("Fe7cef4F3A7eF57Ac2401122fB51590bfDf9350a").unwrap(); 94 | let mut addr_expected = [0u8; 20]; 95 | addr_expected[0..20].copy_from_slice(&addr_expected_bytes[0..20]); 96 | 97 | let addr = addr_from_sig(msg, sig).ok().unwrap(); 98 | assert_ne!(addr, addr_expected); 99 | } 100 | 101 | #[test] 102 | fn sig_from_another_addr() { 103 | let msg = decode("61626364656667").unwrap(); 104 | let msg = eth_data_hash_test_helper(msg); 105 | 106 | let sig_bytes = decode("a4543cd17d07a9b5207bbf4ccf3c9d47e0a292a6ce461427ebc50de24387887b14584651c3bc11376ba9fe662df325ced20f5c30dd782b6bee15cb474c206a341b").unwrap(); 107 | let mut sig = [0u8; 65]; 108 | sig[0..65].copy_from_slice(&sig_bytes[0..65]); 109 | 110 | let addr_expected_bytes = decode("Fe7cef4F3A7eF57Ac2401122fB51590bfDf9350a").unwrap(); 111 | let mut addr_expected = [0u8; 20]; 112 | addr_expected[0..20].copy_from_slice(&addr_expected_bytes[0..20]); 113 | 114 | let addr = addr_from_sig(msg, sig).ok().unwrap(); 115 | assert_ne!(addr, addr_expected); 116 | } 117 | 118 | #[test] 119 | fn msg_with_unexpected_length() { 120 | let msg = b"Link Litentry: 0123456789abcdef0123456789abcdef999".encode(); 121 | assert_eq!(Err("Unexpected ethereum message length!"), eth_data_hash(msg)); 122 | } 123 | 124 | #[test] 125 | fn msg_with_expected_length() { 126 | let msg = b"Link Litentry: 0123456789abcdef0123456789abcdef9999".encode(); 127 | let res = eth_data_hash(msg.clone()).ok().unwrap(); 128 | assert_eq!(eth_data_hash_test_helper(msg), res); 129 | } 130 | 131 | // Test input with more than 2 digits 132 | #[test] 133 | fn usize_to_u8_array_input_too_large() { 134 | let len: usize = 105; 135 | assert_eq!(Err("Unexpected ethereum message length!"), usize_to_u8_array(len)) 136 | } 137 | 138 | // Test inputs with one and two digits respectively 139 | // UTF8 Table: 140 | // 4 - 0x34 - 52 141 | // 0 - 0x30 - 48 142 | #[test] 143 | fn usize_to_u8_array_input_one_digit() { 144 | let len: usize = 4; 145 | assert_eq!(Ok(vec![52]), usize_to_u8_array(len)) 146 | } 147 | 148 | #[test] 149 | fn usize_to_u8_array_input_two_digits() { 150 | let len: usize = 40; 151 | assert_eq!(Ok(vec![52, 48]), usize_to_u8_array(len)) 152 | } 153 | } 154 | -------------------------------------------------------------------------------- /pallets/nft/src/benchmarking.rs: -------------------------------------------------------------------------------- 1 | #![cfg(feature = "runtime-benchmarks")] 2 | 3 | use crate::Pallet as NFT; 4 | use frame_benchmarking::{account, benchmarks, impl_benchmark_test_suite}; 5 | use frame_system::RawOrigin; 6 | use sp_runtime::traits::StaticLookup; 7 | 8 | pub use crate::*; 9 | 10 | const SEED: u32 = 0; 11 | 12 | benchmarks! { 13 | // create simple NFT class 14 | create_class { 15 | let alice: T::AccountId = account("alice", 0, SEED); 16 | ::Currency::make_free_balance_be(&alice, (CREATION_FEE + 10).into()); 17 | }: _(RawOrigin::Signed(alice), 18 | vec![1], 19 | Properties(ClassProperty::Transferable | ClassProperty::Burnable), 20 | None, 21 | None, 22 | ClassType::Simple(999999999) 23 | ) 24 | 25 | // mint simple NFT instances 26 | mint { 27 | let i in 1 .. 
1000; 28 | 29 | let alice: T::AccountId = account("alice", 0, SEED); 30 | let bob: T::AccountId = account("bob", 0, SEED); 31 | let bob_lookup = T::Lookup::unlookup(bob); 32 | 33 | ::Currency::make_free_balance_be(&alice, (CREATION_FEE + 10).into()); 34 | 35 | crate::Pallet::::create_class( 36 | RawOrigin::Signed(alice.clone()).into(), 37 | vec![1], 38 | Properties(ClassProperty::Transferable | ClassProperty::Burnable), 39 | None, 40 | None, 41 | ClassType::Simple(999999999) 42 | )?; 43 | }: _(RawOrigin::Signed(alice), bob_lookup, 0u32.into(), vec![1], i) 44 | 45 | // TODO: use a more realistic Merkle tree 46 | // claim a claim class NFT 47 | claim { 48 | let alice: T::AccountId = account("alice", 0, SEED); 49 | 50 | // account id of bob 0xd43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d 51 | let bob_bytes = [ 52 | 0xd4, 0x35, 0x93, 0xc7, 0x15, 0xfd, 0xd3, 0x1c, 0x61, 0x14, 0x1a, 0xbd, 0x04, 0xa9, 53 | 0x9f, 0xd6, 0x82, 0x2c, 0x85, 0x58, 0x85, 0x4c, 0xcd, 0xe3, 0x9a, 0x56, 0x84, 0xe7, 54 | 0xa5, 0x6d, 0xa2, 0x7d, 55 | ]; 56 | let bob = T::AccountId::decode(&mut &bob_bytes[..]).expect("32 bytes can always construct an AccountId32"); 57 | 58 | // root is 0xa8a5ec29a3df3c5a8aa6fd2935d2414cf0ce4f748a13bb2833214c3b94a6d3b3 59 | let merkle_root = [ 60 | 0xa8, 0xa5, 0xec, 0x29, 0xa3, 0xdf, 0x3c, 0x5a, 0x8a, 0xa6, 0xfd, 0x29, 0x35, 0xd2, 61 | 0x41, 0x4c, 0xf0, 0xce, 0x4f, 0x74, 0x8a, 0x13, 0xbb, 0x28, 0x33, 0x21, 0x4c, 0x3b, 62 | 0x94, 0xa6, 0xd3, 0xb3, 63 | ]; 64 | 65 | // proof of bob is 0x5182a73e48bd6e814d0c2b41672d9cb8c87c4221b55bc08e0943198e90caad1f 66 | let bob_proof = vec![[ 67 | 0x51u8, 0x82u8, 0xa7u8, 0x3eu8, 0x48u8, 0xbdu8, 0x6eu8, 0x81u8, 0x4du8, 0x0cu8, 0x2bu8, 68 | 0x41u8, 0x67u8, 0x2du8, 0x9cu8, 0xb8u8, 0xc8u8, 0x7cu8, 0x42u8, 0x21u8, 0xb5u8, 0x5bu8, 69 | 0xc0u8, 0x8eu8, 0x09u8, 0x43u8, 0x19u8, 0x8eu8, 0x90u8, 0xcau8, 0xadu8, 0x1fu8, 70 | ]]; 71 | 72 | ::Currency::make_free_balance_be(&alice, (CREATION_FEE + 10).into()); 73 | 74 | crate::Pallet::::create_class( 75 | RawOrigin::Signed(alice.clone()).into(), 76 | vec![1], 77 | Properties(ClassProperty::Transferable | ClassProperty::Burnable), 78 | None, 79 | None, 80 | ClassType::Claim(merkle_root) 81 | )?; 82 | }: _(RawOrigin::Signed(bob), 0, 0u32.into(), bob_proof) 83 | 84 | // merge two simple NFT instances 85 | merge { 86 | let total = 1000; 87 | let i in 0 .. 
999; 88 | 89 | let alice: T::AccountId = account("alice", 0, SEED); 90 | let bob: T::AccountId = account("bob", 0, SEED); 91 | let bob_lookup = T::Lookup::unlookup(bob.clone()); 92 | 93 | ::Currency::make_free_balance_be(&alice, (3 * CREATION_FEE + 10).into()); 94 | 95 | crate::Pallet::::create_class( 96 | RawOrigin::Signed(alice.clone()).into(), 97 | vec![1], 98 | Properties(ClassProperty::Transferable | ClassProperty::Burnable), 99 | None, 100 | None, 101 | ClassType::Simple(999999999) 102 | )?; 103 | 104 | crate::Pallet::::create_class( 105 | RawOrigin::Signed(alice.clone()).into(), 106 | vec![1], 107 | Properties(ClassProperty::Transferable | ClassProperty::Burnable), 108 | None, 109 | None, 110 | ClassType::Simple(999999999) 111 | )?; 112 | 113 | crate::Pallet::::mint(RawOrigin::Signed(alice.clone()).into(), bob_lookup.clone(), 0u32.into(), vec![1], total)?; 114 | 115 | crate::Pallet::::create_class( 116 | RawOrigin::Signed(alice.clone()).into(), 117 | vec![1], 118 | Properties(ClassProperty::Transferable | ClassProperty::Burnable), 119 | None, 120 | None, 121 | ClassType::Merge(0u32.into(), 1u32.into(), false) 122 | )?; 123 | 124 | crate::Pallet::::mint(RawOrigin::Signed(alice).into(), bob_lookup, 1u32.into(), vec![1], total)?; 125 | 126 | }: _(RawOrigin::Signed(bob), 2u32.into(), (0u32.into(), i.into()), (1u32.into(), i.into())) 127 | 128 | // transfer a simple NFT instance 129 | transfer { 130 | let alice: T::AccountId = account("alice", 0, SEED); 131 | let alice_lookup = T::Lookup::unlookup(alice.clone()); 132 | let bob: T::AccountId = account("bob", 0, SEED); 133 | let bob_lookup = T::Lookup::unlookup(bob.clone()); 134 | 135 | ::Currency::make_free_balance_be(&alice, (CREATION_FEE + 10).into()); 136 | 137 | crate::Pallet::::create_class( 138 | RawOrigin::Signed(alice.clone()).into(), 139 | vec![1], 140 | Properties(ClassProperty::Transferable | ClassProperty::Burnable), 141 | None, 142 | None, 143 | ClassType::Simple(999999999) 144 | )?; 145 | 146 | crate::Pallet::::mint(RawOrigin::Signed(alice).into(), bob_lookup, 0u32.into(), vec![1], 1)?; 147 | }: _(RawOrigin::Signed(bob), alice_lookup, (0u32.into(), 0u32.into())) 148 | 149 | // burn a simple NFT instance 150 | burn { 151 | let alice: T::AccountId = account("alice", 0, SEED); 152 | let bob: T::AccountId = account("bob", 0, SEED); 153 | let bob_lookup = T::Lookup::unlookup(bob.clone()); 154 | 155 | ::Currency::make_free_balance_be(&alice, (CREATION_FEE + 10).into()); 156 | 157 | crate::Pallet::::create_class( 158 | RawOrigin::Signed(alice.clone()).into(), 159 | vec![1], 160 | Properties(ClassProperty::Transferable | ClassProperty::Burnable), 161 | None, 162 | None, 163 | ClassType::Simple(999999999) 164 | )?; 165 | crate::Pallet::::mint(RawOrigin::Signed(alice).into(), bob_lookup, 0u32.into(), vec![1], 1)?; 166 | }: _(RawOrigin::Signed(bob), (0u32.into(), 0u32.into())) 167 | 168 | } 169 | 170 | impl_benchmark_test_suite!(NFT, crate::mock::new_test_ext(), crate::mock::Test,); 171 | -------------------------------------------------------------------------------- /pallets/account-linker/src/tests/eth.rs: -------------------------------------------------------------------------------- 1 | use crate::mock::*; 2 | 3 | use codec::Encode; 4 | use frame_support::{assert_noop, assert_ok}; 5 | use parity_crypto::{ 6 | publickey::{sign, Generator, KeyPair, Message, Random}, 7 | Keccak256, 8 | }; 9 | use sp_runtime::AccountId32; 10 | 11 | fn generate_msg(account: &AccountId32, block_number: u32) -> Message { 12 | let mut bytes = 
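// NB (explanatory note): the hard-coded "51" in the prefix below matches MSG_LEN in util_eth.rs, i.e. b"Link Litentry: " (15 bytes) + a 32-byte AccountId32 + a 4-byte block number.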
b"\x19Ethereum Signed Message:\n51Link Litentry: ".encode(); 13 | let mut account_vec = account.encode(); 14 | let mut expiring_block_number_vec = block_number.encode(); 15 | 16 | bytes.append(&mut account_vec); 17 | bytes.append(&mut expiring_block_number_vec); 18 | 19 | Message::from(bytes.keccak256()) 20 | } 21 | 22 | fn generate_sig(key_pair: &KeyPair, msg: &Message) -> [u8; 65] { 23 | sign(key_pair.secret(), &msg).unwrap().into_electrum() 24 | } 25 | 26 | #[test] 27 | fn test_expired_block_number_eth() { 28 | new_test_ext().execute_with(|| { 29 | let account: AccountId32 = AccountId32::from([0u8; 32]); 30 | let block_number: u32 = 0; 31 | 32 | let mut gen = Random {}; 33 | let key_pair = gen.generate(); 34 | 35 | let msg = generate_msg(&account, block_number); 36 | let sig = generate_sig(&key_pair, &msg); 37 | 38 | assert_noop!( 39 | AccountLinker::link_eth( 40 | Origin::signed(account.clone()), 41 | 0, 42 | key_pair.address().to_fixed_bytes(), 43 | block_number, 44 | sig 45 | ), 46 | AccountLinkerError::LinkRequestExpired 47 | ); 48 | }); 49 | } 50 | 51 | #[test] 52 | fn test_invalid_expiring_block_number_eth() { 53 | new_test_ext().execute_with(|| { 54 | let account: AccountId32 = AccountId32::from([0u8; 32]); 55 | let block_number: u32 = crate::EXPIRING_BLOCK_NUMBER_MAX + 1; 56 | 57 | let mut gen = Random {}; 58 | let key_pair = gen.generate(); 59 | 60 | let msg = generate_msg(&account, block_number); 61 | let sig = generate_sig(&key_pair, &msg); 62 | 63 | assert_noop!( 64 | AccountLinker::link_eth( 65 | Origin::signed(account.clone()), 66 | 0, 67 | key_pair.address().to_fixed_bytes(), 68 | block_number, 69 | sig 70 | ), 71 | AccountLinkerError::InvalidExpiringBlockNumber 72 | ); 73 | }); 74 | } 75 | 76 | #[test] 77 | fn test_unexpected_address_eth() { 78 | new_test_ext().execute_with(|| { 79 | let account: AccountId32 = AccountId32::from([72u8; 32]); 80 | let block_number: u32 = 99999; 81 | 82 | let mut gen = Random {}; 83 | let key_pair = gen.generate(); 84 | 85 | let msg = generate_msg(&account, block_number); 86 | let sig = generate_sig(&key_pair, &msg); 87 | 88 | assert_noop!( 89 | AccountLinker::link_eth( 90 | Origin::signed(account.clone()), 91 | 0, 92 | gen.generate().address().to_fixed_bytes(), 93 | block_number, 94 | sig 95 | ), 96 | AccountLinkerError::UnexpectedAddress 97 | ); 98 | }); 99 | } 100 | 101 | #[test] 102 | fn test_insert_eth_address() { 103 | new_test_ext().execute_with(|| { 104 | run_to_block(1); 105 | 106 | let account: AccountId32 = AccountId32::from([5u8; 32]); 107 | let block_number: u32 = 99999; 108 | 109 | let mut gen = Random {}; 110 | let mut expected_vec = Vec::new(); 111 | 112 | for i in 0..(MAX_ETH_LINKS) { 113 | let key_pair = gen.generate(); 114 | 115 | let msg = generate_msg(&account, block_number + i as u32); 116 | let sig = generate_sig(&key_pair, &msg); 117 | 118 | assert_ok!(AccountLinker::link_eth( 119 | Origin::signed(account.clone()), 120 | i as u32, 121 | key_pair.address().to_fixed_bytes(), 122 | block_number + i as u32, 123 | sig 124 | )); 125 | 126 | assert_eq!(AccountLinker::eth_addresses(&account).len(), i + 1); 127 | expected_vec.push(key_pair.address().to_fixed_bytes()); 128 | assert_eq!( 129 | events(), 130 | [Event::AccountLinker(crate::Event::EthAddressLinked( 131 | account.clone(), 132 | key_pair.address().to_fixed_bytes().to_vec() 133 | )),] 134 | ); 135 | } 136 | assert_eq!(AccountLinker::eth_addresses(&account), expected_vec); 137 | }); 138 | } 139 | 140 | #[test] 141 | fn test_update_eth_address() { 142 | 
new_test_ext().execute_with(|| { 143 | let account: AccountId32 = AccountId32::from([40u8; 32]); 144 | let block_number: u32 = 99999; 145 | 146 | let mut gen = Random {}; 147 | for i in 0..(MAX_ETH_LINKS) { 148 | let key_pair = gen.generate(); 149 | let msg = generate_msg(&account, block_number + i as u32); 150 | let sig = generate_sig(&key_pair, &msg); 151 | 152 | assert_ok!(AccountLinker::link_eth( 153 | Origin::signed(account.clone()), 154 | i as u32, 155 | key_pair.address().to_fixed_bytes(), 156 | block_number + i as u32, 157 | sig 158 | )); 159 | } 160 | 161 | let index: u32 = 2 as u32; 162 | // Retrieve previous addr 163 | let addr_before_update = AccountLinker::eth_addresses(&account)[index as usize]; 164 | // Update addr at slot `index` 165 | let key_pair = gen.generate(); 166 | let block_number = block_number + 9 as u32; 167 | let msg = generate_msg(&account, block_number); 168 | let sig = generate_sig(&key_pair, &msg); 169 | 170 | assert_ok!(AccountLinker::link_eth( 171 | Origin::signed(account.clone()), 172 | index, 173 | key_pair.address().to_fixed_bytes(), 174 | block_number, 175 | sig 176 | )); 177 | 178 | let updated_addr = AccountLinker::eth_addresses(&account)[index as usize]; 179 | assert_ne!(updated_addr, addr_before_update); 180 | assert_eq!(updated_addr, key_pair.address().to_fixed_bytes()); 181 | }); 182 | } 183 | 184 | #[test] 185 | fn test_eth_address_pool_overflow() { 186 | new_test_ext().execute_with(|| { 187 | let account: AccountId32 = AccountId32::from([113u8; 32]); 188 | let block_number: u32 = 99999; 189 | 190 | let mut gen = Random {}; 191 | let mut expected_vec = Vec::new(); 192 | 193 | for index in 0..(MAX_ETH_LINKS * 2) { 194 | let key_pair = gen.generate(); 195 | 196 | let msg = generate_msg(&account, block_number); 197 | let sig = generate_sig(&key_pair, &msg); 198 | 199 | assert_ok!(AccountLinker::link_eth( 200 | Origin::signed(account.clone()), 201 | index as u32, 202 | key_pair.address().to_fixed_bytes(), 203 | block_number, 204 | sig 205 | )); 206 | 207 | if index < MAX_ETH_LINKS { 208 | expected_vec.push(key_pair.address().to_fixed_bytes()); 209 | } else { 210 | expected_vec[MAX_ETH_LINKS - 1] = key_pair.address().to_fixed_bytes(); 211 | } 212 | } 213 | assert_eq!(AccountLinker::eth_addresses(&account).len(), MAX_ETH_LINKS); 214 | assert_eq!(AccountLinker::eth_addresses(&account), expected_vec); 215 | }); 216 | } 217 | -------------------------------------------------------------------------------- /pallets/account-linker/src/btc/witness.rs: -------------------------------------------------------------------------------- 1 | //! Based on https://github.com/sipa/bech32/tree/master/ref/rust 2 | //! // Copyright (c) 2017 Clark Moody 3 | // 4 | // Permission is hereby granted, free of charge, to any person obtaining a copy 5 | // of this software and associated documentation files (the "Software"), to deal 6 | // in the Software without restriction, including without limitation the rights 7 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 8 | // copies of the Software, and to permit persons to whom the Software is 9 | // furnished to do so, subject to the following conditions: 10 | // 11 | // The above copyright notice and this permission notice shall be included in 12 | // all copies or substantial portions of the Software. 
13 | // 14 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 15 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 16 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 17 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 18 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 19 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 20 | // THE SOFTWARE. 21 | 22 | use sp_std::prelude::*; 23 | 24 | pub struct WitnessProgram { 25 | /// Witness program version 26 | pub version: u8, 27 | /// Witness program content 28 | pub program: Vec<u8>, 29 | } 30 | 31 | impl WitnessProgram { 32 | /// Converts a Witness Program to a SegWit Address 33 | pub fn to_address(&self, hrp: Vec<u8>) -> Result<Vec<u8>, &'static str> { 34 | // Verify that the program is valid 35 | let mut data: Vec<u8> = vec![self.version]; 36 | // Convert 8-bit program into 5-bit 37 | let p5 = self.program.to_base32(); 38 | // let p5 = convert_bits(self.program.to_vec(), 8, 5, true)?; 39 | data.extend_from_slice(&p5); 40 | let b32 = data.encode(hrp)?; 41 | Ok(b32) 42 | } 43 | 44 | /// Extracts a WitnessProgram out of a provided script public key 45 | pub fn from_scriptpubkey(pubkey: &[u8]) -> Result<WitnessProgram, &'static str> { 46 | // We need a version byte and a program length byte, with a program at 47 | // least 2 bytes long. 48 | if pubkey.len() < 4 { 49 | return Err("TooShort") 50 | } 51 | let proglen: usize = pubkey[1] as usize; 52 | // Check that program length byte is consistent with pubkey length 53 | if pubkey.len() != 2 + proglen { 54 | return Err("InvalidLengthByte") 55 | } 56 | // Process script version 57 | let mut v: u8 = pubkey[0]; 58 | if v > 0x50 { 59 | v -= 0x50; 60 | } 61 | let program = &pubkey[2..]; 62 | Ok(WitnessProgram { version: v, program: program.to_vec() }) 63 | } 64 | } 65 | 66 | const SEP: u8 = b'1'; 67 | const ALPHABET: &'static [u8] = b"qpzry9x8gf2tvdw0s3jn54khce6mua7l"; 68 | 69 | pub trait Bech32 { 70 | fn encode(&self, hrp: Vec<u8>) -> Result<Vec<u8>, &'static str>; 71 | } 72 | 73 | impl Bech32 for [u8] { 74 | fn encode(&self, hrp: Vec<u8>) -> Result<Vec<u8>, &'static str> { 75 | if hrp.len() < 1 { 76 | return Err("invalidData") 77 | } 78 | 79 | let mut combined: Vec<u8> = self.clone().to_vec(); 80 | combined.extend_from_slice(&create_checksum(&hrp, &self.to_vec())); 81 | let mut encoded = hrp; 82 | encoded.push(SEP); 83 | for p in combined { 84 | if p >= 32 { 85 | return Err("invalidData") 86 | } 87 | encoded.push(ALPHABET[p as usize]); 88 | } 89 | Ok(encoded) 90 | } 91 | } 92 | 93 | const GEN: [u32; 5] = [0x3b6a57b2, 0x26508e6d, 0x1ea119fa, 0x3d4233dd, 0x2a1462b3]; 94 | 95 | fn hrp_expand(hrp: &Vec<u8>) -> Vec<u8> { 96 | let mut v: Vec<u8> = Vec::new(); 97 | for b in hrp { 98 | v.push(*b >> 5); 99 | } 100 | v.push(0); 101 | for b in hrp { 102 | v.push(*b & 0x1f); 103 | } 104 | v 105 | } 106 | 107 | fn create_checksum(hrp: &Vec<u8>, data: &Vec<u8>) -> Vec<u8> { 108 | let mut values: Vec<u8> = hrp_expand(hrp); 109 | values.extend_from_slice(data); 110 | // Pad with 6 zeros 111 | values.extend_from_slice(&[0u8; 6]); 112 | let plm: u32 = polymod(values) ^ 1; 113 | let mut checksum: Vec<u8> = Vec::new(); 114 | for p in 0..6 { 115 | checksum.push(((plm >> 5 * (5 - p)) & 0x1f) as u8); 116 | } 117 | checksum 118 | } 119 | 120 | fn polymod(values: Vec<u8>) -> u32 { 121 | let mut chk: u32 = 1; 122 | let mut b: u8; 123 | for v in values { 124 | b = (chk >> 25) as u8; 125 | chk = (chk & 0x1ffffff) << 5 ^ (v as u32); 126 | for i in 0..5 { 127 | if (b
>> i) & 1 == 1 { 128 | chk ^= GEN[i] 129 | } 130 | } 131 | } 132 | chk 133 | } 134 | 135 | /// A trait for converting a value to base58 encoded string. 136 | pub trait ToBase32 { 137 | /// Converts a value of `self` to a base58 value, returning the owned string. 138 | fn to_base32(&self) -> Vec; 139 | } 140 | 141 | impl ToBase32 for [u8] { 142 | // /// Convert between bit sizes 143 | // fn to_base32(&self) -> Vec { 144 | // let from: u32 = 8; 145 | // let to: u32 = 5; 146 | 147 | // let mut acc: u32 = 0; 148 | // let mut bits: u32 = 0; 149 | // let mut ret: Vec = Vec::new(); 150 | // let maxv: u32 = (1<= to { 157 | // bits -= to; 158 | // ret.push(((acc >> bits) & maxv) as u8); 159 | // } 160 | // } 161 | // if bits > 0 { 162 | // ret.push(((acc << (to - bits)) & maxv) as u8); 163 | // } 164 | 165 | // ret 166 | // } 167 | 168 | fn to_base32(&self) -> Vec { 169 | // Amount of bits left over from last round, stored in buffer. 170 | let mut buffer_bits = 0u32; 171 | // Holds all unwritten bits left over from last round. The bits are stored beginning from 172 | // the most significant bit. E.g. if buffer_bits=3, then the byte with bits a, b and c will 173 | // look as follows: [a, b, c, 0, 0, 0, 0, 0] 174 | let mut buffer: u8 = 0; 175 | 176 | let mut result = Vec::new(); 177 | 178 | for b in self.into_iter() { 179 | // Write first u5 if we have to write two u5s this round. That only happens if the 180 | // buffer holds too many bits, so we don't have to combine buffer bits with new bits 181 | // from this rounds byte. 182 | if buffer_bits >= 5 { 183 | result.push((buffer & 0b1111_1000) >> 3); 184 | buffer <<= 5; 185 | buffer_bits -= 5; 186 | } 187 | 188 | // Combine all bits from buffer with enough bits from this rounds byte so that they fill 189 | // a u5. Save remaining bits from byte to buffer. 190 | let from_buffer = buffer >> 3; 191 | let from_byte = b >> (3 + buffer_bits); // buffer_bits <= 4 192 | 193 | result.push(from_buffer | from_byte); 194 | buffer = b << (5 - buffer_bits); 195 | buffer_bits += 3; 196 | } 197 | 198 | // There can be at most two u5s left in the buffer after processing all bytes, write them. 
199 | if buffer_bits >= 5 { 200 | result.push((buffer & 0b1111_1000) >> 3); 201 | buffer <<= 5; 202 | buffer_bits -= 5; 203 | } 204 | 205 | if buffer_bits != 0 { 206 | result.push(buffer >> 3); 207 | } 208 | 209 | result 210 | } 211 | } 212 | 213 | #[cfg(test)] 214 | mod tests { 215 | use super::*; 216 | use std::str::from_utf8; 217 | 218 | #[test] 219 | fn test_to_base32_basic() { 220 | assert_eq!( 221 | from_utf8(&vec![0x00, 0x01, 0x02].encode(b"bech32".to_vec()).unwrap()).unwrap(), 222 | "bech321qpz4nc4pe" 223 | ); 224 | } 225 | 226 | #[test] 227 | fn valid_address() { 228 | let pairs: Vec<(&str, Vec)> = vec![ 229 | ( 230 | "BC1QW508D6QEJXTDG4Y5R3ZARVARY0C5XW7KV8F3T4", 231 | vec![ 232 | 0x00, 0x14, 0x75, 0x1e, 0x76, 0xe8, 0x19, 0x91, 0x96, 0xd4, 0x54, 0x94, 0x1c, 233 | 0x45, 0xd1, 0xb3, 0xa3, 0x23, 0xf1, 0x43, 0x3b, 0xd6, 234 | ], 235 | ), 236 | ( 237 | "bc1pw508d6qejxtdg4y5r3zarvary0c5xw7kw508d6qejxtdg4y5r3zarvary0c5xw7k7grplx", 238 | vec![ 239 | 0x51, 0x28, 0x75, 0x1e, 0x76, 0xe8, 0x19, 0x91, 0x96, 0xd4, 0x54, 0x94, 0x1c, 240 | 0x45, 0xd1, 0xb3, 0xa3, 0x23, 0xf1, 0x43, 0x3b, 0xd6, 0x75, 0x1e, 0x76, 0xe8, 241 | 0x19, 0x91, 0x96, 0xd4, 0x54, 0x94, 0x1c, 0x45, 0xd1, 0xb3, 0xa3, 0x23, 0xf1, 242 | 0x43, 0x3b, 0xd6, 243 | ], 244 | ), 245 | ("BC1SW50QA3JX3S", vec![0x60, 0x02, 0x75, 0x1e]), 246 | ( 247 | "bc1zw508d6qejxtdg4y5r3zarvaryvg6kdaj", 248 | vec![ 249 | 0x52, 0x10, 0x75, 0x1e, 0x76, 0xe8, 0x19, 0x91, 0x96, 0xd4, 0x54, 0x94, 0x1c, 250 | 0x45, 0xd1, 0xb3, 0xa3, 0x23, 251 | ], 252 | ), 253 | ]; 254 | for p in pairs { 255 | let (address, scriptpubkey) = p; 256 | 257 | let hrp = b"bc".to_vec(); 258 | 259 | let spk_result = WitnessProgram::from_scriptpubkey(&scriptpubkey); 260 | assert!(spk_result.is_ok()); 261 | let prog = spk_result.unwrap(); 262 | 263 | let enc_result = prog.to_address(hrp); 264 | assert!(enc_result.is_ok()); 265 | 266 | let enc_address = enc_result.unwrap(); 267 | assert_eq!(address.to_lowercase(), from_utf8(&enc_address).unwrap().to_lowercase()); 268 | } 269 | } 270 | } 271 | -------------------------------------------------------------------------------- /pallets/nft/src/mock.rs: -------------------------------------------------------------------------------- 1 | use super::*; 2 | use crate as nft; 3 | use frame_support::{assert_noop, assert_ok}; 4 | use frame_support::{ 5 | parameter_types, 6 | traits::{OnFinalize, OnInitialize}, 7 | }; 8 | use frame_system as system; 9 | use sp_core::H256; 10 | use sp_runtime::{ 11 | generic, 12 | traits::{BlakeTwo256, IdentityLookup}, 13 | AccountId32, 14 | }; 15 | use sp_std::any::{Any, TypeId}; 16 | 17 | type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; 18 | type Block = frame_system::mocking::MockBlock; 19 | 20 | // Configure a mock runtime to test the pallet. 21 | frame_support::construct_runtime!( 22 | pub enum Test where 23 | Block = Block, 24 | NodeBlock = Block, 25 | UncheckedExtrinsic = UncheckedExtrinsic, 26 | { 27 | System: frame_system::{Pallet, Call, Config, Storage, Event}, 28 | Balances: pallet_balances::{Pallet, Call, Storage, Config, Event}, 29 | OrmlNFT: orml_nft::{Pallet, Storage, Config}, 30 | Nft: nft::{Pallet, Call, Storage, Event}, 31 | } 32 | ); 33 | 34 | parameter_types! 
{ 35 | pub const BlockHashCount: u32 = 250; 36 | pub const SS58Prefix: u8 = 42; 37 | } 38 | 39 | impl system::Config for Test { 40 | type BaseCallFilter = (); 41 | type Origin = Origin; 42 | type BlockWeights = (); 43 | type BlockLength = (); 44 | type DbWeight = (); 45 | type Call = Call; 46 | type Index = u32; 47 | type BlockNumber = u32; 48 | type Hash = H256; 49 | type Hashing = BlakeTwo256; 50 | type AccountId = AccountId32; 51 | type Lookup = IdentityLookup; 52 | type Header = generic::Header; 53 | type Event = Event; 54 | type BlockHashCount = BlockHashCount; 55 | type Version = (); 56 | type PalletInfo = PalletInfo; 57 | type AccountData = pallet_balances::AccountData; 58 | type OnNewAccount = (); 59 | type OnKilledAccount = (); 60 | type SystemWeightInfo = (); 61 | type SS58Prefix = SS58Prefix; 62 | type OnSetCode = (); 63 | } 64 | 65 | parameter_types! { 66 | pub const ExistentialDeposit: u64 = 1; 67 | pub const MaxLocks: u32 = 10; 68 | } 69 | 70 | impl pallet_balances::Config for Test { 71 | type MaxReserves = (); 72 | type ReserveIdentifier = [u8; 8]; 73 | type MaxLocks = MaxLocks; 74 | type Balance = u64; 75 | type Event = Event; 76 | type DustRemoval = (); 77 | type ExistentialDeposit = ExistentialDeposit; 78 | type AccountStore = System; 79 | type WeightInfo = (); 80 | } 81 | 82 | parameter_types! { 83 | pub const ClassCreationFee: u32 = CREATION_FEE; 84 | pub const Pot: AccountId32 = AccountId32::new([9u8; 32]); 85 | } 86 | 87 | impl nft::Config for Test { 88 | type Currency = Balances; 89 | type Event = Event; 90 | type WeightInfo = (); 91 | type ClassCreationFee = ClassCreationFee; 92 | type Pot = Pot; 93 | } 94 | 95 | parameter_types! { 96 | pub const MaxClassMetadata: u32 = 1024; 97 | pub const MaxTokenMetadata: u32 = 1024; 98 | } 99 | 100 | impl orml_nft::Config for Test { 101 | type ClassId = u32; 102 | type TokenId = u64; 103 | type ClassData = ClassData, ClassIdOf>; 104 | type TokenData = TokenData; 105 | type MaxClassMetadata = MaxClassMetadata; 106 | type MaxTokenMetadata = MaxTokenMetadata; 107 | } 108 | 109 | pub type NftError = nft::Error; 110 | 111 | // Build genesis storage according to the mock runtime. 112 | pub fn new_test_ext() -> sp_io::TestExternalities { 113 | system::GenesisConfig::default().build_storage::().unwrap().into() 114 | } 115 | 116 | pub fn run_to_block(n: u32) { 117 | while System::block_number() < n { 118 | >::on_finalize(System::block_number()); 119 | >::on_finalize(System::block_number()); 120 | System::set_block_number(System::block_number() + 1); 121 | >::on_initialize(System::block_number()); 122 | >::on_initialize(System::block_number()); 123 | } 124 | } 125 | 126 | /// Put Event type as T, this method filters the system events storage accordingly 127 | /// 128 | /// Type Parameters: 129 | /// - `T`: Event/ pallet Event type 130 | /// - Example: TypeId::of:: : global event, it will not reject anything and always return true. 
131 | /// Event::System : Event of frame_system 132 | /// Event::Balances : Event of pallet_balances 133 | /// Event::Nft: Event ofself crate 134 | /// _ : return empty vector 135 | pub fn events_filter() -> Vec { 136 | let mut evt = System::events(); 137 | 138 | evt.retain(|evt| if_right_events::(&evt.event)); 139 | return evt.into_iter().map(|evt| evt.event).collect::>(); 140 | } 141 | 142 | /// return true if Event is an instance of T 143 | /// 144 | /// Parameters: 145 | /// - `evt`: Event 146 | /// Type Parameters: 147 | /// - `T`: Event/ pallet Event type 148 | /// - Example: TypeId::of:: : global event, it will not reject anything and always return true. 149 | /// Event::System : Event of frame_system 150 | /// Event::Balances : Event of pallet_balances 151 | /// Event::Nft: Event ofself crate 152 | /// 153 | /// Ormal_NFT is also tested but no imported Event so far. 154 | pub fn if_right_events(evt: &Event) -> bool { 155 | if TypeId::of::() == TypeId::of::() { 156 | return true; 157 | } else { 158 | match evt { 159 | Event::System(i) => return if_right_raw_events::(i), 160 | Event::Balances(i) => return if_right_raw_events::(i), 161 | Event::Nft(i) => return if_right_raw_events::(i), 162 | } 163 | } 164 | } 165 | 166 | /// return true if s is an instance of T 167 | /// 168 | /// Parameters: 169 | /// - `s`: Any 170 | /// Type Parameters: 171 | /// - `T`: type 172 | pub fn if_right_raw_events(s: &dyn Any) -> bool { 173 | if let Some(_) = s.downcast_ref::() { 174 | true 175 | } else { 176 | false 177 | } 178 | } 179 | 180 | #[test] 181 | fn check_test_helper() { 182 | let evt = Event::System(frame_system::Event::NewAccount(AccountId32::from([0u8; 32]))); 183 | assert_eq!(if_right_events::>(&evt), true); 184 | 185 | let evt = Event::Balances(pallet_balances::Event::::Transfer( 186 | AccountId32::from([0u8; 32]), 187 | AccountId32::from([1u8; 32]), 188 | CREATION_FEE.into(), 189 | )); 190 | assert_eq!(if_right_events::>(&evt), true); 191 | 192 | let evt = Event::Nft(crate::Event::CreatedClass(AccountId32::from([0u8; 32]), 0)); 193 | assert_eq!(if_right_events::>(&evt), true); 194 | } 195 | 196 | #[test] 197 | fn demostration_of_event_filter() { 198 | new_test_ext().execute_with(|| { 199 | let alice_account: AccountId32 = AccountId32::from([ 200 | 0xd4, 0x35, 0x93, 0xc7, 0x15, 0xfd, 0xd3, 0x1c, 0x61, 0x14, 0x1a, 0xbd, 0x04, 0xa9, 201 | 0x9f, 0xd6, 0x82, 0x2c, 0x85, 0x58, 0x85, 0x4c, 0xcd, 0xe3, 0x9a, 0x56, 0x84, 0xe7, 202 | 0xa5, 0x6d, 0xa2, 0x7d, 203 | ]); 204 | 205 | run_to_block(1); 206 | assert_eq!(System::block_number(), 1); 207 | 208 | // give balance to Alice 209 | let _ = Balances::deposit_creating(&alice_account, (CREATION_FEE + 10).into()); 210 | // issue a simple class 211 | assert_ok!(Nft::create_class( 212 | Origin::signed(alice_account.clone()), 213 | CID::default(), 214 | Properties::default(), 215 | None, 216 | None, 217 | ClassType::Simple(100), 218 | )); 219 | 220 | // > type argument: give the events belong to frame_system only 221 | assert_eq!( 222 | events_filter::>(), 223 | [ 224 | Event::System(frame_system::Event::NewAccount(alice_account.clone())), 225 | Event::System(frame_system::Event::NewAccount(Pot::get())), 226 | ] 227 | ); 228 | 229 | // > type argument: give the events belong to pallet_balances only 230 | assert_eq!( 231 | events_filter::>(), 232 | [ 233 | Event::Balances(pallet_balances::Event::::Endowed( 234 | alice_account.clone(), 235 | (CREATION_FEE + 10).into() 236 | )), 237 | Event::Balances(pallet_balances::Event::::Endowed( 238 | Pot::get(), 239 
| CREATION_FEE.into() 240 | )), 241 | Event::Balances(pallet_balances::Event::::Transfer( 242 | alice_account.clone(), 243 | Pot::get(), 244 | CREATION_FEE.into() 245 | )), 246 | ] 247 | ); 248 | 249 | // > type argument: which in our case, crate is our nft crate, give the events belong to self-design events only 250 | assert_eq!( 251 | events_filter::>(), 252 | [Event::Nft(crate::Event::CreatedClass(alice_account.clone(), 0)),] 253 | ); 254 | 255 | // argument: Event is the general type, give all events 256 | assert_eq!( 257 | events_filter::(), 258 | [ 259 | Event::System(frame_system::Event::NewAccount(alice_account.clone())), 260 | Event::Balances(pallet_balances::Event::::Endowed( 261 | alice_account.clone(), 262 | (CREATION_FEE + 10).into() 263 | )), 264 | Event::System(frame_system::Event::NewAccount(Pot::get())), 265 | Event::Balances(pallet_balances::Event::::Endowed( 266 | Pot::get(), 267 | CREATION_FEE.into() 268 | )), 269 | Event::Balances(pallet_balances::Event::::Transfer( 270 | alice_account.clone(), 271 | Pot::get(), 272 | CREATION_FEE.into() 273 | )), 274 | Event::Nft(crate::Event::CreatedClass(alice_account.clone(), 0)), 275 | ] 276 | ); 277 | 278 | // get_vector provide event display on index level. negative index will display reversed order element's reference. 279 | assert_eq!( 280 | events_filter::()[4], 281 | Event::Balances(pallet_balances::Event::::Transfer( 282 | alice_account.clone(), 283 | Pot::get(), 284 | CREATION_FEE.into() 285 | )), 286 | ); 287 | }) 288 | } 289 | -------------------------------------------------------------------------------- /pallets/offchain-worker/src/tests.rs: -------------------------------------------------------------------------------- 1 | // This file is part of Substrate. 2 | 3 | // Copyright (C) 2020-2021 Parity Technologies (UK) Ltd. 4 | // SPDX-License-Identifier: Apache-2.0 5 | 6 | // Licensed under the Apache License, Version 2.0 (the "License"); 7 | // you may not use this file except in compliance with the License. 8 | // You may obtain a copy of the License at 9 | // 10 | // http://www.apache.org/licenses/LICENSE-2.0 11 | // 12 | // Unless required by applicable law or agreed to in writing, software 13 | // distributed under the License is distributed on an "AS IS" BASIS, 14 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | // See the License for the specific language governing permissions and 16 | // limitations under the License. 17 | 18 | use crate::*; 19 | use crate as offchain_worker; 20 | use frame_support::{parameter_types, weights::Weight}; 21 | use sp_core::{sr25519::Signature, H256}; 22 | use sp_runtime::{ 23 | testing::{Header, TestXt}, 24 | traits::{BlakeTwo256, Extrinsic as ExtrinsicT, IdentifyAccount, IdentityLookup, Verify}, 25 | }; 26 | 27 | type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; 28 | type Block = frame_system::mocking::MockBlock; 29 | 30 | // For testing the module, we construct a mock runtime. 31 | frame_support::construct_runtime!( 32 | pub enum Test where 33 | Block = Block, 34 | NodeBlock = Block, 35 | UncheckedExtrinsic = UncheckedExtrinsic, 36 | { 37 | System: frame_system::{Pallet, Call, Config, Storage, Event}, 38 | Balances: pallet_balances::{Pallet, Call, Storage, Config, Event}, 39 | AccountLinker: account_linker::{Pallet, Call, Storage, Event}, 40 | OffchainWorker: offchain_worker::{Pallet, Call, Storage, Event,}, 41 | } 42 | ); 43 | 44 | parameter_types! 
{ 45 | pub const BlockHashCount: u64 = 250; 46 | pub BlockWeights: frame_system::limits::BlockWeights = 47 | frame_system::limits::BlockWeights::simple_max(1024); 48 | } 49 | impl frame_system::Config for Test { 50 | type BaseCallFilter = (); 51 | type BlockWeights = (); 52 | type BlockLength = (); 53 | type DbWeight = (); 54 | type Origin = Origin; 55 | type Call = Call; 56 | type Index = u64; 57 | type BlockNumber = u64; 58 | type Hash = H256; 59 | type Hashing = BlakeTwo256; 60 | type AccountId = sp_core::sr25519::Public; 61 | type Lookup = IdentityLookup; 62 | type Header = Header; 63 | type Event = Event; 64 | type BlockHashCount = BlockHashCount; 65 | type Version = (); 66 | type PalletInfo = PalletInfo; 67 | type AccountData = pallet_balances::AccountData; 68 | type OnNewAccount = (); 69 | type OnKilledAccount = (); 70 | type SystemWeightInfo = (); 71 | type SS58Prefix = (); 72 | type OnSetCode = (); 73 | } 74 | 75 | parameter_types! { 76 | pub const ExistentialDeposit: u128 = 500; 77 | } 78 | 79 | impl pallet_balances::Config for Test { 80 | type MaxLocks = (); 81 | /// The type for recording an account's balance. 82 | type Balance = u128; 83 | /// The ubiquitous event type. 84 | type Event = Event; 85 | type DustRemoval = (); 86 | type ExistentialDeposit = ExistentialDeposit; 87 | type AccountStore = System; 88 | type WeightInfo = (); 89 | type MaxReserves = (); 90 | type ReserveIdentifier = [u8; 8]; 91 | } 92 | 93 | type Extrinsic = TestXt; 94 | type AccountId = <::Signer as IdentifyAccount>::AccountId; 95 | 96 | impl frame_system::offchain::SigningTypes for Test { 97 | type Public = ::Signer; 98 | type Signature = Signature; 99 | } 100 | 101 | impl frame_system::offchain::SendTransactionTypes for Test 102 | where 103 | Call: From, 104 | { 105 | type OverarchingCall = Call; 106 | type Extrinsic = Extrinsic; 107 | } 108 | 109 | impl frame_system::offchain::CreateSignedTransaction for Test 110 | where 111 | Call: From, 112 | { 113 | fn create_transaction>( 114 | call: Call, 115 | _public: ::Signer, 116 | _account: AccountId, 117 | nonce: u64, 118 | ) -> Option<(Call, ::SignaturePayload)> { 119 | Some((call, (nonce, ()))) 120 | } 121 | } 122 | 123 | impl account_linker::Config for Test { 124 | type Event = Event; 125 | type WeightInfo = (); 126 | } 127 | 128 | parameter_types! 
{ 129 | pub const QueryTaskRedundancy: u32 = 3; 130 | pub const QuerySessionLength: u32 = 5; 131 | pub const OcwQueryReward: u128 = 1; 132 | pub const MaximumWeightForDataAggregation: Weight = 100_000_000_000; 133 | pub const MaximumCommitsPerSession: u32 = 100; 134 | } 135 | 136 | impl Config for Test { 137 | type AuthorityId = offchain_worker::crypto::TestAuthId; 138 | type Call = Call; 139 | type Event = Event; 140 | type Balance = u128; 141 | type QueryTaskRedundancy = QueryTaskRedundancy; 142 | type QuerySessionLength = QuerySessionLength; 143 | type Currency = Balances; 144 | type Reward = (); 145 | type OcwQueryReward = OcwQueryReward; 146 | type WeightInfo = (); 147 | type MaximumWeightForDataAggregation = MaximumWeightForDataAggregation; 148 | type MaximumCommitsPerSession = MaximumCommitsPerSession; 149 | } 150 | 151 | #[test] 152 | fn test_chars_to_u128() { 153 | let correct_balance = vec![ 154 | '5', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', 155 | ]; 156 | assert_eq!(Ok(500000000000000000_u128), utils::chars_to_u128(&correct_balance)); 157 | 158 | let correct_balance = vec!['a', '2']; 159 | assert_eq!(Err("Wrong u128 balance data format"), utils::chars_to_u128(&correct_balance)); 160 | 161 | let correct_balance = vec!['0', 'x', 'f', 'e']; 162 | assert_eq!(Ok(254_u128), utils::chars_to_u128(&correct_balance)); 163 | 164 | // Corner case check 165 | let correct_balance = vec!['0', 'x']; 166 | assert_eq!(Ok(0_u128), utils::chars_to_u128(&correct_balance)); 167 | } 168 | 169 | #[test] 170 | fn test_parse_etherscan_balances() { 171 | let double_balances = r#" 172 | { 173 | "status": "1", 174 | "message": "OK", 175 | "result": 176 | [ 177 | {"account":"0x742d35Cc6634C0532925a3b844Bc454e4438f44e","balance":"12"}, 178 | {"account":"0xBE0eB53F46cd790Cd13851d5EFf43D12404d33E8","balance":"21"} 179 | ] 180 | }"#; 181 | assert_eq!(Some(vec![12, 21]), urls::parse_etherscan_balances(double_balances)); 182 | } 183 | 184 | #[test] 185 | fn test_parse_etherscan_balances_2() { 186 | let double_balances = r#" 187 | { 188 | "status": "1", 189 | "message": "OK", 190 | "result": 191 | [ 192 | {"account":"0x742d35Cc6634C0532925a3b844Bc454e4438f44e","balance":"12"}, 193 | {"account":"0xBE0eB53F46cd790Cd13851d5EFf43D12404d33E8","balance":"21"} 194 | ] 195 | }"#; 196 | 197 | let token_info: urls::EtherScanResponse = serde_json::from_str(&double_balances).unwrap(); 198 | assert_eq!(token_info.status, "1".as_bytes().to_vec()); 199 | assert_eq!(token_info.result[0].balance, "12".as_bytes().to_vec()); 200 | } 201 | 202 | #[test] 203 | fn test_parse_blockchain_info_balances() { 204 | let double_balances = r#" 205 | { 206 | "1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa":{"final_balance":30,"n_tx":2635,"total_received":6835384571}, 207 | "15EW3AMRm2yP6LEF5YKKLYwvphy3DmMqN6":{"final_balance":1220,"n_tx":4,"total_received":310925609} 208 | }"#; 209 | let result = urls::parse_blockchain_info_balances(double_balances); 210 | assert_eq!(true, (Some(vec![30, 1220]) == result || Some(vec![1220, 30]) == result)); 211 | 212 | // Test case should fail because fraction of the first balance value is non zero 213 | let double_balances = r#" 214 | { 215 | "1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa":{"final_balance":30.5,"n_tx":2635,"total_received":6835384571}, 216 | "15EW3AMRm2yP6LEF5YKKLYwvphy3DmMqN6":{"final_balance":1220,"n_tx":4,"total_received":310925609} 217 | }"#; 218 | assert_eq!(Some(vec![1220]), urls::parse_blockchain_info_balances(double_balances)); 219 | 220 | // Test case should fail 
because first balance value is negative 221 | let double_balances = r#" 222 | { 223 | "1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa":{"final_balance":-30,"n_tx":2635,"total_received":6835384571}, 224 | "15EW3AMRm2yP6LEF5YKKLYwvphy3DmMqN6":{"final_balance":1220,"n_tx":4,"total_received":310925609} 225 | }"#; 226 | assert_eq!(Some(vec![1220]), urls::parse_blockchain_info_balances(double_balances)); 227 | } 228 | 229 | #[test] 230 | fn test_parse_infura_balances() { 231 | let double_balances = r#" 232 | [ 233 | {"jsonrpc":"2.0","id":1,"result":"0x4563918244f40000"}, 234 | {"jsonrpc":"2.0","id":1,"result":"0xff"} 235 | ] 236 | "#; 237 | 238 | assert_eq!(Some(vec![5000000000000000000, 255]), urls::parse_infura_balances(double_balances)); 239 | } 240 | 241 | #[test] 242 | fn test_parse_infura_balances_2() { 243 | let double_balances = r#" 244 | [ 245 | {"jsonrpc":"2.0","id":1,"result":"0x4563918244f40000"}, 246 | {"jsonrpc":"2.0","id":1,"result":"0xff"} 247 | ] 248 | "#; 249 | let token_info: Vec = serde_json::from_str(double_balances).unwrap(); 250 | assert_eq!(token_info[0].id, 1); 251 | } 252 | 253 | // fetch_balances only executed in offchain worker context, need investigate how to call it in test 254 | // #[test] 255 | // fn test_fetch_balances() { 256 | // let get = urls::HttpGet { 257 | // blockchain: urls::BlockChainType::ETH, 258 | // prefix: "https://api-ropsten.etherscan.io/api?module=account&action=balancemulti&address=0x", 259 | // delimiter: ",0x", 260 | // postfix: "&tag=latest&apikey=", 261 | // api_token: "RF71W4Z2RDA7XQD6EN19NGB66C2QD9UPHB", 262 | // }; 263 | 264 | // let test_account = "4d88dc5D528A33E4b8bE579e9476715F60060582".as_bytes(); 265 | // let mut test_account_byte_array = [0u8; 20]; 266 | // test_account_byte_array.copy_from_slice(&test_account[0..20]); 267 | 268 | // let mut accounts: Vec<[u8; 20]> = Vec::new(); 269 | // accounts.push(test_account_byte_array); 270 | 271 | // sp_io::TestExternalities::default().execute_with(|| { 272 | // match >::fetch_balances(accounts, urls::HttpRequest::GET(get), &urls::parse_etherscan_balances) { 273 | // Ok(b) => assert_eq!(500000000000000000_u128, b), 274 | // Err(_) => panic!("Error occurs in test_fetch_balance!!"), 275 | // }; 276 | // }); 277 | // } 278 | -------------------------------------------------------------------------------- /pallets/offchain-worker/src/urls.rs: -------------------------------------------------------------------------------- 1 | use super::utils; 2 | use alt_serde::{Deserialize, Deserializer}; 3 | use codec::{Decode, Encode}; 4 | use core::fmt; 5 | use sp_runtime::offchain::{http, storage::StorageValueRef}; 6 | use sp_std::prelude::*; 7 | 8 | /// Asset type 9 | #[derive(Encode, Decode, Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord)] 10 | pub enum BlockChainType { 11 | /// invalid 12 | Invalid, 13 | /// eth token 14 | ETH, 15 | /// bitcoin 16 | BTC, 17 | } 18 | 19 | impl Default for BlockChainType { 20 | fn default() -> Self { 21 | BlockChainType::Invalid 22 | } 23 | } 24 | 25 | /// Eth source enum 26 | #[derive(Encode, Decode, Copy, Clone, Debug, PartialEq)] 27 | pub enum DataSource { 28 | /// invalid 29 | Invalid, 30 | /// etherscan 31 | EthEtherScan, 32 | /// infura 33 | EthInfura, 34 | /// blockchain 35 | BtcBlockChain, 36 | } 37 | 38 | pub const TOTAL_DATA_SOURCE_NUMBER: u32 = 3; 39 | pub const DATA_SOURCE_LIST: [DataSource; TOTAL_DATA_SOURCE_NUMBER as usize] = 40 | [DataSource::EthEtherScan, DataSource::EthInfura, DataSource::BtcBlockChain]; 41 | 42 | impl Default for DataSource { 43 | fn default() 
-> Self { 44 | DataSource::Invalid 45 | } 46 | } 47 | 48 | /// Data source to blockchain type 49 | pub fn data_source_to_index(data_source: DataSource) -> u32 { 50 | match data_source { 51 | DataSource::Invalid => u32::MAX, 52 | DataSource::EthEtherScan => 0, 53 | DataSource::EthInfura => 1, 54 | DataSource::BtcBlockChain => 2, 55 | } 56 | } 57 | 58 | /// Data source to blockchain type 59 | pub fn data_source_to_block_chain_type(data_source: DataSource) -> BlockChainType { 60 | match data_source { 61 | DataSource::Invalid => BlockChainType::Invalid, 62 | DataSource::EthEtherScan => BlockChainType::ETH, 63 | DataSource::EthInfura => BlockChainType::ETH, 64 | DataSource::BtcBlockChain => BlockChainType::BTC, 65 | } 66 | } 67 | 68 | /// Http Get URL structure 69 | pub struct HttpGet<'a> { 70 | pub blockchain: BlockChainType, 71 | // URL affix 72 | pub prefix: &'a str, 73 | pub delimiter: &'a str, 74 | pub postfix: &'a str, 75 | pub api_token: &'a str, 76 | } 77 | 78 | /// Http Post URL structure 79 | pub struct HttpPost<'a> { 80 | pub blockchain: BlockChainType, 81 | // URL affix 82 | pub url_main: &'a str, 83 | pub api_token: &'a str, 84 | // Body affix 85 | pub prefix: &'a str, 86 | pub delimiter: &'a str, 87 | pub postfix: &'a str, 88 | } 89 | 90 | /// Request enum to wrap up both get and post method 91 | pub enum HttpRequest<'a> { 92 | GET(HttpGet<'a>), 93 | POST(HttpPost<'a>), 94 | } 95 | 96 | /// Store all API tokens for offchain worker to send request to website 97 | #[derive(Deserialize, Encode, Decode, Default)] 98 | #[serde(crate = "alt_serde")] 99 | pub struct TokenInfo { 100 | /// API token for etherscan service 101 | #[serde(deserialize_with = "de_string_to_bytes")] 102 | pub etherscan: Vec, 103 | /// API token for infura service 104 | #[serde(deserialize_with = "de_string_to_bytes")] 105 | pub infura: Vec, 106 | /// API token for blockchain.info website 107 | #[serde(deserialize_with = "de_string_to_bytes")] 108 | pub blockchain: Vec, 109 | } 110 | 111 | /// Balances data embedded in etherscan response 112 | #[derive(Deserialize, Encode, Decode, Default)] 113 | #[serde(crate = "alt_serde")] 114 | pub struct EtherScanBalance { 115 | /// Ethereum account 116 | #[serde(deserialize_with = "de_string_to_bytes")] 117 | pub account: Vec, 118 | /// Eth balance 119 | #[serde(deserialize_with = "de_string_to_bytes")] 120 | pub balance: Vec, 121 | } 122 | 123 | /// Response data from etherscan 124 | #[derive(Deserialize, Encode, Decode, Default)] 125 | #[serde(crate = "alt_serde")] 126 | pub struct EtherScanResponse { 127 | /// Http response status 128 | #[serde(deserialize_with = "de_string_to_bytes")] 129 | pub status: Vec, 130 | /// Http response message 131 | #[serde(deserialize_with = "de_string_to_bytes")] 132 | pub message: Vec, 133 | /// Ethereum account and its balance 134 | pub result: Vec, 135 | } 136 | 137 | /// Balances data from Infura service 138 | #[derive(Deserialize, Encode, Decode, Default)] 139 | #[serde(crate = "alt_serde")] 140 | pub struct InfuraBalance { 141 | /// Json RPV version 142 | #[serde(deserialize_with = "de_string_to_bytes")] 143 | pub jsonrpc: Vec, 144 | /// Query ID 145 | pub id: u32, 146 | /// Balance data 147 | #[serde(deserialize_with = "de_string_to_bytes")] 148 | pub result: Vec, 149 | } 150 | 151 | /// Response from Infura 152 | #[derive(Deserialize, Encode, Decode, Default)] 153 | #[serde(crate = "alt_serde")] 154 | pub struct InfuraResponse { 155 | /// Response vector for several Ethreum account 156 | pub response: Vec, 157 | } 158 | 159 | /// 
Deserialize string to Vec 160 | pub fn de_string_to_bytes<'de, D>(de: D) -> Result, D::Error> 161 | where 162 | D: Deserializer<'de>, 163 | { 164 | let s: &str = Deserialize::deserialize(de)?; 165 | Ok(s.as_bytes().to_vec()) 166 | } 167 | 168 | /// Implement Debug trait for print TokenInfo 169 | impl fmt::Debug for TokenInfo { 170 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 171 | write!( 172 | f, 173 | "{{ etherscan: {}, infura: {}, blockchain: {} }}", 174 | sp_std::str::from_utf8(&self.etherscan).map_err(|_| fmt::Error)?, 175 | sp_std::str::from_utf8(&self.infura).map_err(|_| fmt::Error)?, 176 | sp_std::str::from_utf8(&self.blockchain).map_err(|_| fmt::Error)?, 177 | ) 178 | } 179 | } 180 | 181 | // Fetch json result from remote URL with get method 182 | pub fn fetch_json_http_get<'a>(remote_url: &'a [u8]) -> Result, &'static str> { 183 | let remote_url_str = 184 | core::str::from_utf8(remote_url).map_err(|_| "Error in converting remote_url to string")?; 185 | 186 | let pending = http::Request::get(remote_url_str) 187 | .send() 188 | .map_err(|_| "Error in sending http GET request")?; 189 | 190 | let response = pending.wait().map_err(|_| "Error in waiting http response back")?; 191 | 192 | if response.code != 200 { 193 | log::warn!("Unexpected status code: {}", response.code); 194 | return Err("Non-200 status code returned from http request") 195 | } 196 | 197 | let json_result: Vec = response.body().collect::>(); 198 | 199 | let balance = 200 | core::str::from_utf8(&json_result).map_err(|_| "JSON result cannot convert to string")?; 201 | 202 | Ok(balance.as_bytes().to_vec()) 203 | } 204 | 205 | // Fetch json result from remote URL with post method 206 | pub fn fetch_json_http_post<'a>( 207 | remote_url: &'a [u8], 208 | body: &'a [u8], 209 | ) -> Result, &'static str> { 210 | let remote_url_str = 211 | core::str::from_utf8(remote_url).map_err(|_| "Error in converting remote_url to string")?; 212 | 213 | log::info!("Offchain Worker post request url is {}.", remote_url_str); 214 | 215 | let pending = http::Request::post(remote_url_str, vec![body]) 216 | .send() 217 | .map_err(|_| "Error in sending http POST request")?; 218 | 219 | let response = pending.wait().map_err(|_| "Error in waiting http response back")?; 220 | 221 | if response.code != 200 { 222 | log::warn!("Unexpected status code: {}", response.code); 223 | return Err("Non-200 status code returned from http request") 224 | } 225 | 226 | let json_result: Vec = response.body().collect::>(); 227 | 228 | let balance = 229 | core::str::from_utf8(&json_result).map_err(|_| "JSON result cannot convert to string")?; 230 | 231 | Ok(balance.as_bytes().to_vec()) 232 | } 233 | 234 | // Send request to local server for query api tokens 235 | pub fn send_get_token() -> Result, &'static str> { 236 | let pending = http::Request::get(super::TOKEN_SERVER_URL) 237 | .send() 238 | .map_err(|_| "Error in sending http GET request")?; 239 | 240 | let response = pending.wait().map_err(|_| "Error in waiting http response back")?; 241 | 242 | if response.code != 200 { 243 | log::warn!("Unexpected status code: {}", response.code); 244 | return Err("Non-200 status code returned from http request") 245 | } 246 | 247 | let json_result: Vec = response.body().collect::>(); 248 | 249 | Ok(json_result) 250 | } 251 | 252 | // Get the API tokens from local server 253 | pub fn get_token() -> Result<(), &'static str> { 254 | let json_result = send_get_token()?; 255 | match core::str::from_utf8(&json_result) { 256 | Ok(balance) => 
parse_store_tokens(balance), 257 | Err(_) => Err("Error occurred while converting from raw bytes to string"), 258 | } 259 | } 260 | 261 | #[allow(dead_code)] 262 | // Parse the balance from etherscan response 263 | pub fn parse_etherscan_balances(price_str: &str) -> Option> { 264 | // { 265 | // "status": "1", 266 | // "message": "OK", 267 | // "result": 268 | // [ 269 | // {"account":"0x742d35Cc6634C0532925a3b844Bc454e4438f44e","balance":"3804372455842738500000001"}, 270 | // {"account":"0xBE0eB53F46cd790Cd13851d5EFf43D12404d33E8","balance":"2571179226430511381996287"} 271 | // ] 272 | // } 273 | log::info!("Offchain Worker response from etherscan is {:?}", price_str); 274 | 275 | let token_info: EtherScanResponse = serde_json::from_str(price_str).ok()?; 276 | let result: Vec = token_info 277 | .result 278 | .iter() 279 | .map(|item| { 280 | match utils::chars_to_u128(&item.balance.iter().map(|i| *i as char).collect()) { 281 | Ok(balance) => balance, 282 | Err(_) => 0_u128, 283 | } 284 | }) 285 | .collect(); 286 | Some(result) 287 | } 288 | 289 | #[allow(dead_code)] 290 | // Parse balances from blockchain info response 291 | pub fn parse_blockchain_info_balances(price_str: &str) -> Option> { 292 | // { 293 | // "1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa":{"final_balance":6835384571,"n_tx":2635,"total_received":6835384571}, 294 | // "15EW3AMRm2yP6LEF5YKKLYwvphy3DmMqN6":{"final_balance":0,"n_tx":4,"total_received":310925609} 295 | // } 296 | let mut balance_vec: Vec = Vec::new(); 297 | 298 | let value: serde_json::Value = serde_json::from_str(price_str).ok()?; 299 | 300 | match value { 301 | serde_json::Value::Object(map_data) => 302 | for (_, v) in map_data.iter() { 303 | match v["final_balance"].as_u64() { 304 | Some(balance) => balance_vec.push(balance as u128), 305 | None => (), 306 | } 307 | }, 308 | _ => (), 309 | }; 310 | 311 | Some(balance_vec) 312 | } 313 | 314 | #[allow(dead_code)] 315 | // Parse the balance from infura response 316 | pub fn parse_infura_balances(price_str: &str) -> Option> { 317 | //[ 318 | // {"jsonrpc":"2.0","id":1,"result":"0x4563918244f40000"}, 319 | // {"jsonrpc":"2.0","id":1,"result":"0xff"} 320 | //] 321 | 322 | let token_info: Vec = serde_json::from_str(price_str).ok()?; 323 | let result: Vec = token_info 324 | .iter() 325 | .map(|item| match utils::chars_to_u128(&item.result.iter().map(|i| *i as char).collect()) { 326 | Ok(balance) => balance, 327 | Err(_) => 0_u128, 328 | }) 329 | .collect(); 330 | Some(result) 331 | } 332 | 333 | // Parse the token from local server 334 | pub fn parse_store_tokens(resp_str: &str) -> Result<(), &'static str> { 335 | let token_info: Result = serde_json::from_str(&resp_str); 336 | 337 | match token_info { 338 | Ok(info) => { 339 | let s_info = StorageValueRef::persistent(b"offchain-worker::token"); 340 | s_info.set(&info); 341 | log::info!("Token info get from local server is {:?}.", &info); 342 | Ok(()) 343 | }, 344 | Err(_) => { 345 | log::info!("Error occurred while requesting API keys."); 346 | Err("Error occurred while parsing json string") 347 | }, 348 | } 349 | } 350 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 
8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 
179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /pallets/account-linker/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! # AccountLinker Pallet 2 | //! 3 | //! The AccountLinker pallet provides functionality for linking a Litentry account to accounts on 4 | //! other networks (currently supporting Ethereum (BSC), BTC and the Polkadot ecosystem). 5 | //! 6 | //! ## Overview 7 | //! 8 | //! The AccountLinker pallet stores the linking relations between Litentry accounts and accounts on other 9 | //! networks. It also offers extrinsics for users to update these linking relations. For each relation, 10 | //! a user may either link a fresh account or replace an existing linked account with a newly provided one. 11 | //! 12 | //! ## Interface 13 | //! 14 | //! ### Dispatchable Functions 15 | //! 16 | //! * `link_eth` - Link an Ethereum address to a Litentry account providing a proof signature 17 | //! from the private key of that Ethereum address. 18 | //! * `link_btc` - Link a BTC address to a Litentry account providing a proof signature 19 | //! from the private key of that BTC address. 20 | //! * `link_polkadot` - Initiate a link request to link a Litentry address to another Litentry address. 21 | //! * `accept_polkadot` - Accept a pending `link_polkadot` request to link a Litentry address 22 | //! to another Litentry address. 23 | //! 24 | //! [`Call`]: ./enum.Call.html 25 | //! 
[`Config`]: ./trait.Config.html 26 | 27 | #![cfg_attr(not(feature = "std"), no_std)] 28 | 29 | pub use pallet::*; 30 | 31 | #[cfg(test)] 32 | mod mock; 33 | 34 | #[cfg(test)] 35 | mod tests; 36 | 37 | mod benchmarking; 38 | mod btc; 39 | mod util_eth; 40 | pub mod weights; 41 | 42 | type EthAddress = [u8; 20]; 43 | // rsv signature 44 | type Signature = [u8; 65]; 45 | 46 | #[frame_support::pallet] 47 | pub mod pallet { 48 | use crate::*; 49 | use btc::{base58::ToBase58, witness::WitnessProgram}; 50 | use codec::Encode; 51 | use frame_support::{dispatch::DispatchResultWithPostInfo, pallet_prelude::*}; 52 | use frame_system::{ensure_signed, pallet_prelude::*}; 53 | use sp_io::crypto::secp256k1_ecdsa_recover_compressed; 54 | use sp_std::prelude::*; 55 | use weights::WeightInfo; 56 | pub const EXPIRING_BLOCK_NUMBER_MAX: u32 = 10 * 60 * 24 * 30; // 30 days for 6s per block 57 | pub const MAX_ETH_LINKS: usize = 3; 58 | pub const MAX_BTC_LINKS: usize = 3; 59 | pub const MAX_POLKADOT_LINKS: usize = 3; 60 | 61 | enum BTCAddrType { 62 | Legacy, 63 | Segwit, 64 | } 65 | 66 | #[pallet::config] 67 | pub trait Config: frame_system::Config { 68 | type Event: From> + IsType<::Event>; 69 | type WeightInfo: WeightInfo; 70 | } 71 | 72 | #[pallet::event] 73 | #[pallet::generate_deposit(pub(super) fn deposit_event)] 74 | #[pallet::metadata(T::AccountId = "AccountId")] 75 | pub enum Event { 76 | /// Ethereum address successfully linked. \[Lintentry account, Ethereum account\] 77 | EthAddressLinked(T::AccountId, Vec), 78 | /// BTC address successfully linked. \[Lintentry account, BTC account\] 79 | BtcAddressLinked(T::AccountId, Vec), 80 | /// Polkadot address successfully linked. \[Lintentry account, Polkadot account\] 81 | PolkadotAddressLinked(T::AccountId, T::AccountId), 82 | } 83 | 84 | #[pallet::error] 85 | pub enum Error { 86 | // Cannot recover the signature 87 | EcdsaRecoverFailure, 88 | // Link request expired 89 | LinkRequestExpired, 90 | // Provided address mismatch the address recovered from signature recovery 91 | UnexpectedAddress, 92 | // Unexpected ethereum message length error 93 | UnexpectedEthMsgLength, 94 | // Invalid BTC address to link 95 | InvalidBTCAddress, 96 | // Expiration block number is too far away from now 97 | InvalidExpiringBlockNumber, 98 | // Try to resolve a wrong link_polkadot request 99 | WrongPendingRequest, 100 | } 101 | 102 | #[pallet::hooks] 103 | impl Hooks> for Pallet {} 104 | 105 | #[pallet::pallet] 106 | #[pallet::generate_store(pub(super) trait Store)] 107 | pub struct Pallet(_); 108 | 109 | #[pallet::storage] 110 | #[pallet::getter(fn eth_addresses)] 111 | pub(super) type EthereumLink = 112 | StorageMap<_, Blake2_128Concat, T::AccountId, Vec, ValueQuery>; 113 | 114 | #[pallet::storage] 115 | #[pallet::getter(fn btc_addresses)] 116 | pub(super) type BitcoinLink = 117 | StorageMap<_, Blake2_128Concat, T::AccountId, Vec>, ValueQuery>; 118 | 119 | #[pallet::storage] 120 | #[pallet::getter(fn polkadot_addresses)] 121 | pub(super) type PolkadotLink = 122 | StorageMap<_, Blake2_128Concat, T::AccountId, Vec, ValueQuery>; 123 | 124 | #[pallet::storage] 125 | #[pallet::getter(fn polkadot_pending)] 126 | pub(super) type PolkadotPending = 127 | StorageMap<_, Blake2_128Concat, T::AccountId, (T::AccountId, u32), ValueQuery>; 128 | 129 | #[pallet::call] 130 | impl Pallet { 131 | /// Link an Ethereum address to the origin Litentry account providing a proof signature from the private key 132 | /// of that Ethereum address. 
133 | /// 134 | /// The runtime needs to ensure that a malicious index can be handled correctly. 135 | /// Currently, when vec.len > MAX_ETH_LINKS, replacement will always happen at the final index. 136 | /// Otherwise the new address is appended to the next free slot, unless `index` points to an already occupied slot, in which case that slot is overwritten. 137 | /// 138 | /// Parameters: 139 | /// - `index`: The index of the linked Ethereum address that the user wants to replace. 140 | /// - `addr_expected`: The intended Ethereum address to link to the origin's Litentry address 141 | /// - `expiring_block_number`: The block number after which this link request will expire 142 | /// - `sig`: The rsv-signature generated by the private key of `addr_expected` 143 | /// 144 | /// Emits `EthAddressLinked` event when successful. 145 | #[pallet::weight(T::WeightInfo::link_eth())] 146 | pub fn link_eth( 147 | origin: OriginFor, 148 | index: u32, 149 | addr_expected: EthAddress, 150 | expiring_block_number: T::BlockNumber, 151 | sig: Signature, 152 | ) -> DispatchResultWithPostInfo { 153 | let account = ensure_signed(origin)?; 154 | 155 | let current_block_number = >::block_number(); 156 | ensure!(expiring_block_number > current_block_number, Error::::LinkRequestExpired); 157 | ensure!( 158 | (expiring_block_number - current_block_number) < 159 | T::BlockNumber::from(EXPIRING_BLOCK_NUMBER_MAX), 160 | Error::::InvalidExpiringBlockNumber 161 | ); 162 | 163 | let bytes = Self::generate_raw_message(&account, expiring_block_number); 164 | 165 | let hash = 166 | util_eth::eth_data_hash(bytes).map_err(|_| Error::::UnexpectedEthMsgLength)?; 167 | 168 | let mut msg = [0u8; 32]; 169 | msg[..32].copy_from_slice(&hash[..32]); 170 | 171 | let addr = 172 | util_eth::addr_from_sig(msg, sig).map_err(|_| Error::::EcdsaRecoverFailure)?; 173 | ensure!(addr == addr_expected, Error::::UnexpectedAddress); 174 | 175 | EthereumLink::::mutate(&account, |addrs| { 176 | let index = index as usize; 177 | // NOTE: allow linking `MAX_ETH_LINKS` eth addresses. 178 | if (index >= addrs.len()) && (addrs.len() != MAX_ETH_LINKS) { 179 | addrs.push(addr.clone()); 180 | } else if (index >= addrs.len()) && (addrs.len() == MAX_ETH_LINKS) { 181 | addrs[MAX_ETH_LINKS - 1] = addr.clone(); 182 | } else { 183 | addrs[index] = addr.clone(); 184 | } 185 | }); 186 | 187 | Self::deposit_event(Event::EthAddressLinked(account, addr.to_vec())); 188 | 189 | Ok(().into()) 190 | } 191 | 192 | /// Link a BTC address to a Litentry account providing a proof signature from the private key 193 | /// of that BTC address. The BTC address may either be a legacy P2PKH one (starting with b'1') 194 | /// or a native Segwit P2WPKH one (starting with b'bc'). 195 | /// 196 | /// The runtime needs to ensure that a malicious index can be handled correctly. 197 | /// Currently, when vec.len > MAX_BTC_LINKS, replacement will always happen at the final index. 198 | /// Otherwise the new address is appended to the next free slot, unless `index` points to an already occupied slot, in which case that slot is overwritten. 199 | /// 200 | /// Parameters: 201 | /// - `account`: The Litentry address that is to be linked 202 | /// - `index`: The index of the linked BTC address that the user wants to replace. 203 | /// - `addr_expected`: The intended BTC address to link to the origin's Litentry address 204 | /// - `expiring_block_number`: The block number after which this link request will expire 205 | /// - `sig`: The rsv-signature generated by the private key of `addr_expected` 206 | /// 207 | /// Emits `BtcAddressLinked` event when successful. 
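// --- Editor's sketch (not part of the original pallet source) ---
// Both `link_eth` and `link_btc` verify a signature over the exact bytes produced by
// `generate_raw_message` further below: "Link Litentry: " ++ SCALE(account) ++ SCALE(expiring_block_number).
// A client or test could assemble the same payload like this (names such as `account` and
// `expiring_block_number` are placeholders; the chain-specific prefixing/hashing, e.g.
// `util_eth::eth_data_hash` for Ethereum, is not reproduced here):
//
// use codec::Encode;
// let mut payload = b"Link Litentry: ".encode();
// payload.append(&mut account.encode());
// payload.append(&mut expiring_block_number.encode());
// // sign the (hashed) payload with the external chain's private key to obtain the
// // 65-byte rsv `sig` expected by the extrinsics above.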
208 | #[pallet::weight(T::WeightInfo::link_btc())] 209 | pub fn link_btc( 210 | origin: OriginFor, 211 | account: T::AccountId, 212 | index: u32, 213 | addr_expected: Vec, 214 | expiring_block_number: T::BlockNumber, 215 | sig: Signature, 216 | ) -> DispatchResultWithPostInfo { 217 | let _ = ensure_signed(origin)?; 218 | 219 | let current_block_number = >::block_number(); 220 | ensure!(expiring_block_number > current_block_number, Error::::LinkRequestExpired); 221 | ensure!( 222 | (expiring_block_number - current_block_number) < 223 | T::BlockNumber::from(EXPIRING_BLOCK_NUMBER_MAX), 224 | Error::::InvalidExpiringBlockNumber 225 | ); 226 | 227 | // TODO: we may enlarge this 2 228 | if addr_expected.len() < 2 { 229 | Err(Error::::InvalidBTCAddress)? 230 | } 231 | 232 | let addr_type = if addr_expected[0] == b'1' { 233 | BTCAddrType::Legacy 234 | } else if addr_expected[0] == b'b' && addr_expected[1] == b'c' { 235 | // TODO: a better way? 236 | BTCAddrType::Segwit 237 | } else { 238 | Err(Error::::InvalidBTCAddress)? 239 | }; 240 | 241 | let bytes = Self::generate_raw_message(&account, expiring_block_number); 242 | 243 | // TODO: seems btc uses sha256??? 244 | let hash = sp_io::hashing::keccak_256(&bytes); 245 | 246 | let mut msg = [0u8; 32]; 247 | msg[..32].copy_from_slice(&hash[..32]); 248 | 249 | let pk = secp256k1_ecdsa_recover_compressed(&sig, &msg) 250 | .map_err(|_| Error::::EcdsaRecoverFailure)?; 251 | 252 | let addr = match addr_type { 253 | BTCAddrType::Legacy => btc::legacy::btc_addr_from_pk(&pk).to_base58(), 254 | // Native P2WPKH is a scriptPubKey of 22 bytes. 255 | // It starts with a OP_0, followed by a canonical push of the keyhash (i.e. 0x0014{20-byte keyhash}) 256 | // keyhash is RIPEMD160(SHA256) of a compressed public key 257 | // https://bitcoincore.org/en/segwit_wallet_dev/ 258 | BTCAddrType::Segwit => { 259 | let pk_hash = btc::legacy::hash160(&pk); 260 | let mut pk = [0u8; 22]; 261 | pk[0] = 0; 262 | pk[1] = 20; 263 | pk[2..].copy_from_slice(&pk_hash); 264 | let wp = WitnessProgram::from_scriptpubkey(&pk.to_vec()) 265 | .map_err(|_| Error::::InvalidBTCAddress)?; 266 | wp.to_address(b"bc".to_vec()).map_err(|_| Error::::InvalidBTCAddress)? 267 | }, 268 | }; 269 | 270 | ensure!(addr == addr_expected, Error::::UnexpectedAddress); 271 | 272 | BitcoinLink::::mutate(&account, |addrs| { 273 | let index = index as usize; 274 | // NOTE: allow linking `MAX_BTC_LINKS` btc addresses. 275 | if (index >= addrs.len()) && (addrs.len() != MAX_BTC_LINKS) { 276 | addrs.push(addr.clone()); 277 | } else if (index >= addrs.len()) && (addrs.len() == MAX_BTC_LINKS) { 278 | addrs[MAX_BTC_LINKS - 1] = addr.clone(); 279 | } else { 280 | addrs[index] = addr.clone(); 281 | } 282 | }); 283 | 284 | Self::deposit_event(Event::BtcAddressLinked(account, addr)); 285 | 286 | Ok(().into()) 287 | } 288 | 289 | /// Initiate a link request to link a Litentry address (= any account in Polkadot ecosystem) 290 | /// to another Litentry address (= any account in Polkadot ecosystem). 291 | /// 292 | /// Parameters: 293 | /// - `account`: The Litentry address that is to be linked 294 | /// - `index`: The index of the linked Litentry address that the user wants to replace with. 
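// --- Editor's illustration (hedged sketch, not original source) ---
// Polkadot linking is a two-step handshake. Assuming a runtime where this pallet is
// instantiated as `AccountLinker`, with `alice` owning the link list and `bob` being linked:
//
// // 1. alice opens a pending request, recording PolkadotPending[alice] = (bob, 0):
// // AccountLinker::link_polkadot(Origin::signed(alice.clone()), bob.clone(), 0)?;
// // 2. bob accepts alice's pending request; bob is pushed into PolkadotLink[alice] and
// //    PolkadotAddressLinked(alice, bob) is emitted:
// // AccountLinker::accept_polkadot(Origin::signed(bob.clone()), alice.clone())?;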
295 | #[pallet::weight(T::WeightInfo::link_polkadot())] 296 | pub fn link_polkadot( 297 | origin: OriginFor, 298 | account: T::AccountId, 299 | index: u32, 300 | ) -> DispatchResultWithPostInfo { 301 | let origin = ensure_signed(origin)?; 302 | 303 | // TODO: charge some fee 304 | 305 | >::insert(origin, (account, index)); 306 | 307 | Ok(().into()) 308 | } 309 | 310 | /// Accept a pending `link_polkadot` request to link a Litentry address (= any account in Polkadot ecosystem) 311 | /// to another Litentry address (= any account in Polkadot ecosystem). 312 | /// 313 | /// The runtime needs to ensure that a malicious index can be handled correctly. 314 | /// Currently, when vec.len > MAX_POLKADOT_LINKS, replacement will always happen at the final index. 315 | /// Otherwise the new address is appended to the next free slot, unless the stored index points to an already occupied slot, in which case that slot is overwritten. 316 | /// 317 | /// Parameters: 318 | /// - `account`: The Litentry address that initiated the pending `link_polkadot` request 319 | /// 320 | /// Emits `PolkadotAddressLinked` event when successful. 321 | #[pallet::weight(T::WeightInfo::accept_polkadot())] 322 | pub fn accept_polkadot( 323 | origin: OriginFor, 324 | account: T::AccountId, 325 | ) -> DispatchResultWithPostInfo { 326 | let origin = ensure_signed(origin)?; 327 | 328 | let (target, index) = Self::polkadot_pending(&account); 329 | ensure!(target == origin, Error::::WrongPendingRequest); 330 | >::remove(&account); // clear the pending request, which is keyed by the initiating `account` 331 | 332 | PolkadotLink::::mutate(&account, |addrs| { 333 | let index = index as usize; 334 | // NOTE: allow linking `MAX_POLKADOT_LINKS` polkadot addresses. 335 | if (index >= addrs.len()) && (addrs.len() != MAX_POLKADOT_LINKS) { 336 | addrs.push(origin.clone()); 337 | } else if (index >= addrs.len()) && (addrs.len() == MAX_POLKADOT_LINKS) { 338 | addrs[MAX_POLKADOT_LINKS - 1] = origin.clone(); 339 | } else { 340 | addrs[index] = origin.clone(); 341 | } 342 | }); 343 | 344 | Self::deposit_event(Event::PolkadotAddressLinked(account, origin)); 345 | 346 | Ok(().into()) 347 | } 348 | } 349 | 350 | impl Pallet { 351 | /// Assemble the message that the user has signed 352 | /// Format: "Link Litentry: " + Litentry account + expiring block number 353 | fn generate_raw_message( 354 | account: &T::AccountId, 355 | expiring_block_number: T::BlockNumber, 356 | ) -> Vec { 357 | let mut bytes = b"Link Litentry: ".encode(); 358 | let mut account_vec = account.encode(); 359 | let mut expiring_block_number_vec = expiring_block_number.encode(); 360 | 361 | bytes.append(&mut account_vec); 362 | bytes.append(&mut expiring_block_number_vec); 363 | bytes 364 | } 365 | } 366 | } 367 | -------------------------------------------------------------------------------- /pallets/nft/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! # NFT Pallet 2 | //! 3 | //! The NFT pallet provides support for non-fungible assets on Litentry 4 | //! 5 | //! ## Overview 6 | //! 7 | //! The NFT pallet enables third parties to issue (mainly identity related) non-fungible assets. 8 | //! Currently there are 3 types (check `ClassType`) of non-fungible assets: 9 | //! 1. Each instance is directly issued by the corresponding third party: Simple(u32) 10 | //! 2. At issuance, a list of users is provided and only these users may claim: Claim(HashByte32) 11 | //! 3. Can be minted only when the user has 2 specific base non-fungible assets: Merge(ID, ID, bool) 12 | //! 13 | //! ## Interface 14 | //! 15 | //! ### Dispatchable Functions 16 | //! #### Class Issuance 17 | //! 
* `create_class` - Create an NFT class (think the whole CryptoKitties or Hashmask each as a class) 18 | //! 19 | //! #### Instance Generation 20 | //! * `mint` - Mint specified number of instance of `Simple(u32)` type 21 | //! * `claim` - Whitelisted user claim an instance of `Claim(HashByte32)`, with a Merkle proof whose root 22 | //! is the HashByte32 23 | //! * `merge` - From two NFT instance, mint a new NFT instance of `Merge(ID, ID, bool)` type 24 | //! 25 | //! #### Daily User Actions 26 | //! * `transfer` - Transfer ownership of a transferable NFT 27 | //! * `burn` - Burn a burnable NFT 28 | //! 29 | //! [`Call`]: ./enum.Call.html 30 | //! [`Config`]: ./trait.Config.html 31 | 32 | #![cfg_attr(not(feature = "std"), no_std)] 33 | 34 | use enumflags2::BitFlags; 35 | use frame_support::{ 36 | pallet_prelude::*, 37 | traits::{Currency, ExistenceRequirement::KeepAlive, Get}, 38 | transactional, 39 | }; 40 | use frame_system::pallet_prelude::*; 41 | use orml_traits::NFT; 42 | #[cfg(feature = "std")] 43 | use serde::{Deserialize, Serialize}; 44 | use sp_io::hashing::keccak_256; 45 | use sp_runtime::{traits::StaticLookup, DispatchResult, RuntimeDebug}; 46 | use sp_std::vec::Vec; 47 | 48 | #[cfg(test)] 49 | mod mock; 50 | 51 | pub mod benchmarking; 52 | #[cfg(test)] 53 | mod tests; 54 | pub mod weights; 55 | 56 | mod impl_nonfungibles; 57 | pub mod merkle_proof; 58 | 59 | pub use pallet::*; 60 | pub use weights::WeightInfo; 61 | 62 | pub type CID = Vec; 63 | 64 | pub type HashByte32 = [u8; 32]; 65 | 66 | pub const CREATION_FEE: u32 = 100; 67 | 68 | #[repr(u8)] 69 | #[derive(Encode, Decode, Clone, Copy, BitFlags, RuntimeDebug, PartialEq, Eq)] 70 | pub enum ClassProperty { 71 | /// Token can be transferred 72 | Transferable = 0b00000001, 73 | /// Token can be burned 74 | Burnable = 0b00000010, 75 | } 76 | 77 | #[derive(Clone, Copy, PartialEq, Default, RuntimeDebug)] 78 | #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] 79 | pub struct Properties(pub BitFlags); 80 | 81 | impl Eq for Properties {} 82 | impl Encode for Properties { 83 | fn using_encoded R>(&self, f: F) -> R { 84 | self.0.bits().using_encoded(f) 85 | } 86 | } 87 | impl Decode for Properties { 88 | fn decode(input: &mut I) -> sp_std::result::Result { 89 | let field = u8::decode(input)?; 90 | Ok(Self(>::from_bits(field as u8).map_err(|_| "invalid value")?)) 91 | } 92 | } 93 | 94 | #[derive(Encode, Decode, Clone, RuntimeDebug, PartialEq, Eq)] 95 | #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] 96 | pub struct ClassData { 97 | /// Property of token 98 | pub properties: Properties, 99 | /// from when user can claim this nft 100 | pub start_block: Option, 101 | /// till when user can claim this nft 102 | pub end_block: Option, 103 | /// type of this NFT class 104 | pub class_type: ClassType, 105 | } 106 | 107 | #[derive(Encode, Decode, Clone, RuntimeDebug, PartialEq, Eq)] 108 | #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] 109 | pub struct TokenData { 110 | /// if token is used to generate an advanced nft 111 | pub used: bool, 112 | /// 0 = common, otherwise say 1 = rare, 2 = super rare 113 | pub rarity: u8, 114 | } 115 | 116 | #[derive(Encode, Decode, Clone, RuntimeDebug, PartialEq, Eq)] 117 | #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] 118 | pub enum ClassType { 119 | /// A class that owner can mint instances no more than u32 120 | Simple(u32), 121 | /// A class whitelisted user may claim provided a proof 122 | /// that indicates his/her account is in the Merkle tree with 123 | 
/// root HashByte32 124 | Claim(HashByte32), 125 | /// A class that is merged from two class ID and ID 126 | /// if true, burn the two instances 127 | Merge(ID, ID, bool), 128 | } 129 | 130 | pub type TokenIdOf = ::TokenId; 131 | pub type ClassIdOf = ::ClassId; 132 | pub type BlockNumberOf = ::BlockNumber; 133 | pub type BalanceOf = 134 | <::Currency as Currency<::AccountId>>::Balance; 135 | 136 | #[frame_support::pallet] 137 | pub mod pallet { 138 | use super::*; 139 | 140 | #[pallet::config] 141 | pub trait Config: 142 | frame_system::Config 143 | + orml_nft::Config< 144 | ClassData = ClassData, ClassIdOf>, 145 | TokenData = TokenData, 146 | > 147 | { 148 | /// The the currency to pay NFT class creation fee. 149 | type Currency: Currency; 150 | 151 | /// The overarching event type. 152 | type Event: From> + IsType<::Event>; 153 | 154 | /// Weight information for the extrinsics in this module. 155 | type WeightInfo: WeightInfo; 156 | 157 | /// The amount of fee to pay to create an NFT class. 158 | #[pallet::constant] 159 | type ClassCreationFee: Get>; 160 | 161 | /// Treasury address 162 | #[pallet::constant] 163 | type Pot: Get; 164 | } 165 | 166 | #[pallet::error] 167 | pub enum Error { 168 | /// ClassId not found 169 | ClassIdNotFound, 170 | /// Class ClaimedList not found (Only for Claim type) 171 | ClassClaimedListNotFound, 172 | /// The operator is not the owner of the token and has no permission 173 | NoPermission, 174 | /// Quantity is invalid. need >= 1 175 | InvalidQuantity, 176 | /// Property of class don't support transfer 177 | NonTransferable, 178 | /// Property of class don't support burn 179 | NonBurnable, 180 | /// Token not found 181 | TokenNotFound, 182 | /// Wrong class type 183 | WrongClassType, 184 | /// Merge nft's base nfts are not provided correctly 185 | WrongMergeBase, 186 | /// Use already used token to merge new token 187 | TokenUsed, 188 | /// Mint more NFT than the maximum allowed 189 | QuantityOverflow, 190 | /// Out of NFT valid issuance period 191 | OutOfCampaignPeriod, 192 | /// NFT for certain user already claimed 193 | TokenAlreadyClaimed, 194 | /// user claim verification fails 195 | UserNotInClaimList, 196 | /// user cannot pay NFT class creation fee 197 | CreationFeeNotPaid, 198 | } 199 | 200 | #[pallet::event] 201 | #[pallet::generate_deposit(pub(crate) fn deposit_event)] 202 | pub enum Event { 203 | /// Created NFT class. \[owner, class_id\] 204 | CreatedClass(T::AccountId, ClassIdOf), 205 | /// Minted NFT token. \[from, to, class_id, start_token_id, quantity\] 206 | MintedToken(T::AccountId, T::AccountId, ClassIdOf, TokenIdOf, u32), 207 | /// Claimed NFT token. \[claimer, class_id, token_id\] 208 | ClaimedToken(T::AccountId, ClassIdOf, TokenIdOf), 209 | /// Merged NFT token. \[owner, class_id, token_id\] 210 | MergedToken(T::AccountId, ClassIdOf, TokenIdOf), 211 | /// Transferred NFT token. \[from, to, class_id, token_id\] 212 | TransferredToken(T::AccountId, T::AccountId, ClassIdOf, TokenIdOf), 213 | /// Burned NFT token. \[owner, class_id, token_id\] 214 | BurnedToken(T::AccountId, ClassIdOf, TokenIdOf), 215 | } 216 | 217 | #[pallet::pallet] 218 | pub struct Pallet(_); 219 | 220 | #[pallet::storage] 221 | #[pallet::getter(fn claimed_list)] 222 | /// Claimed index vec for `Claim(HashByte32)` type NFT class, 223 | /// to guarantee each user claims once. 
224 | /// maximal index of claiming user is 2^16 which is more than enough 225 | pub(super) type ClaimedList = 226 | StorageMap<_, Blake2_128Concat, ClassIdOf, Vec, ValueQuery>; 227 | 228 | #[pallet::hooks] 229 | impl Hooks for Pallet {} 230 | 231 | #[pallet::call] 232 | impl Pallet { 233 | /// Create NFT class, each class is a collection of NFT instances. 234 | /// Currently there are 3 types (refer to `ClassType`) 235 | /// 1. Each instance is directly issued by the corresponding third party: Simple(u32) 236 | /// 2. At issuance, a list of user is provided and only these users may claim: Claim(HashByte32) 237 | /// 3. Can be minted only when the user have 2 specific base non fungible assets: Merge(ID, ID, bool) 238 | /// 239 | /// Parameters: 240 | /// - `metadata`: CID identifier of the class's metadata 241 | /// - `properties`: Class property, include `Transferable` `Burnable` 242 | /// - `start_block`: From when the instances can be minted (None if no restriction) 243 | /// - `end_block`: Till when the instances can be minted (None if no restriction) 244 | /// - `class_type`: Type of this class (refer to `ClassType`) 245 | /// 246 | /// Emits `CreatedClass` event when successful. 247 | #[pallet::weight(::WeightInfo::create_class())] 248 | #[transactional] 249 | pub fn create_class( 250 | origin: OriginFor, 251 | metadata: CID, 252 | properties: Properties, 253 | start_block: Option>, 254 | end_block: Option>, 255 | class_type: ClassType>, 256 | ) -> DispatchResultWithPostInfo { 257 | let who = ensure_signed(origin)?; 258 | let next_id = orml_nft::Pallet::::next_class_id(); 259 | 260 | let fee = T::ClassCreationFee::get(); 261 | T::Currency::transfer(&who, &T::Pot::get(), fee, KeepAlive) 262 | .map_err(|_| Error::::CreationFeeNotPaid)?; 263 | 264 | match class_type { 265 | ClassType::Merge(id1, id2, burn) => { 266 | if !burn { 267 | ensure!( 268 | >::classes(id1).is_some(), 269 | Error::::ClassIdNotFound 270 | ); 271 | ensure!( 272 | >::classes(id2).is_some(), 273 | Error::::ClassIdNotFound 274 | ); 275 | } else { 276 | let class_info1 = orml_nft::Pallet::::classes(id1) 277 | .ok_or(Error::::ClassIdNotFound)?; 278 | let class_info2 = orml_nft::Pallet::::classes(id2) 279 | .ok_or(Error::::ClassIdNotFound)?; 280 | 281 | let data1 = class_info1.data; 282 | ensure!( 283 | data1.properties.0.contains(ClassProperty::Burnable), 284 | Error::::NonBurnable 285 | ); 286 | let data2 = class_info2.data; 287 | ensure!( 288 | data2.properties.0.contains(ClassProperty::Burnable), 289 | Error::::NonBurnable 290 | ); 291 | } 292 | } 293 | ClassType::Claim(_) => { 294 | ClaimedList::::insert(next_id, Vec::::new()); 295 | } 296 | _ => {} 297 | } 298 | 299 | let data = ClassData { properties, start_block, end_block, class_type }; 300 | orml_nft::Pallet::::create_class(&who, metadata.to_vec(), data)?; 301 | 302 | Self::deposit_event(Event::CreatedClass(who, next_id)); 303 | Ok(().into()) 304 | } 305 | 306 | /// Mint `Simple(u32)` NFT instances from the class owner 307 | /// 308 | /// Parameters: 309 | /// - `to`: The receiver of the minted NFTs 310 | /// - `class_id`: Identifier of the NFT class to mint 311 | /// - `metadata`: CID identifier of the instance's metadata 312 | /// - `quantity`: number of NFT to mint 313 | /// 314 | /// Emits `MintedToken` event when successful 315 | #[pallet::weight(::WeightInfo::mint(*quantity))] 316 | #[transactional] 317 | pub fn mint( 318 | origin: OriginFor, 319 | to: ::Source, 320 | class_id: ClassIdOf, 321 | metadata: CID, 322 | quantity: u32, 323 | ) -> 
DispatchResultWithPostInfo { 324 | let who = ensure_signed(origin)?; 325 | let to = T::Lookup::lookup(to)?; 326 | ensure!(quantity >= 1, Error::::InvalidQuantity); 327 | let class_info = 328 | orml_nft::Pallet::::classes(class_id).ok_or(Error::::ClassIdNotFound)?; 329 | ensure!(who == class_info.owner, Error::::NoPermission); 330 | ensure!(Self::check_time(&class_info.data), Error::::OutOfCampaignPeriod); 331 | 332 | match class_info.data.class_type { 333 | ClassType::Simple(max_num) => { 334 | let issued = class_info.total_issuance; 335 | if TokenIdOf::::from(quantity) > (TokenIdOf::::from(max_num) - issued) { 336 | Err(Error::::QuantityOverflow)? 337 | } 338 | } 339 | _ => Err(Error::::WrongClassType)?, 340 | } 341 | 342 | // TODO: adjustible rarity 343 | let data = TokenData { used: false, rarity: 0 }; 344 | let start_token_id = 345 | orml_nft::Pallet::::mint(&to, class_id, metadata.clone(), data.clone())?; 346 | for _ in 1..quantity { 347 | orml_nft::Pallet::::mint(&to, class_id, metadata.clone(), data.clone())?; 348 | } 349 | 350 | Self::deposit_event(Event::MintedToken(who, to, class_id, start_token_id, quantity)); 351 | Ok(().into()) 352 | } 353 | 354 | /// Claim a `Claim(HashByte32)` by a whitelisted user, 355 | /// with a Merkle proof that proves the user's account 356 | /// is in the Merkle tree of the given root 357 | /// 358 | /// Parameters: 359 | /// - `index`: Index of user's Merkle proof 360 | /// - `class_id`: Identifier of the NFT class to mint 361 | /// - `proof`: Merkle proof 362 | /// 363 | /// Emits `ClaimedToken` event when successful 364 | #[pallet::weight(::WeightInfo::mint(1))] 365 | #[transactional] 366 | pub fn claim( 367 | origin: OriginFor, 368 | index: u16, 369 | class_id: ClassIdOf, 370 | proof: Vec, 371 | ) -> DispatchResultWithPostInfo { 372 | let who = ensure_signed(origin)?; 373 | let class_info = 374 | orml_nft::Pallet::::classes(class_id).ok_or(Error::::ClassIdNotFound)?; 375 | 376 | ensure!(ClaimedList::::contains_key(class_id), Error::::ClassClaimedListNotFound); 377 | 378 | ensure!(Self::check_time(&class_info.data), Error::::OutOfCampaignPeriod); 379 | 380 | match class_info.data.class_type { 381 | ClassType::Claim(merkle_root) => { 382 | // check if this user has already claimed 383 | ensure!( 384 | !ClaimedList::::get(class_id).contains(&index), 385 | Error::::TokenAlreadyClaimed 386 | ); 387 | 388 | // calculate hash for this user 389 | let mut bytes = index.encode(); 390 | bytes.append(&mut who.encode()); 391 | let computed_hash = keccak_256(&bytes); 392 | 393 | // verify the proof 394 | ensure!( 395 | merkle_proof::proof_verify(&computed_hash, &proof, &merkle_root), 396 | Error::::UserNotInClaimList 397 | ); 398 | 399 | // push this user's index into already claimed list 400 | ClaimedList::::mutate(class_id, |claimed_vec| { 401 | claimed_vec.push(index); 402 | }); 403 | } 404 | 405 | _ => Err(Error::::WrongClassType)?, 406 | } 407 | 408 | // TODO: adjustable rarity 409 | let data = TokenData { used: false, rarity: 0 }; 410 | 411 | // TODO: if metadata can change? 
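// Editor's note (sketch; the off-chain side is an assumption, only the on-chain encoding is taken
// from the code above): the proof verified earlier in this function treats
// keccak256(SCALE(index) ++ SCALE(claimer_account)) as the Merkle leaf, so whatever tool builds the
// whitelist off-chain must derive its tree from exactly that encoding; `proof` is then the list of
// sibling hashes from that leaf up to the `Claim(merkle_root)` stored in the class data.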
412 | let metadata = class_info.metadata; 413 | 414 | let next_token_id = 415 | orml_nft::Pallet::::mint(&who, class_id, metadata.to_vec(), data)?; 416 | Self::deposit_event(Event::ClaimedToken(who, class_id, next_token_id)); 417 | Ok(().into()) 418 | } 419 | 420 | /// Merge from two NFT instances and generate a new NFT 421 | /// of type `Merge(ID, ID, bool)` 422 | /// 423 | /// Parameters: 424 | /// - `class_id`: Identifier of the NFT class to mint 425 | /// - `token1`: First NFT of the merge base 426 | /// - `token2`: Seconde NFT of the merge base 427 | /// 428 | /// Emits `MergedToken` event when successful 429 | #[pallet::weight(::WeightInfo::mint(1))] 430 | #[transactional] 431 | pub fn merge( 432 | origin: OriginFor, 433 | class_id: ClassIdOf, 434 | token1: (ClassIdOf, TokenIdOf), 435 | token2: (ClassIdOf, TokenIdOf), 436 | ) -> DispatchResultWithPostInfo { 437 | let who = ensure_signed(origin)?; 438 | let merged_class_info = 439 | orml_nft::Pallet::::classes(class_id).ok_or(Error::::ClassIdNotFound)?; 440 | 441 | ensure!(Self::check_time(&merged_class_info.data), Error::::OutOfCampaignPeriod); 442 | 443 | let mut burn = false; 444 | 445 | if let ClassType::Merge(id1, id2, b) = merged_class_info.data.class_type { 446 | ensure!( 447 | ((id1 == token1.0) && (id2 == token2.0)) 448 | || ((id1 == token2.0) && (id2 == token1.0)), 449 | Error::::WrongMergeBase, 450 | ); 451 | burn = b; 452 | } else { 453 | Err(Error::::WrongClassType)? 454 | } 455 | 456 | // get token 1 and 2 457 | let mut token_info1 = >::tokens(token1.0, token1.1) 458 | .ok_or(Error::::TokenNotFound)?; 459 | let mut token_info2 = >::tokens(token2.0, token2.1) 460 | .ok_or(Error::::TokenNotFound)?; 461 | 462 | // burn or set used of token 1 and 2 463 | if burn { 464 | Self::do_burn(&who, token1)?; 465 | Self::do_burn(&who, token2)?; 466 | } else { 467 | ensure!(!token_info1.data.used && !token_info2.data.used, Error::::TokenUsed); 468 | token_info1.data.used = true; 469 | token_info2.data.used = true; 470 | orml_nft::Tokens::::insert(token1.0, token1.1, token_info1); 471 | orml_nft::Tokens::::insert(token2.0, token2.1, token_info2); 472 | } 473 | 474 | // mint new token 475 | // TODO: adjustible rarity 476 | let data = TokenData { used: false, rarity: 0 }; 477 | 478 | // TODO: if metadata can change? 
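// Editor's sketch of a typical merge flow (illustrative only; class and token ids are assumptions):
// with two Simple classes 0 and 1, and a class 2 created as ClassType::Merge(0, 1, false),
// a user owning tokens (0, t0) and (1, t1) would call
// `Nft::merge(Origin::signed(user), 2, (0, t0), (1, t1))`;
// both base tokens are then marked `used` (or burned when the merge flag is true) and a fresh
// instance of class 2 is minted to the caller, as implemented below.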
479 | let metadata = merged_class_info.metadata; 480 | 481 | let next_token_id = 482 | orml_nft::Pallet::::mint(&who, class_id, metadata.to_vec(), data)?; 483 | Self::deposit_event(Event::MergedToken(who, class_id, next_token_id)); 484 | 485 | Ok(().into()) 486 | } 487 | 488 | /// Transfer NFT token to another account, must be transferable 489 | /// 490 | /// Parameters: 491 | /// - `to`: Receiver of the token 492 | /// - `token`: NFT instance to transfer 493 | /// 494 | /// Emits `TransferredToken` event when successful 495 | #[pallet::weight(::WeightInfo::transfer())] 496 | #[transactional] 497 | pub fn transfer( 498 | origin: OriginFor, 499 | to: ::Source, 500 | token: (ClassIdOf, TokenIdOf), 501 | ) -> DispatchResultWithPostInfo { 502 | let who = ensure_signed(origin)?; 503 | let to = T::Lookup::lookup(to)?; 504 | Self::do_transfer(&who, &to, token)?; 505 | Ok(().into()) 506 | } 507 | 508 | /// Burn an NFT token instance, must be burnable 509 | /// 510 | /// Parameters: 511 | /// - `token`: NFT instance to burn 512 | /// 513 | /// Emits `BurnedToken` event when successful 514 | #[pallet::weight(::WeightInfo::burn())] 515 | #[transactional] 516 | pub fn burn( 517 | origin: OriginFor, 518 | token: (ClassIdOf, TokenIdOf), 519 | ) -> DispatchResultWithPostInfo { 520 | let who = ensure_signed(origin)?; 521 | Self::do_burn(&who, token)?; 522 | Ok(().into()) 523 | } 524 | } 525 | } 526 | 527 | impl Pallet { 528 | /// Ensured atomic. 529 | #[transactional] 530 | fn do_transfer( 531 | from: &T::AccountId, 532 | to: &T::AccountId, 533 | token: (ClassIdOf, TokenIdOf), 534 | ) -> DispatchResult { 535 | let class_info = 536 | orml_nft::Pallet::::classes(token.0).ok_or(Error::::ClassIdNotFound)?; 537 | let data = class_info.data; 538 | ensure!( 539 | data.properties.0.contains(ClassProperty::Transferable), 540 | Error::::NonTransferable 541 | ); 542 | 543 | orml_nft::Pallet::::transfer(from, to, token)?; 544 | 545 | Self::deposit_event(Event::TransferredToken(from.clone(), to.clone(), token.0, token.1)); 546 | Ok(()) 547 | } 548 | 549 | /// Ensured atomic. 
550 | #[transactional] 551 | fn do_burn(who: &T::AccountId, token: (ClassIdOf, TokenIdOf)) -> DispatchResult { 552 | let class_info = 553 | orml_nft::Pallet::::classes(token.0).ok_or(Error::::ClassIdNotFound)?; 554 | let data = class_info.data; 555 | ensure!(data.properties.0.contains(ClassProperty::Burnable), Error::::NonBurnable); 556 | 557 | let token_info = 558 | orml_nft::Pallet::::tokens(token.0, token.1).ok_or(Error::::TokenNotFound)?; 559 | ensure!(*who == token_info.owner, Error::::NoPermission); 560 | 561 | orml_nft::Pallet::::burn(&who, token)?; 562 | 563 | Self::deposit_event(Event::BurnedToken(who.clone(), token.0, token.1)); 564 | Ok(()) 565 | } 566 | } 567 | 568 | impl Pallet { 569 | /// check if current block time is in the range of the time span given by the 570 | /// token class info 571 | fn check_time(token_info: &ClassData, ClassIdOf>) -> bool { 572 | let current_block_number = >::block_number(); 573 | if let Some(start_block) = token_info.start_block { 574 | if start_block > current_block_number { 575 | return false; 576 | } 577 | } 578 | if let Some(end_block) = token_info.end_block { 579 | if end_block < current_block_number { 580 | return false; 581 | } 582 | } 583 | true 584 | } 585 | } 586 | 587 | impl NFT for Pallet { 588 | type ClassId = ClassIdOf; 589 | type TokenId = TokenIdOf; 590 | type Balance = u128; 591 | 592 | fn balance(who: &T::AccountId) -> Self::Balance { 593 | orml_nft::TokensByOwner::::iter_prefix(who).count() as u128 594 | } 595 | 596 | fn owner(token: (Self::ClassId, Self::TokenId)) -> Option { 597 | orml_nft::Pallet::::tokens(token.0, token.1).map(|t| t.owner) 598 | } 599 | 600 | fn transfer( 601 | from: &T::AccountId, 602 | to: &T::AccountId, 603 | token: (Self::ClassId, Self::TokenId), 604 | ) -> DispatchResult { 605 | Self::do_transfer(from, to, token) 606 | } 607 | } 608 | -------------------------------------------------------------------------------- /pallets/nft/src/tests.rs: -------------------------------------------------------------------------------- 1 | use super::*; 2 | use crate::mock::{Event, *}; 3 | use frame_support::{assert_noop, assert_ok}; 4 | use sp_runtime::AccountId32; 5 | 6 | fn initial_accounts() -> (AccountId32, AccountId32) { 7 | let alice_account: AccountId32 = AccountId32::from([ 8 | 0xd4, 0x35, 0x93, 0xc7, 0x15, 0xfd, 0xd3, 0x1c, 0x61, 0x14, 0x1a, 0xbd, 0x04, 0xa9, 0x9f, 9 | 0xd6, 0x82, 0x2c, 0x85, 0x58, 0x85, 0x4c, 0xcd, 0xe3, 0x9a, 0x56, 0x84, 0xe7, 0xa5, 0x6d, 10 | 0xa2, 0x7d, 11 | ]); 12 | 13 | let bob_account: AccountId32 = AccountId32::from([ 14 | 0x51u8, 0x82u8, 0xa7u8, 0x3eu8, 0x48u8, 0xbdu8, 0x6eu8, 0x81u8, 0x4du8, 0x0cu8, 0x2bu8, 15 | 0x41u8, 0x67u8, 0x2du8, 0x9cu8, 0xb8u8, 0xc8u8, 0x7cu8, 0x42u8, 0x21u8, 0xb5u8, 0x5bu8, 16 | 0xc0u8, 0x8eu8, 0x09u8, 0x43u8, 0x19u8, 0x8eu8, 0x90u8, 0xcau8, 0xadu8, 0x1fu8, 17 | ]); 18 | 19 | let start_wealth: u64 = (10 * CREATION_FEE + 10).into(); 20 | let _ = Balances::deposit_creating(&alice_account, start_wealth); 21 | let _ = Balances::deposit_creating(&bob_account, start_wealth); 22 | 23 | run_to_block(1); 24 | assert_eq!(System::block_number(), 1); 25 | 26 | let _ = Balances::deposit_creating(&Pot::get(), 1); 27 | 28 | return (alice_account.clone(), bob_account.clone()); 29 | } 30 | 31 | #[test] 32 | // Test for general Event: 33 | // CreatedClass(T::AccountId, ClassIdOf) 34 | // Test for Error: 35 | // WrongClassType 36 | // CreationFeeNotPaid 37 | fn test_general_process() { 38 | new_test_ext().execute_with(|| { 39 | let (alice_account, _bob_account) = 
initial_accounts(); 40 | 41 | let merkle_root = [ 42 | 0x0cu8, 0x67u8, 0xcau8, 0xf4u8, 0x61u8, 0x29u8, 0x0cu8, 0xd4u8, 0x63u8, 0xe5u8, 0x35u8, 43 | 0x21u8, 0x3fu8, 0x99u8, 0x6eu8, 0x32u8, 0x73u8, 0x6eu8, 0x65u8, 0xa2u8, 0x06u8, 0x37u8, 44 | 0x83u8, 0xfdu8, 0xe5u8, 0x03u8, 0x6bu8, 0x71u8, 0x39u8, 0x6du8, 0xfbu8, 0x0cu8, 45 | ]; 46 | 47 | // Simple: CreatedClass Event 48 | assert_ok!(Nft::create_class( 49 | Origin::signed(alice_account.clone()), 50 | CID::default(), 51 | Properties::default(), 52 | None, 53 | None, 54 | ClassType::Simple(100), 55 | )); 56 | assert_eq!( 57 | events_filter::>()[0], 58 | Event::Nft(crate::Event::CreatedClass(alice_account.clone(), 0)), 59 | ); 60 | 61 | // Claim: CreatedClass Event 62 | assert_ok!(Nft::create_class( 63 | Origin::signed(alice_account.clone()), 64 | CID::default(), 65 | Properties(ClassProperty::Transferable | ClassProperty::Burnable), 66 | None, 67 | None, 68 | ClassType::Claim(merkle_root), 69 | )); 70 | assert_eq!( 71 | events_filter::>()[1], 72 | Event::Nft(crate::Event::CreatedClass(alice_account.clone(), 1)), 73 | ); 74 | 75 | // Merge: CreatedClass Event 76 | assert_ok!(Nft::create_class( 77 | Origin::signed(alice_account.clone()), 78 | CID::default(), 79 | Properties(ClassProperty::Transferable | ClassProperty::Burnable), 80 | None, 81 | None, 82 | ClassType::Merge(0, 1, false), 83 | )); 84 | assert_eq!( 85 | events_filter::>()[2], 86 | Event::Nft(crate::Event::CreatedClass(alice_account.clone(), 2)), 87 | ); 88 | 89 | //mint 5 instance of merge type token 90 | assert_noop!( 91 | Nft::mint( 92 | Origin::signed(alice_account.clone()), 93 | alice_account.clone(), 94 | 2, 95 | CID::default(), 96 | 5 97 | ), 98 | NftError::WrongClassType 99 | ); 100 | 101 | //CreationFeeNotPaid 102 | let random_account: AccountId32 = AccountId32::from([0u8; 32]); 103 | assert_noop!( 104 | Nft::create_class( 105 | Origin::signed(random_account.clone()), 106 | CID::default(), 107 | Properties(ClassProperty::Transferable | ClassProperty::Burnable), 108 | None, 109 | None, 110 | ClassType::Merge(0, 1, false), 111 | ), 112 | NftError::CreationFeeNotPaid 113 | ); 114 | }) 115 | } 116 | 117 | #[test] 118 | // Test for burn function: 119 | // BurnedToken(T::AccountId, ClassIdOf, TokenIdOf) 120 | // Test for Error: 121 | // ClassIdNotFound 122 | // TokenIdNotFound 123 | // NonBurnable 124 | // NoPermission 125 | fn test_burn_process() { 126 | new_test_ext().execute_with(|| { 127 | let (alice_account, bob_account) = initial_accounts(); 128 | 129 | // create Unburnable and Transferable class without start/end restrcition class id 0 130 | assert_ok!(Nft::create_class( 131 | Origin::signed(alice_account.clone()), 132 | CID::default(), 133 | Properties(ClassProperty::Transferable.into()), 134 | None, 135 | None, 136 | ClassType::Simple(100), 137 | )); 138 | 139 | // create Burnable and Transferable class without start/end restrcition class id 1 140 | assert_ok!(Nft::create_class( 141 | Origin::signed(alice_account.clone()), 142 | CID::default(), 143 | Properties(ClassProperty::Transferable | ClassProperty::Burnable), 144 | None, 145 | None, 146 | ClassType::Simple(100), 147 | )); 148 | 149 | //mint 5 instance class id 0, 1 150 | assert_ok!(Nft::mint( 151 | Origin::signed(alice_account.clone()), 152 | bob_account.clone(), 153 | 0, 154 | CID::default(), 155 | 5, 156 | )); 157 | assert_ok!(Nft::mint( 158 | Origin::signed(alice_account.clone()), 159 | bob_account.clone(), 160 | 1, 161 | CID::default(), 162 | 5, 163 | )); 164 | 165 | // burn non-exist class token 166 | 
assert_noop!( 167 | Nft::burn(Origin::signed(bob_account.clone()), (9, 0),), 168 | NftError::ClassIdNotFound 169 | ); 170 | 171 | // burn unburnable class token 172 | assert_noop!( 173 | Nft::burn(Origin::signed(bob_account.clone()), (0, 0),), 174 | NftError::NonBurnable 175 | ); 176 | 177 | // burn exist class but non-exist burnable token 178 | assert_noop!( 179 | Nft::burn(Origin::signed(bob_account.clone()), (1, 10),), 180 | NftError::TokenNotFound 181 | ); 182 | 183 | // someone else burn no-owned token 184 | assert_noop!( 185 | Nft::burn(Origin::signed(alice_account.clone()), (1, 0),), 186 | NftError::NoPermission 187 | ); 188 | 189 | // normal burned 190 | assert_ok!(Nft::burn(Origin::signed(bob_account.clone()), (1, 0),)); 191 | assert_eq!( 192 | events_filter::>()[4], 193 | Event::Nft(crate::Event::BurnedToken(bob_account.clone(), 1, 0)), 194 | ); 195 | }); 196 | } 197 | 198 | #[test] 199 | // Test for transfer function: 200 | // TransferredToken(T::AccountId, ClassIdOf, TokenIdOf) 201 | // Test for Error: 202 | // ClassIdNotFound 203 | // TokenNotFound 204 | // NonBurnable 205 | // NoPermission 206 | fn test_transfer_process() { 207 | new_test_ext().execute_with(|| { 208 | let (alice_account, bob_account) = initial_accounts(); 209 | 210 | // create Burnable and unTransferable class without start/end restrcition class id 0 211 | assert_ok!(Nft::create_class( 212 | Origin::signed(alice_account.clone()), 213 | CID::default(), 214 | Properties(ClassProperty::Burnable.into()), 215 | None, 216 | None, 217 | ClassType::Simple(100), 218 | )); 219 | 220 | // create Burnable and Transferable class without start/end restrcition class id 1 221 | assert_ok!(Nft::create_class( 222 | Origin::signed(alice_account.clone()), 223 | CID::default(), 224 | Properties(ClassProperty::Transferable | ClassProperty::Burnable), 225 | None, 226 | None, 227 | ClassType::Simple(100), 228 | )); 229 | 230 | //mint 5 instance class id 0, 1 231 | assert_ok!(Nft::mint( 232 | Origin::signed(alice_account.clone()), 233 | bob_account.clone(), 234 | 0, 235 | CID::default(), 236 | 5, 237 | )); 238 | assert_ok!(Nft::mint( 239 | Origin::signed(alice_account.clone()), 240 | bob_account.clone(), 241 | 1, 242 | CID::default(), 243 | 5, 244 | )); 245 | 246 | // transfer non-exist class token 247 | assert_noop!( 248 | Nft::transfer(Origin::signed(bob_account.clone()), alice_account.clone(), (9, 0),), 249 | NftError::ClassIdNotFound 250 | ); 251 | 252 | // transfer untransferable class token 253 | assert_noop!( 254 | Nft::transfer(Origin::signed(bob_account.clone()), alice_account.clone(), (0, 0),), 255 | NftError::NonTransferable 256 | ); 257 | 258 | // transfer exist class but non-exist token 259 | assert_noop!( 260 | Nft::transfer(Origin::signed(bob_account.clone()), alice_account.clone(), (1, 10),), 261 | // If transfer an non-exist token id, the error will be from orml_nft, who's Event or Error is not defined 262 | orml_nft::Error::::TokenNotFound 263 | ); 264 | 265 | // someone else transfer no-owned token 266 | assert_noop!( 267 | Nft::transfer(Origin::signed(alice_account.clone()), bob_account.clone(), (1, 0),), 268 | // If transfer an no-permission token id, the error will be from orml_nft, who's Event or Error is not defined 269 | orml_nft::Error::::NoPermission 270 | ); 271 | 272 | // normal transfer 273 | assert_ok!(Nft::transfer( 274 | Origin::signed(bob_account.clone()), 275 | alice_account.clone(), 276 | (1, 0), 277 | )); 278 | assert_eq!( 279 | events_filter::>()[4], 280 | 
Event::Nft(crate::Event::TransferredToken( 281 | bob_account.clone(), 282 | alice_account.clone(), 283 | 1, 284 | 0 285 | )), 286 | ); 287 | }); 288 | } 289 | 290 | #[test] 291 | // Test for Simple type Event: 292 | // MintedToken(T::AccountId, T::AccountId, ClassIdOf, u32) 293 | // Test for Error: 294 | // ClassIdNotFound 295 | // NoPermission, 296 | // InvalidQuantity, 297 | // QuantityOverflow 298 | // OutOfCampaignPeriod 299 | fn test_minted_token_process() { 300 | new_test_ext().execute_with(|| { 301 | let (alice_account, bob_account) = initial_accounts(); 302 | // create Transferable Unburnable class without start/end restrcition 303 | assert_ok!(Nft::create_class( 304 | Origin::signed(alice_account.clone()), 305 | CID::default(), 306 | Properties(ClassProperty::Transferable.into()), 307 | None, 308 | None, 309 | ClassType::Simple(10), 310 | )); 311 | 312 | //mint class Id=1 non-exist class 313 | assert_noop!( 314 | Nft::mint( 315 | Origin::signed(alice_account.clone()), 316 | bob_account.clone(), 317 | 1, 318 | CID::default(), 319 | 1, 320 | ), 321 | NftError::ClassIdNotFound 322 | ); 323 | 324 | //mint 0 invalid instance quantity 325 | assert_noop!( 326 | Nft::mint( 327 | Origin::signed(alice_account.clone()), 328 | bob_account.clone(), 329 | 0, 330 | CID::default(), 331 | 0, 332 | ), 333 | NftError::InvalidQuantity 334 | ); 335 | 336 | //mint 11 exceed the maximum instance limit 337 | assert_noop!( 338 | Nft::mint( 339 | Origin::signed(alice_account.clone()), 340 | bob_account.clone(), 341 | 0, 342 | CID::default(), 343 | 11, 344 | ), 345 | NftError::QuantityOverflow 346 | ); 347 | 348 | //mint 5 instance with right ClassInfo owner 349 | assert_ok!(Nft::mint( 350 | Origin::signed(alice_account.clone()), 351 | bob_account.clone(), 352 | 0, 353 | CID::default(), 354 | 5, 355 | )); 356 | assert_eq!( 357 | events_filter::>()[1], 358 | Event::Nft(crate::Event::MintedToken( 359 | alice_account.clone(), 360 | bob_account.clone(), 361 | 0, 362 | 0, 363 | 5 364 | )) 365 | ); 366 | 367 | //mint 5 instance with wrong ClassInfo owner 368 | assert_noop!( 369 | Nft::mint( 370 | Origin::signed(bob_account.clone()), 371 | bob_account.clone(), 372 | 0, 373 | CID::default(), 374 | 5, 375 | ), 376 | NftError::NoPermission 377 | ); 378 | 379 | // create Transferable Unburnable class with start/end restrcition 380 | assert_ok!(Nft::create_class( 381 | Origin::signed(alice_account.clone()), 382 | CID::default(), 383 | Properties(ClassProperty::Transferable | ClassProperty::Burnable), 384 | 10.into(), 385 | 100.into(), 386 | ClassType::Simple(100), 387 | )); 388 | 389 | run_to_block(2); 390 | assert_eq!(System::block_number(), 2); 391 | 392 | //mint 5 instance out of time 393 | assert_noop!( 394 | Nft::mint( 395 | Origin::signed(alice_account.clone()), 396 | bob_account.clone(), 397 | 1, 398 | CID::default(), 399 | 5, 400 | ), 401 | NftError::OutOfCampaignPeriod 402 | ); 403 | 404 | run_to_block(11); 405 | assert_eq!(System::block_number(), 11); 406 | //mint 5 instance within time 407 | assert_ok!(Nft::mint( 408 | Origin::signed(alice_account.clone()), 409 | bob_account.clone(), 410 | 1, 411 | CID::default(), 412 | 5, 413 | )); 414 | assert_eq!( 415 | events_filter::>()[3], 416 | Event::Nft(crate::Event::MintedToken( 417 | alice_account.clone(), 418 | bob_account.clone(), 419 | 1, 420 | 0, 421 | 5 422 | )) 423 | ); 424 | }); 425 | } 426 | 427 | #[test] 428 | // Test for Claim type Event: 429 | // ClaimedToken(T::AccountId, ClassIdOf) 430 | // Test for Error: 431 | // OutOfCampaignPeriod 432 | // 
NonTransferable, 433 | // ClassClaimedListNotFound 434 | // UserNotInClaimList 435 | // TokenAlreadyClaimed 436 | fn test_claimed_token_process() { 437 | new_test_ext().execute_with(|| { 438 | let (alice_account, bob_account) = initial_accounts(); 439 | 440 | // root is 0x0c67caf461290cd463e535213f996e32736e65a2063783fde5036b71396dfb0c 441 | let merkle_root = [ 442 | 0x0cu8, 0x67u8, 0xcau8, 0xf4u8, 0x61u8, 0x29u8, 0x0cu8, 0xd4u8, 0x63u8, 0xe5u8, 0x35u8, 443 | 0x21u8, 0x3fu8, 0x99u8, 0x6eu8, 0x32u8, 0x73u8, 0x6eu8, 0x65u8, 0xa2u8, 0x06u8, 0x37u8, 444 | 0x83u8, 0xfdu8, 0xe5u8, 0x03u8, 0x6bu8, 0x71u8, 0x39u8, 0x6du8, 0xfbu8, 0x0cu8, 445 | ]; 446 | 447 | // proof of alice is 0xd8b63c7168eef1bc3b00cdf73d1636429a26ab607b52f1de073b1f53edd9302d 448 | let alice_proof = vec![[ 449 | 0xd8u8, 0xb6u8, 0x3cu8, 0x71u8, 0x68u8, 0xeeu8, 0xf1u8, 0xbcu8, 0x3bu8, 0x00u8, 0xcdu8, 450 | 0xf7u8, 0x3du8, 0x16u8, 0x36u8, 0x42u8, 0x9au8, 0x26u8, 0xabu8, 0x60u8, 0x7bu8, 0x52u8, 451 | 0xf1u8, 0xdeu8, 0x07u8, 0x3bu8, 0x1fu8, 0x53u8, 0xedu8, 0xd9u8, 0x30u8, 0x2du8, 452 | ]]; 453 | 454 | // issue a claim class : class id, 0 455 | assert_ok!(Nft::create_class( 456 | Origin::signed(alice_account.clone()), 457 | CID::default(), 458 | Properties(ClassProperty::Burnable.into()), 459 | 10.into(), 460 | 100.into(), 461 | ClassType::Claim(merkle_root), 462 | )); 463 | 464 | // fake simple NFT : class id, 1 465 | assert_ok!(Nft::create_class( 466 | Origin::signed(alice_account.clone()), 467 | CID::default(), 468 | Properties(ClassProperty::Transferable | ClassProperty::Burnable), 469 | 10.into(), 470 | 100.into(), 471 | ClassType::Simple(100), 472 | )); 473 | 474 | run_to_block(2); 475 | assert_eq!(System::block_number(), 2); 476 | 477 | // alice claim out of time 478 | assert_noop!( 479 | Nft::claim(Origin::signed(alice_account.clone()), 0, 0, alice_proof.clone(),), 480 | NftError::OutOfCampaignPeriod 481 | ); 482 | 483 | run_to_block(11); 484 | assert_eq!(System::block_number(), 11); 485 | 486 | // alice claims with random proof 487 | assert_noop!( 488 | Nft::claim(Origin::signed(alice_account.clone()), 0, 0, vec![[0u8; 32]],), 489 | NftError::UserNotInClaimList 490 | ); 491 | 492 | // Claim non-existed type 493 | assert_noop!( 494 | Nft::claim(Origin::signed(alice_account.clone()), 0, 1, alice_proof.clone(),), 495 | NftError::ClassClaimedListNotFound //WrongClassType // should we raise this error first??? This can never be triggered. 
496 | ); 497 | 498 | // alice claims with alice's proof 499 | assert_ok!(Nft::claim(Origin::signed(alice_account.clone()), 0, 0, alice_proof.clone(),)); 500 | assert_eq!( 501 | events_filter::>()[2], 502 | Event::Nft(crate::Event::ClaimedToken(alice_account.clone(), 0, 0)) 503 | ); 504 | 505 | // alice claims again 506 | assert_noop!( 507 | Nft::claim(Origin::signed(alice_account.clone()), 0, 0, alice_proof,), 508 | NftError::TokenAlreadyClaimed 509 | ); 510 | 511 | // alice transfer non-transferable token class id 0 512 | assert_noop!( 513 | Nft::transfer(Origin::signed(alice_account.clone()), bob_account.clone(), (0, 1)), 514 | NftError::NonTransferable 515 | ); 516 | }) 517 | } 518 | 519 | #[test] 520 | // Test for Merge type Event: 521 | // MergedToken(T::AccountId, ClassIdOf) 522 | // TransferredToken(T::AccountId, T::AccountId, ClassIdOf, TokenIdOf) 523 | // Test for Error: 524 | // WrongMergeBase 525 | // NonBurnable 526 | // OutOfCampaignPeriod 527 | // TokenNotFound 528 | // TokenUsed 529 | fn test_merged_token_process() { 530 | new_test_ext().execute_with(|| { 531 | let (alice_account, bob_account) = initial_accounts(); 532 | 533 | // issue basic unburnable NFTs : class id 0, 1 534 | assert_ok!(Nft::create_class( 535 | Origin::signed(alice_account.clone()), 536 | CID::default(), 537 | Properties(ClassProperty::Transferable.into()), 538 | None, 539 | None, 540 | ClassType::Simple(10), 541 | )); 542 | 543 | assert_ok!(Nft::create_class( 544 | Origin::signed(alice_account.clone()), 545 | CID::default(), 546 | Properties(ClassProperty::Transferable.into()), 547 | None, 548 | None, 549 | ClassType::Simple(10), 550 | )); 551 | 552 | // issue basic burnable NFTs : class id 2, 3 553 | assert_ok!(Nft::create_class( 554 | Origin::signed(alice_account.clone()), 555 | CID::default(), 556 | Properties(ClassProperty::Transferable | ClassProperty::Burnable), 557 | None, 558 | None, 559 | ClassType::Simple(10), 560 | )); 561 | 562 | assert_ok!(Nft::create_class( 563 | Origin::signed(alice_account.clone()), 564 | CID::default(), 565 | Properties(ClassProperty::Transferable | ClassProperty::Burnable), 566 | None, 567 | None, 568 | ClassType::Simple(10), 569 | )); 570 | 571 | // mint unburnable NFTs : class id 0 572 | assert_ok!(Nft::mint( 573 | Origin::signed(alice_account.clone()), 574 | bob_account.clone(), 575 | 0, 576 | CID::default(), 577 | 10, 578 | )); 579 | assert_eq!( 580 | events_filter::>()[4], 581 | Event::Nft(crate::Event::MintedToken( 582 | alice_account.clone(), 583 | bob_account.clone(), 584 | 0, 585 | 0, 586 | 10 587 | )) 588 | ); 589 | 590 | // mint unburnable NFTs : class id 1 591 | assert_ok!(Nft::mint( 592 | Origin::signed(alice_account.clone()), 593 | bob_account.clone(), 594 | 1, 595 | CID::default(), 596 | 10, 597 | )); 598 | assert_eq!( 599 | events_filter::>()[5], 600 | Event::Nft(crate::Event::MintedToken( 601 | alice_account.clone(), 602 | bob_account.clone(), 603 | 1, 604 | 0, 605 | 10 606 | )) 607 | ); 608 | 609 | // mint burnable NFTs : class id 2, 3 610 | assert_ok!(Nft::mint( 611 | Origin::signed(alice_account.clone()), 612 | bob_account.clone(), 613 | 2, 614 | CID::default(), 615 | 10, 616 | )); 617 | assert_ok!(Nft::mint( 618 | Origin::signed(alice_account.clone()), 619 | bob_account.clone(), 620 | 3, 621 | CID::default(), 622 | 10, 623 | )); 624 | 625 | // issue advanced NFTs 626 | assert_noop!( 627 | Nft::create_class( 628 | Origin::signed(alice_account.clone()), 629 | CID::default(), 630 | Properties(ClassProperty::Transferable | ClassProperty::Burnable), 631 
| 10.into(), 632 | 100.into(), 633 | ClassType::Merge(0, 1, true), 634 | ), 635 | NftError::NonBurnable 636 | ); 637 | 638 | // issue base unburn merge NFTs : class id 4 639 | assert_ok!(Nft::create_class( 640 | Origin::signed(alice_account.clone()), 641 | CID::default(), 642 | Properties(ClassProperty::Transferable | ClassProperty::Burnable), 643 | 10.into(), 644 | 100.into(), 645 | ClassType::Merge(2, 3, false), 646 | )); 647 | 648 | // issue base burn merge NFTs: class id 5 649 | assert_ok!(Nft::create_class( 650 | Origin::signed(alice_account.clone()), 651 | CID::default(), 652 | Properties(ClassProperty::Transferable | ClassProperty::Burnable), 653 | 10.into(), 654 | 100.into(), 655 | ClassType::Merge(2, 3, true), 656 | )); 657 | 658 | //---------------------------------------------// 659 | //---unburn merge NFT--------------------------// 660 | // merge out of time 661 | assert_noop!( 662 | Nft::merge(Origin::signed(bob_account.clone()), 4, (2, 9), (3, 9),), 663 | NftError::OutOfCampaignPeriod 664 | ); 665 | 666 | run_to_block(11); 667 | assert_eq!(System::block_number(), 11); 668 | 669 | // merge existed but wrong base class type 670 | assert_noop!( 671 | Nft::merge(Origin::signed(bob_account.clone()), 4, (0, 11), (1, 11),), 672 | NftError::WrongMergeBase 673 | ); 674 | 675 | // merge existed class but non-existed token 676 | assert_noop!( 677 | Nft::merge(Origin::signed(bob_account.clone()), 4, (2, 11), (3, 11),), 678 | NftError::TokenNotFound 679 | ); 680 | 681 | // merge existed class and existed token 682 | assert_ok!(Nft::merge(Origin::signed(bob_account.clone()), 4, (2, 9), (3, 9),)); 683 | assert_eq!( 684 | events_filter::>()[10], 685 | Event::Nft(crate::Event::MergedToken(bob_account.clone(), 4, 0)) 686 | ); 687 | 688 | // merge existed class and existed token again for used token 689 | assert_noop!( 690 | Nft::merge(Origin::signed(bob_account.clone()), 4, (2, 9), (3, 9),), 691 | NftError::TokenUsed 692 | ); 693 | 694 | //---------------------// 695 | //---burn merge NFT--------------------------// 696 | assert_ok!(Nft::merge(Origin::signed(bob_account.clone()), 5, (2, 9), (3, 9),)); 697 | 698 | // merge will generate burn event 699 | assert_eq!( 700 | events_filter::>()[11], 701 | Event::Nft(crate::Event::BurnedToken(bob_account.clone(), 2, 9)), 702 | ); 703 | assert_eq!( 704 | events_filter::>()[12], 705 | Event::Nft(crate::Event::BurnedToken(bob_account.clone(), 3, 9)), 706 | ); 707 | // merge event 708 | assert_eq!( 709 | events_filter::>()[13], 710 | Event::Nft(crate::Event::MergedToken(bob_account.clone(), 5, 0)) 711 | ); 712 | 713 | // check the owner of burned token is account #0 714 | let random_account: AccountId32 = AccountId32::from([0u8; 32]); 715 | assert_eq!(Nft::owner((2, 9)).unwrap_or(random_account.clone()), random_account); 716 | 717 | // transfer class id=2 token id=8 to account #0 718 | assert_ok!(Nft::transfer( 719 | Origin::signed(bob_account.clone()), 720 | random_account.clone(), 721 | (2, 8) 722 | )); 723 | assert_eq!( 724 | events_filter::>()[14], 725 | Event::Nft(crate::Event::TransferredToken( 726 | bob_account.clone(), 727 | random_account.clone(), 728 | 2, 729 | 8 730 | )) 731 | ); 732 | assert_eq!(Nft::owner((2, 8)).unwrap_or(random_account.clone()), random_account); 733 | }); 734 | } 735 | -------------------------------------------------------------------------------- /pallets/offchain-worker/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! # Offchain Worker 2 | //! 
The pallet is responsible for getting external asset claims from the extrinsic and then querying and aggregating the 3 | //! balances (BTC and ETH) according to the linked external accounts in the account-linker pallet. The offchain worker gets the data 4 | //! from popular websites like Etherscan, Infura and blockchain.info. After getting the balances, the offchain worker emits an event 5 | //! with the balance info and stores it on chain for on-chain queries. 6 | //! 7 | //! ## API token 8 | //! The offchain worker needs API tokens to query data from third-party data providers. Currently, the offchain worker gets 9 | //! the API tokens from a local server, then stores them in the offchain worker's local storage. 10 | //! 11 | 12 | #![cfg_attr(not(feature = "std"), no_std)] 13 | 14 | // everything defined in the pallet mod must be public 15 | use codec::{Codec, Decode, Encode}; 16 | pub use pallet::*; 17 | use sp_core::crypto::KeyTypeId; 18 | 19 | pub mod urls; 20 | pub mod utils; 21 | pub mod weights; 22 | 23 | #[cfg(feature = "runtime-benchmarks")] 24 | pub mod benchmarking; 25 | 26 | #[cfg(test)] 27 | mod tests; 28 | 29 | const TOKEN_SERVER_URL: &str = "http://127.0.0.1:4000"; 30 | pub const KEY_TYPE: KeyTypeId = KeyTypeId(*b"ocw!"); 31 | 32 | #[frame_support::pallet] 33 | pub mod pallet { 34 | /// Unique key for query 35 | #[derive(Encode, Decode, Default, Debug)] 36 | pub struct QueryKey { 37 | pub account: AccountId, 38 | pub data_source: urls::DataSource, 39 | } 40 | 41 | pub mod crypto { 42 | use super::KEY_TYPE; 43 | use sp_core::sr25519::Signature as Sr25519Signature; 44 | use sp_runtime::{ 45 | app_crypto::{app_crypto, sr25519}, 46 | traits::Verify, 47 | MultiSignature, MultiSigner, 48 | }; 49 | app_crypto!(sr25519, KEY_TYPE); 50 | 51 | pub struct TestAuthId; 52 | impl frame_system::offchain::AppCrypto for TestAuthId { 53 | type RuntimeAppPublic = Public; 54 | type GenericSignature = sp_core::sr25519::Signature; 55 | type GenericPublic = sp_core::sr25519::Public; 56 | } 57 | 58 | impl 59 | frame_system::offchain::AppCrypto< 60 | ::Signer, 61 | Sr25519Signature, 62 | > for TestAuthId 63 | { 64 | type RuntimeAppPublic = Public; 65 | type GenericSignature = sp_core::sr25519::Signature; 66 | type GenericPublic = sp_core::sr25519::Public; 67 | } 68 | } 69 | 70 | use crate::*; 71 | use core::convert::TryInto; 72 | use frame_support::{dispatch::DispatchResultWithPostInfo, pallet_prelude::*}; 73 | use frame_system::{ 74 | ensure_signed, 75 | offchain::{AppCrypto, CreateSignedTransaction, SendSignedTransaction, Signer}, 76 | pallet_prelude::*, 77 | }; 78 | use sp_std::{ 79 | collections::btree_map::{BTreeMap, Entry}, 80 | fmt::Debug, 81 | prelude::*, 82 | }; 83 | 84 | use frame_support::{ 85 | dispatch, 86 | traits::{Currency, Imbalance, OnUnbalanced}, 87 | }; 88 | use sp_runtime::{ 89 | offchain::storage::StorageValueRef, 90 | traits::{AtLeast32BitUnsigned, MaybeSerializeDeserialize, Member}, 91 | }; 92 | use weights::WeightInfo; 93 | 94 | type PositiveImbalanceOf = <::Currency as Currency< 95 | ::AccountId, 96 | >>::PositiveImbalance; 97 | 98 | #[pallet::config] 99 | pub trait Config: 100 | frame_system::Config + account_linker::Config + CreateSignedTransaction> 101 | { 102 | type Balance: Parameter 103 | + Member 104 | + AtLeast32BitUnsigned 105 | + Codec 106 | + Default 107 | + Copy 108 | + MaybeSerializeDeserialize; 109 | type Event: From> + IsType<::Event>; 110 | type Call: From>; 111 | type AuthorityId: AppCrypto; 112 | type QueryTaskRedundancy: Get; 113 | type QuerySessionLength: Get; 114 | /// Currency type for this
pallet. 115 | type Currency: Currency; 116 | /// Handler for the unbalanced increment when rewarding (minting rewards) 117 | type Reward: OnUnbalanced>; 118 | type OcwQueryReward: Get<<::Currency as Currency<::AccountId>>::Balance>; 119 | type WeightInfo: weights::WeightInfo; 120 | 121 | /// The maximum weight allowed for data aggregation at the end of a session 122 | type MaximumWeightForDataAggregation: Get; 123 | 124 | /// The maximum number of commits per session, defined so that the weight used for data aggregation does not exceed MaximumWeightForDataAggregation. 125 | /// The weight used depends on the data size in CommitAccountBalance; as a reference, the weight for 100 items can be found 126 | /// in weights.rs dummy() -> Weight. Re-run the benchmarking when changing this parameter and make sure the value stays reasonable. 127 | type MaximumCommitsPerSession: Get; 128 | } 129 | 130 | #[pallet::hooks] 131 | impl Hooks> for Pallet { 132 | /// The on_initialize doesn't need computation or DB access. 133 | /// It just returns the weight of on_finalize. 134 | fn on_initialize(block_number: T::BlockNumber) -> Weight { 135 | log::info!("ocw on_initialize {:?}.", block_number); 136 | ::WeightInfo::on_finalize() 137 | } 138 | 139 | /// The on_finalize triggers the query result aggregation. 140 | /// The argument block_number has a big impact on the weight. 141 | fn on_finalize(block_number: T::BlockNumber) { 142 | log::info!("ocw on_finalize.{:?}.", block_number); 143 | Self::do_finalize(block_number); 144 | } 145 | 146 | /// TODO: block N's offchain_worker will be called after block N+1 is finalized 147 | /// Triggered by the offchain framework in each block 148 | fn offchain_worker(block_number: T::BlockNumber) { 149 | log::info!("ocw hook function called on block {:?}.", block_number); 150 | 151 | let query_session_length: usize = T::QuerySessionLength::get() as usize; 152 | 153 | let index_in_session = TryInto::::try_into(block_number) 154 | .map_or(query_session_length, |bn| bn % query_session_length); 155 | 156 | // Start queries at the second block of a session 157 | if index_in_session == 1 { 158 | // TODO make use of the returned value of start, 159 | // and adjust the logic of OCW accordingly 160 | let _ = Self::start(block_number); 161 | } 162 | } 163 | } 164 | 165 | #[pallet::event] 166 | #[pallet::metadata(T::AccountId = "AccountId", T::BlockNumber = "BlockNumber")] 167 | pub enum Event { 168 | BalanceGot(T::AccountId, T::BlockNumber, Option, Option), 169 | } 170 | 171 | // Errors inform users that something went wrong. 172 | #[pallet::error] 173 | pub enum Error { 174 | /// Error parsing a number. 175 | InvalidNumber, 176 | /// Account already in claim list.
177 | AccountAlreadyInClaimlist, 178 | /// Invalid data source 179 | InvalidDataSource, 180 | /// Invalid commit block number 181 | InvalidCommitBlockNumber, 182 | /// Invalid commit slot 183 | InvalidCommitSlot, 184 | /// Invalid account index 185 | InvalidAccountIndex, 186 | /// Offchain worker index overflow 187 | OffchainWorkerIndexOverflow, 188 | /// Token Server no response 189 | TokenServerNoResponse, 190 | /// Storage retrieval error 191 | InvalidStorageRetrieval, 192 | /// Too much commits in a session 193 | TooMuchCommitsInSession, 194 | } 195 | 196 | #[pallet::pallet] 197 | #[pallet::generate_store(pub(super) trait Store)] 198 | pub struct Pallet(_); 199 | 200 | #[pallet::storage] 201 | #[pallet::getter(fn total_claims)] 202 | pub(super) type TotalClaims = StorageValue<_, u64>; 203 | 204 | #[pallet::storage] 205 | #[pallet::getter(fn query_account_set)] 206 | pub(super) type ClaimAccountSet = 207 | StorageMap<_, Blake2_128Concat, T::AccountId, (), ValueQuery>; 208 | 209 | #[pallet::storage] 210 | #[pallet::getter(fn claim_account_index)] 211 | pub(super) type ClaimAccountIndex = 212 | StorageMap<_, Blake2_128Concat, T::AccountId, Option, ValueQuery>; 213 | 214 | #[pallet::storage] 215 | #[pallet::getter(fn account_balance)] 216 | pub(super) type AccountBalance = 217 | StorageMap<_, Blake2_128Concat, T::AccountId, (Option, Option), ValueQuery>; 218 | 219 | /// Record account's btc and ethereum balance 220 | #[pallet::storage] 221 | #[pallet::getter(fn commit_account_balance)] 222 | pub(super) type CommitAccountBalance = StorageDoubleMap< 223 | _, 224 | Blake2_128Concat, 225 | T::AccountId, 226 | Blake2_128Concat, 227 | QueryKey, 228 | Option, 229 | ValueQuery, 230 | >; 231 | 232 | #[pallet::storage] 233 | #[pallet::getter(fn ocw_account_index)] 234 | pub(super) type OcwAccountIndex = 235 | StorageMap<_, Blake2_128Concat, T::AccountId, Option, ValueQuery>; 236 | 237 | #[pallet::storage] 238 | #[pallet::getter(fn commit_number_in_session)] 239 | pub(super) type CommitNumberInSession = StorageValue<_, u32>; 240 | 241 | #[pallet::call] 242 | impl Pallet { 243 | /// Request the Litentry to query balances of linked Eth and BTC accounts. 244 | /// 245 | /// This will alter `ClaimAccountSet` in storage. 246 | /// 247 | /// The dispatch origin for this call is `account`. 248 | /// 249 | /// # 250 | /// - Independent of the arguments. 251 | /// - Contains a limited number of reads and writes. 252 | /// --------------------- 253 | /// - Base Weight: 254 | /// - Creating: 27.56 µs 255 | /// - Killing: 35.11 µs 256 | /// - DB Weight: 1 Read, 1 Write 257 | /// # 258 | /// 259 | #[pallet::weight(::WeightInfo::asset_claim())] 260 | pub fn asset_claim(origin: OriginFor) -> DispatchResultWithPostInfo { 261 | let account = ensure_signed(origin)?; 262 | 263 | // If the same claim already in set 264 | ensure!( 265 | !>::contains_key(&account), 266 | Error::::AccountAlreadyInClaimlist 267 | ); 268 | 269 | >::insert(&account, ()); 270 | 271 | Ok(().into()) 272 | } 273 | 274 | /// Offchain worker submit linked Eth and BTC balance via extrinsic. 275 | /// 276 | /// Extrinsic Arguments. 277 | /// account: the target account offchain-worker query data for. 278 | /// block_number: the block number for offchain-worker trigger the query. 279 | /// data_source: the enum for different data source defined in urls.rs. 280 | /// balance: the balance returned from data source. 281 | /// 282 | /// This will alter `CommitAccountBalance` in storage. 283 | /// 284 | /// The dispatch origin for this call is `account`. 
285 | /// 286 | /// # 287 | /// - Independent of the arguments. 288 | /// - Contains a limited number of reads and writes. 289 | /// --------------------- 290 | /// - Base Weight: 291 | /// - Creating: 27.56 µs 292 | /// - Killing: 35.11 µs 293 | /// - DB Weight: 1 Read, 1 Write 294 | /// # 295 | /// 296 | #[pallet::weight(::WeightInfo::submit_balance())] 297 | pub fn submit_balance( 298 | origin: OriginFor, 299 | account: T::AccountId, 300 | block_number: T::BlockNumber, 301 | data_source: urls::DataSource, 302 | balance: u128, 303 | ) -> DispatchResultWithPostInfo { 304 | let sender = ensure_signed(origin)?; 305 | 306 | // Check data source 307 | Self::valid_data_source(data_source)?; 308 | 309 | // Check block number 310 | Self::valid_commit_block_number( 311 | block_number, 312 | >::block_number(), 313 | )?; 314 | 315 | // Check the commit slot 316 | Self::valid_commit_slot( 317 | account.clone(), 318 | Self::get_ocw_index(Some(&account)), 319 | data_source, 320 | )?; 321 | 322 | // Check the commit number in a session 323 | match Self::commit_number_in_session() { 324 | Some(commits) => { 325 | ensure!(commits < T::MaximumCommitsPerSession::get(), >::TooMuchCommitsInSession); 326 | CommitNumberInSession::::set(Some(commits + 1)); 327 | }, 328 | None => {CommitNumberInSession::::set(Some(1));}, 329 | }; 330 | 331 | // put query result on chain 332 | CommitAccountBalance::::insert( 333 | &sender, 334 | &QueryKey { account, data_source }, 335 | Some(balance), 336 | ); 337 | 338 | Ok(().into()) 339 | } 340 | } 341 | 342 | impl Pallet { 343 | // 344 | fn do_finalize(block_number: T::BlockNumber) { 345 | let query_session_length: usize = T::QuerySessionLength::get() as usize; 346 | let index_in_session = TryInto::::try_into(block_number).map_or(query_session_length, |bn| bn % query_session_length); 347 | let last_block_number = query_session_length - 1; 348 | 349 | // Clear claim at the first block of a session 350 | if index_in_session == 0 { 351 | Self::clear_claim(); 352 | // Do aggregation at last block of a session 353 | } else if index_in_session == last_block_number { 354 | Self::aggregate_query_result(); 355 | } 356 | } 357 | 358 | // Main entry for ocw 359 | fn query(block_number: T::BlockNumber, info: &urls::TokenInfo) { 360 | // Get my ocw account for submitting query results 361 | let offchain_worker_account = StorageValueRef::persistent(b"offchain-worker::account"); 362 | 363 | // Get my ocw index 364 | let ocw_account_index = match offchain_worker_account.get::() { 365 | Ok(Some(account)) => Self::get_ocw_index(Some(&account)), 366 | _ => Self::get_ocw_index(None), 367 | }; 368 | 369 | // ocw length 370 | let mut ocw_length = Self::get_ocw_length(); 371 | if ocw_length == 0 { 372 | // No ocw in last round, set it as one, then the new ocw queries all accounts and all data sources 373 | ocw_length = 1; 374 | } 375 | 376 | // Loop for each account 377 | for item in >::iter() { 378 | let account: T::AccountId = item.0; 379 | match item.1 { 380 | Some(account_index) => { 381 | let mut source_index = 0; 382 | for source in &urls::DATA_SOURCE_LIST { 383 | let task_index = 384 | urls::TOTAL_DATA_SOURCE_NUMBER * account_index + source_index; 385 | if task_index % ocw_length == ocw_account_index { 386 | match source { 387 | urls::DataSource::EthEtherScan => 388 | match Self::get_balance_from_etherscan(&account, info) { 389 | Some(balance) => Self::offchain_signed_tx( 390 | account.clone(), 391 | block_number, 392 | urls::DataSource::EthEtherScan, 393 | balance, 394 | ), 395 | None => (), 396 |
}, 397 | urls::DataSource::EthInfura => 398 | match Self::get_balance_from_infura(&account, info) { 399 | Some(balance) => Self::offchain_signed_tx( 400 | account.clone(), 401 | block_number, 402 | urls::DataSource::EthInfura, 403 | balance, 404 | ), 405 | None => (), 406 | }, 407 | urls::DataSource::BtcBlockChain => 408 | match Self::get_balance_from_blockchain_info(&account, info) 409 | { 410 | Some(balance) => Self::offchain_signed_tx( 411 | account.clone(), 412 | block_number, 413 | urls::DataSource::BtcBlockChain, 414 | balance, 415 | ), 416 | None => (), 417 | }, 418 | _ => (), 419 | }; 420 | } 421 | source_index = source_index + 1; 422 | } 423 | }, 424 | None => (), 425 | } 426 | } 427 | } 428 | 429 | // Clear claim accounts in last session 430 | fn clear_claim() { 431 | // Reset the commit number for next session 432 | CommitNumberInSession::::set(Some(0)); 433 | 434 | // Remove all account index in last session 435 | >::remove_all(None); 436 | 437 | let accounts: Vec = 438 | >::iter().map(|(k, _)| k).collect(); 439 | 440 | // Set account index 441 | for (index, account) in accounts.iter().enumerate() { 442 | >::insert(&account, Some(index as u32)); 443 | } 444 | 445 | // Remove all claimed accounts 446 | >::remove_all(None); 447 | } 448 | 449 | // Start new round of offchain worker 450 | fn start(block_number: T::BlockNumber) -> Result<(), Error> { 451 | let local_token = StorageValueRef::persistent(b"offchain-worker::token"); 452 | 453 | match local_token.get::() { 454 | Ok(Some(token)) => { 455 | log::info!("API keys found! Start to query from sources."); 456 | Self::query(block_number, &token); 457 | Ok(()) 458 | }, 459 | Ok(None) => { 460 | log::info!("No API keys stored! Request keys from local server."); 461 | // Get token from local server 462 | urls::get_token().map_err(|_| Error::::TokenServerNoResponse ) 463 | }, 464 | Err(_) => { 465 | Err(Error::::InvalidStorageRetrieval) 466 | }, 467 | } 468 | } 469 | 470 | /// Aggregate query result and then record on chain 471 | /// --------------------- 472 | /// Algorithm description as following: 473 | /// 1. collect all query result from `CommitAccountBalance` 474 | /// 2. select the most frequent result as final, then store them on-chain 475 | /// 3. store the successful commit according to off-chain worker account 476 | /// 4. reward the off-chain worker based on its correct query and submit 477 | /// 5. update the Eth and BTC balances on-chain 478 | /// 6. remove old off-chain worker index and generate new one via convert map to vector 479 | /// use vector's index as new off-chain worker index, make it variable and random 480 | /// 7. 
finally, remove all intermediate on-chain storage, make it empty for next round query 481 | fn aggregate_query_result() { 482 | let mut result_map: BTreeMap<(T::AccountId, urls::BlockChainType, u128), u32> = 483 | BTreeMap::new(); 484 | let mut result_key: BTreeMap<(T::AccountId, urls::BlockChainType), Vec> = 485 | BTreeMap::new(); 486 | // Statistics for result 487 | for result in >::iter() { 488 | let account: T::AccountId = result.1.account; 489 | let data_source: urls::DataSource = result.1.data_source; 490 | let block_type: urls::BlockChainType = 491 | urls::data_source_to_block_chain_type(data_source); 492 | 493 | match result.2 { 494 | Some(balance) => { 495 | let map_key = (account.clone(), block_type, balance); 496 | 497 | result_map.entry(map_key.clone()).or_insert(1_32); 498 | 499 | match result_map.entry(map_key.clone()) { 500 | Entry::Occupied(mut entry) => { 501 | *entry.get_mut() = entry.get() + 1; 502 | }, 503 | Entry::Vacant(v) => { 504 | v.insert(1_u32); 505 | }, 506 | }; 507 | 508 | let key_key = (account, block_type); 509 | match result_key.get(&key_key) { 510 | Some(balance_vec) => { 511 | let mut found = false; 512 | for item in balance_vec.iter() { 513 | if *item == balance { 514 | found = true; 515 | break 516 | } 517 | } 518 | if !found { 519 | let mut new_balance_vec: Vec = balance_vec.clone(); 520 | new_balance_vec.push(balance); 521 | result_key.insert(key_key, new_balance_vec); 522 | } 523 | }, 524 | None => { 525 | result_key.insert(key_key, vec![balance]); 526 | }, 527 | }; 528 | }, 529 | None => (), 530 | } 531 | } 532 | 533 | // Store on chain, record_map will used to reward ocw. 534 | let mut record_map: BTreeMap<(T::AccountId, urls::BlockChainType), u128> = 535 | BTreeMap::new(); 536 | for result in result_key.iter() { 537 | let account: T::AccountId = result.0 .0.clone(); 538 | let block_type: urls::BlockChainType = result.0 .1; 539 | 540 | let mut most_value = 0_u128; 541 | let mut most_times = 0_u32; 542 | 543 | for balance in result.1 { 544 | let key = (account.clone(), block_type, *balance); 545 | match result_map.get(&key) { 546 | Some(frequence) => 547 | if *frequence > most_times { 548 | most_times = *frequence; 549 | most_value = *balance; 550 | }, 551 | None => {}, 552 | } 553 | } 554 | record_map.insert((account.clone(), block_type), most_value); 555 | 556 | // Update balance on chain 557 | if block_type == urls::BlockChainType::ETH { 558 | >::mutate(account, |value| value.1 = Some(most_value)); 559 | Self::increment_total_claims(); 560 | } else if block_type == urls::BlockChainType::BTC { 561 | >::mutate(account, |value| value.0 = Some(most_value)); 562 | Self::increment_total_claims(); 563 | } 564 | } 565 | 566 | // Remove all old ocw index 567 | >::remove_all(None); 568 | 569 | let mut account_index = 0_u32; 570 | let mut total_imbalance = >::zero(); 571 | 572 | // Put account into index map for next session 573 | for result in >::iter() { 574 | let ocw_account: T::AccountId = result.0; 575 | let query_account: T::AccountId = result.1.account; 576 | let data_source: urls::DataSource = result.1.data_source; 577 | let block_type: urls::BlockChainType = 578 | urls::data_source_to_block_chain_type(data_source); 579 | 580 | match result.2 { 581 | Some(committed_balance) => { 582 | // reward the ocw 583 | match record_map.get(&(query_account, block_type)) { 584 | Some(balance) => { 585 | // balance matched 586 | if *balance == committed_balance { 587 | let r = T::Currency::deposit_into_existing( 588 | &ocw_account, 589 | T::OcwQueryReward::get(), 
590 | ) 591 | .ok(); 592 | total_imbalance.maybe_subsume(r); 593 | } 594 | }, 595 | None => {}, 596 | } 597 | // update index for next session 598 | match Self::ocw_account_index(ocw_account.clone()) { 599 | Some(_) => {}, 600 | None => { 601 | >::insert(ocw_account, Some(account_index)); 602 | account_index = account_index + 1; 603 | }, 604 | } 605 | }, 606 | None => (), 607 | } 608 | } 609 | 610 | T::Reward::on_unbalanced(total_imbalance); 611 | 612 | // Remove all ocw commit in this session after aggregation 613 | >::remove_all(None); 614 | } 615 | 616 | fn increment_total_claims() { 617 | match Self::total_claims() { 618 | Some(claims) => TotalClaims::::put(claims + 1), 619 | None => TotalClaims::::put(1), 620 | } 621 | } 622 | 623 | /// Validate if the off-chain worker with correct index to commit the query 624 | /// --------------------- 625 | /// Each off-chain worker has index in the off-chain worker queue 626 | /// Each query also has index in the query task queue 627 | /// The method used to check if the both index are matched or not 628 | fn valid_commit_slot( 629 | account: T::AccountId, 630 | ocw_index: u32, 631 | data_source: urls::DataSource, 632 | ) -> dispatch::DispatchResult { 633 | // account claimed the asset query 634 | let ocw_account_index = Self::get_account_index(account)?; 635 | 636 | // ocw length 637 | let ocw_length = Self::get_ocw_length(); 638 | // if no ocw works in last session, then all new ocw valid for all accounts with all data source 639 | if ocw_length == 0 { 640 | return Ok(()) 641 | } 642 | 643 | // ensure ocw index is valid 644 | ensure!(ocw_index <= ocw_length, >::OffchainWorkerIndexOverflow); 645 | 646 | // ensure data source is valid 647 | ensure!(data_source != urls::DataSource::Invalid, >::InvalidDataSource); 648 | 649 | // get data source index 650 | let data_source_index = urls::data_source_to_index(data_source); 651 | 652 | // query task rounds 653 | let query_task_redudancy: u32 = T::QueryTaskRedundancy::get(); 654 | 655 | // task number per round 656 | let total_task_per_round = 657 | urls::TOTAL_DATA_SOURCE_NUMBER * Self::get_claim_account_length(); 658 | 659 | // task index in the first round 660 | let task_base_index = 661 | data_source_index + ocw_account_index * urls::TOTAL_DATA_SOURCE_NUMBER; 662 | 663 | let mut round: u32 = 0; 664 | while round < query_task_redudancy { 665 | // task index in n round 666 | let task_index = task_base_index + round * total_task_per_round; 667 | 668 | if task_index >= ocw_index { 669 | // if index match return Ok 670 | if (task_index - ocw_index) % ocw_length == 0 { 671 | return Ok(()) 672 | } 673 | } 674 | round = round + 1; 675 | } 676 | 677 | // no match found, return error 678 | Err(>::InvalidCommitSlot.into()) 679 | } 680 | 681 | // get claim account index 682 | fn get_account_index(account: T::AccountId) -> Result> { 683 | match Self::claim_account_index(account) { 684 | Some(index) => Ok(index), 685 | None => Err(>::InvalidAccountIndex.into()), 686 | } 687 | } 688 | 689 | // Check data source 690 | fn valid_data_source(data_source: urls::DataSource) -> dispatch::DispatchResult { 691 | match data_source { 692 | urls::DataSource::Invalid => Err(>::InvalidDataSource.into()), 693 | _ => Ok(()), 694 | } 695 | } 696 | 697 | // Check the block number 698 | fn valid_commit_block_number( 699 | commit_block_number: T::BlockNumber, 700 | current_block_number: T::BlockNumber, 701 | ) -> dispatch::DispatchResult { 702 | let zero_block: u32 = 0; 703 | let commit_block_number: u32 = 
TryInto::::try_into(commit_block_number) 704 | .map_or(zero_block, |block_number| block_number as u32); 705 | let current_block_number: u32 = TryInto::::try_into(current_block_number) 706 | .map_or(zero_block, |block_number| block_number as u32); 707 | 708 | // Basic check for both block number 709 | if commit_block_number == 0 || current_block_number == 0 { 710 | return Err(>::InvalidCommitBlockNumber.into()) 711 | } 712 | 713 | // Compute the scope of session 714 | let sesseion_start_block = 715 | commit_block_number - commit_block_number % T::QuerySessionLength::get(); 716 | let sesseion_end_block = sesseion_start_block + T::QuerySessionLength::get(); 717 | 718 | // If commit block number out of the scope of session. 719 | if current_block_number >= sesseion_end_block || 720 | current_block_number <= sesseion_start_block 721 | { 722 | return Err(>::InvalidCommitBlockNumber.into()) 723 | } 724 | 725 | Ok(()) 726 | } 727 | 728 | // Get index from map or use length of map for new ocw 729 | fn get_ocw_index(account: Option<&T::AccountId>) -> u32 { 730 | match account { 731 | Some(account) => match Self::ocw_account_index(account) { 732 | Some(index_in_map) => index_in_map, 733 | None => Self::get_ocw_length(), 734 | }, 735 | None => Self::get_ocw_length(), 736 | } 737 | } 738 | 739 | // Get the length of accounts 740 | fn get_ocw_length() -> u32 { 741 | >::iter().collect::>().len() as u32 742 | } 743 | 744 | // Get the length of accounts 745 | fn get_claim_account_length() -> u32 { 746 | >::iter().collect::>().len() as u32 747 | } 748 | 749 | fn get_balance_from_etherscan( 750 | account: &T::AccountId, 751 | info: &urls::TokenInfo, 752 | ) -> Option { 753 | if info.etherscan.len() == 0 { 754 | None 755 | } else { 756 | match core::str::from_utf8(&info.etherscan) { 757 | Ok(token) => { 758 | let get = urls::HttpGet { 759 | blockchain: urls::BlockChainType::ETH, 760 | prefix: "https://api-ropsten.etherscan.io/api?module=account&action=balancemulti&address=0x", 761 | delimiter: ",0x", 762 | postfix: "&tag=latest&apikey=", 763 | api_token: token, 764 | }; 765 | 766 | Self::fetch_balances( 767 | >::eth_addresses(account), 768 | urls::HttpRequest::GET(get), 769 | &urls::parse_etherscan_balances, 770 | ) 771 | .ok() 772 | }, 773 | Err(_) => None, 774 | } 775 | } 776 | } 777 | 778 | fn get_balance_from_infura(account: &T::AccountId, info: &urls::TokenInfo) -> Option { 779 | if info.infura.len() == 0 { 780 | None 781 | } else { 782 | match core::str::from_utf8(&info.infura) { 783 | Ok(token) => { 784 | let post = urls::HttpPost { 785 | url_main: "https://ropsten.infura.io/v3/", 786 | blockchain: urls::BlockChainType::ETH, 787 | prefix: r#"[{"jsonrpc":"2.0","method":"eth_getBalance","id":1,"params":["0x"#, 788 | delimiter: r#"","latest"]},{"jsonrpc":"2.0","method":"eth_getBalance","id":1,"params":["0x"#, 789 | postfix: r#"","latest"]}]"#, 790 | api_token: token, 791 | }; 792 | Self::fetch_balances( 793 | >::eth_addresses(account), 794 | urls::HttpRequest::POST(post), 795 | &urls::parse_blockchain_info_balances, 796 | ) 797 | .ok() 798 | }, 799 | Err(_) => None, 800 | } 801 | } 802 | } 803 | 804 | // TODO account not input request parameter 805 | fn get_balance_from_blockchain_info( 806 | _account: &T::AccountId, 807 | info: &urls::TokenInfo, 808 | ) -> Option { 809 | if info.blockchain.len() == 0 { 810 | None 811 | } else { 812 | match core::str::from_utf8(&info.blockchain) { 813 | Ok(token) => { 814 | let get = urls::HttpGet { 815 | blockchain: urls::BlockChainType::BTC, 816 | prefix: 
"https://blockchain.info/balance?active=", 817 | delimiter: "%7C", 818 | postfix: "", 819 | api_token: token, 820 | }; 821 | Self::fetch_balances( 822 | Vec::new(), 823 | urls::HttpRequest::GET(get), 824 | &urls::parse_blockchain_info_balances, 825 | ) 826 | .ok() 827 | }, 828 | Err(_) => None, 829 | } 830 | } 831 | } 832 | 833 | // Sign the query result 834 | fn offchain_signed_tx( 835 | account: T::AccountId, 836 | block_number: T::BlockNumber, 837 | data_source: urls::DataSource, 838 | balance: u128, 839 | ) { 840 | log::info!( 841 | "ocw sign tx: account {:?}, block number {:?}, data_source {:?}, balance {:?}", 842 | account.clone(), 843 | block_number, 844 | data_source, 845 | balance 846 | ); 847 | // Get signer from ocw 848 | let signer = Signer::::any_account(); 849 | 850 | let result = signer.send_signed_transaction(|_acct| 851 | // This is the on-chain function 852 | Call::submit_balance(account.clone(), block_number, data_source, balance)); 853 | 854 | // Display error if the signed tx fails. 855 | if let Some((acc, res)) = result { 856 | if res.is_err() { 857 | log::error!("failure: offchain_signed_tx: tx sent: {:?}", acc.id); 858 | } else { 859 | log::info!( 860 | "successful: offchain_signed_tx: tx sent: {:?} index is {:?}", 861 | acc.id, 862 | acc.index 863 | ); 864 | } 865 | 866 | // Record the account in local storage then we can know my index 867 | let account = StorageValueRef::persistent(b"offchain-worker::account"); 868 | account.set(&acc.id); 869 | } else { 870 | log::error!("No local account available"); 871 | } 872 | } 873 | 874 | // Generic function to fetch balance for specific link type 875 | pub fn fetch_balances( 876 | wallet_accounts: Vec<[u8; 20]>, 877 | request: urls::HttpRequest, 878 | parser: &dyn Fn(&str) -> Option>, 879 | ) -> Result> { 880 | // Return if no account linked 881 | if wallet_accounts.len() == 0 { 882 | return Ok(0_u128) 883 | } 884 | 885 | let result: Vec = match request { 886 | urls::HttpRequest::GET(get_req) => { 887 | // Compose the get request URL 888 | let mut link: Vec = Vec::new(); 889 | link.extend(get_req.prefix.as_bytes()); 890 | 891 | for (i, each_account) in wallet_accounts.iter().enumerate() { 892 | // Append delimiter if there are more than one accounts in the account_vec 893 | if i >= 1 { 894 | link.extend(get_req.delimiter.as_bytes()); 895 | }; 896 | 897 | link.extend(utils::address_to_string(each_account)); 898 | } 899 | link.extend(get_req.postfix.as_bytes()); 900 | link.extend(get_req.api_token.as_bytes()); 901 | 902 | // Fetch json response via http get 903 | urls::fetch_json_http_get(&link[..]).map_err(|_| Error::::InvalidNumber)? 
904 | }, 905 | 906 | urls::HttpRequest::POST(post_req) => { 907 | // Compose the post request URL 908 | let mut link: Vec = Vec::new(); 909 | link.extend(post_req.url_main.as_bytes()); 910 | link.extend(post_req.api_token.as_bytes()); 911 | 912 | // Batch multiple JSON-RPC calls for multiple getBalance operations within one post 913 | let mut body: Vec = Vec::new(); 914 | body.extend(post_req.prefix.as_bytes()); 915 | 916 | for (i, each_account) in wallet_accounts.iter().enumerate() { 917 | // Append delimiter if there are more than one accounts in the account_vec 918 | if i >= 1 { 919 | body.extend(post_req.delimiter.as_bytes()); 920 | }; 921 | 922 | body.extend(utils::address_to_string(each_account)); 923 | } 924 | body.extend(post_req.postfix.as_bytes()); 925 | 926 | // Fetch json response via http post 927 | urls::fetch_json_http_post(&link[..], &body[..]) 928 | .map_err(|_| Error::::InvalidNumber)? 929 | }, 930 | }; 931 | 932 | let response = 933 | sp_std::str::from_utf8(&result).map_err(|_| Error::::InvalidNumber)?; 934 | let balances = parser(response); 935 | 936 | match balances { 937 | Some(data) => { 938 | let mut total_balance: u128 = 0; 939 | // Sum up the balance 940 | for balance in data { 941 | total_balance = total_balance + balance; 942 | } 943 | Ok(total_balance) 944 | }, 945 | None => Ok(0_u128), 946 | } 947 | } 948 | } 949 | } 950 | --------------------------------------------------------------------------------
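A minimal standalone sketch (not taken from the repository files): the offchain-worker pallet above maps blocks into query sessions and (account, data source) pairs into flattened task indices. The Rust below mirrors that arithmetic from the `offchain_worker`, `do_finalize` and `query` functions; the concrete numbers (3 data sources, a session length of 10 blocks, 2 workers) are assumptions chosen only for illustration.

// Assumed illustrative constants; the real values come from urls::TOTAL_DATA_SOURCE_NUMBER
// and the runtime's T::QuerySessionLength parameter.
const TOTAL_DATA_SOURCE_NUMBER: u32 = 3;
const QUERY_SESSION_LENGTH: u32 = 10;

// Phase of a block inside a query session, mirroring the hooks above:
// claims are cleared at index 0, queries start at index 1, aggregation runs at the last index.
fn session_phase(block_number: u32) -> &'static str {
    let index_in_session = block_number % QUERY_SESSION_LENGTH;
    if index_in_session == 0 {
        "clear claims"
    } else if index_in_session == 1 {
        "start off-chain queries"
    } else if index_in_session == QUERY_SESSION_LENGTH - 1 {
        "aggregate query results"
    } else {
        "idle"
    }
}

// Worker `ocw_index` (out of `ocw_length` workers) performs the query for the pair
// (`account_index`, `source_index`) iff the flattened task index matches it modulo the
// number of workers, as in the modulo check inside `query`.
fn handles_task(ocw_index: u32, ocw_length: u32, account_index: u32, source_index: u32) -> bool {
    let task_index = TOTAL_DATA_SOURCE_NUMBER * account_index + source_index;
    task_index % ocw_length == ocw_index
}

fn main() {
    assert_eq!(session_phase(20), "clear claims");
    assert_eq!(session_phase(21), "start off-chain queries");
    assert_eq!(session_phase(29), "aggregate query results");

    // With 2 workers, worker 0 takes the even task indices and worker 1 the odd ones.
    assert!(handles_task(0, 2, 0, 0)); // task index 0
    assert!(handles_task(1, 2, 0, 1)); // task index 1
    assert!(handles_task(0, 2, 1, 1)); // task index 4
}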