├── .babelrc ├── .gitattributes ├── .github ├── ISSUE_TEMPLATE.md └── PULL_REQUEST_TEMPLATE.md ├── .gitignore ├── .npmignore ├── .solcover.js ├── .soliumignore ├── .soliumrc.json ├── .travis.yml ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── RELEASE.md ├── artifacts └── .gitkeep ├── contracts ├── .gitignore ├── contracts │ ├── Migrations.sol │ ├── identity │ │ ├── ClaimHolder.sol │ │ ├── ClaimHolderLibrary.sol │ │ ├── ClaimHolderPresigned.sol │ │ ├── ClaimHolderRegistered.sol │ │ ├── ClaimVerifier.sol │ │ ├── ERC725.sol │ │ ├── ERC735.sol │ │ ├── KeyHolder.sol │ │ ├── KeyHolderLibrary.sol │ │ ├── OriginIdentity.sol │ │ └── V00_UserRegistry.sol │ ├── marketplace │ │ └── v00 │ │ │ └── Marketplace.sol │ ├── multisig │ │ └── IMultiSigWallet.sol │ └── token │ │ ├── OriginToken.sol │ │ ├── TokenMigration.sol │ │ └── WhitelistedPausableToken.sol ├── migrations │ ├── 10_verify_mainnet.js │ ├── 11_update_claim_holder_library.js │ ├── 1_initial_migration.js │ ├── 2_deploy_marketplace_contracts.js │ ├── 3_create_sample_listings.js │ ├── 4_deploy_identity_contracts.js │ ├── 5_create_sample_issuers.js │ ├── 6_transfer_tokens_for_dev.js │ ├── 7_create_token_whitelist.js │ ├── 8_whitelist_affiliate.js │ ├── 9_transfer_ownership_and_tokens_for_mainnet.js │ └── README.md ├── releases │ ├── 0.6.0 │ │ ├── 0.6.0_rinkeby.log │ │ └── 0.6.0_ropsten.log │ ├── 0.7.0 │ │ ├── 0.7.0_rinkeby.log │ │ ├── 0.7.0_ropsten.log │ │ ├── README.md │ │ └── build │ │ │ └── contracts │ │ │ ├── ClaimHolder.json │ │ │ ├── ClaimHolderLibrary.json │ │ │ ├── ClaimHolderPresigned.json │ │ │ ├── ClaimHolderRegistered.json │ │ │ ├── ClaimVerifier.json │ │ │ ├── ERC725.json │ │ │ ├── ERC735.json │ │ │ ├── FractionalListing.json │ │ │ ├── KeyHolder.json │ │ │ ├── KeyHolderLibrary.json │ │ │ ├── Listing.json │ │ │ ├── ListingsRegistry.json │ │ │ ├── ListingsRegistryStorage.json │ │ │ ├── Migrations.json │ │ │ ├── OriginIdentity.json │ │ │ ├── Purchase.json │ │ │ ├── PurchaseLibrary.json │ │ │ ├── UnitListing.json │ │ │ └── UserRegistry.json │ ├── 0.8.0 │ │ ├── 0.8.0_mainnet.log │ │ ├── 0.8.0_rinkeby.log │ │ └── build │ │ │ └── contracts │ │ │ ├── BasicToken.json │ │ │ ├── BurnableToken.json │ │ │ ├── ClaimHolder.json │ │ │ ├── ClaimHolderLibrary.json │ │ │ ├── ClaimHolderPresigned.json │ │ │ ├── ClaimHolderRegistered.json │ │ │ ├── ClaimVerifier.json │ │ │ ├── DetailedERC20.json │ │ │ ├── ERC20.json │ │ │ ├── ERC20Basic.json │ │ │ ├── ERC725.json │ │ │ ├── ERC735.json │ │ │ ├── FractionalListing.json │ │ │ ├── IMultiSigWallet.json │ │ │ ├── KeyHolder.json │ │ │ ├── KeyHolderLibrary.json │ │ │ ├── Listing.json │ │ │ ├── ListingsRegistry.json │ │ │ ├── ListingsRegistryStorage.json │ │ │ ├── Migrations.json │ │ │ ├── MintableToken.json │ │ │ ├── OriginIdentity.json │ │ │ ├── OriginToken.json │ │ │ ├── Ownable.json │ │ │ ├── Pausable.json │ │ │ ├── PausableToken.json │ │ │ ├── Purchase.json │ │ │ ├── PurchaseLibrary.json │ │ │ ├── SafeMath.json │ │ │ ├── StandardToken.json │ │ │ ├── TokenMigration.json │ │ │ ├── UnitListing.json │ │ │ ├── UserRegistry.json │ │ │ ├── V00_Marketplace.json │ │ │ ├── V00_UserRegistry.json │ │ │ ├── V01_Marketplace.json │ │ │ └── WhitelistedPausableToken.json │ ├── 0.8.1 │ │ ├── 0.8.1_mainnet.log │ │ ├── 0.8.1_rinkeby.log │ │ └── build │ │ │ └── contracts │ │ │ ├── BasicToken.json │ │ │ ├── BurnableToken.json │ │ │ ├── ClaimHolder.json │ │ │ ├── ClaimHolderLibrary.json │ │ │ ├── ClaimHolderPresigned.json │ │ │ ├── ClaimHolderRegistered.json │ │ │ ├── ClaimVerifier.json │ │ │ ├── DetailedERC20.json │ │ │ ├── 
ERC20.json │ │ │ ├── ERC20Basic.json │ │ │ ├── ERC725.json │ │ │ ├── ERC735.json │ │ │ ├── IMultiSigWallet.json │ │ │ ├── KeyHolder.json │ │ │ ├── KeyHolderLibrary.json │ │ │ ├── Migrations.json │ │ │ ├── MintableToken.json │ │ │ ├── OriginIdentity.json │ │ │ ├── OriginToken.json │ │ │ ├── Ownable.json │ │ │ ├── Pausable.json │ │ │ ├── PausableToken.json │ │ │ ├── SafeMath.json │ │ │ ├── StandardToken.json │ │ │ ├── TokenMigration.json │ │ │ ├── V00_Marketplace.json │ │ │ ├── V00_UserRegistry.json │ │ │ └── WhitelistedPausableToken.json │ └── 0.8.4 │ │ ├── 0.8.4_mainnet.log │ │ ├── 0.8.4_rinkeby.log │ │ └── build │ │ └── contracts │ │ ├── BasicToken.json │ │ ├── BurnableToken.json │ │ ├── ClaimHolder.json │ │ ├── ClaimHolderLibrary.json │ │ ├── ClaimHolderPresigned.json │ │ ├── ClaimHolderRegistered.json │ │ ├── ClaimVerifier.json │ │ ├── DetailedERC20.json │ │ ├── ERC20.json │ │ ├── ERC20Basic.json │ │ ├── ERC725.json │ │ ├── ERC735.json │ │ ├── IMultiSigWallet.json │ │ ├── KeyHolder.json │ │ ├── KeyHolderLibrary.json │ │ ├── Migrations.json │ │ ├── MintableToken.json │ │ ├── OriginIdentity.json │ │ ├── OriginToken.json │ │ ├── Ownable.json │ │ ├── Pausable.json │ │ ├── PausableToken.json │ │ ├── SafeMath.json │ │ ├── StandardToken.json │ │ ├── TokenMigration.json │ │ ├── V00_Marketplace.json │ │ ├── V00_UserRegistry.json │ │ └── WhitelistedPausableToken.json ├── test-alt │ ├── Arbitrator.js │ ├── ClaimHolderPresigned.js │ ├── ClaimVerifier.js │ ├── Identity.js │ ├── Marketplace.js │ ├── MarketplaceLib.js │ ├── OriginToken.js │ ├── TokenLib.js │ ├── TokenMigration.js │ ├── UserRegistry.js │ ├── WhitelistedPausableToken.js │ ├── _gasPriceInDollars.js │ ├── _helper.js │ ├── _marketplaceHelpers.js │ └── contracts │ │ ├── MultiSigWallet.sol │ │ ├── OriginArbitrator.sol │ │ ├── Spender.sol │ │ ├── Token.sol │ │ └── arbitration │ │ ├── Arbitrable.sol │ │ ├── ArbitrableExample.sol │ │ ├── Arbitrator.sol │ │ └── CentralizedArbitrator.sol ├── test │ └── openzeppelin │ │ ├── README.md │ │ ├── helpers │ │ ├── assertJump.js │ │ ├── assertRevert.js │ │ ├── expectEvent.js │ │ └── originTokenMocks.js │ │ └── token │ │ └── ERC20 │ │ ├── BasicToken.test.js │ │ ├── BurnableToken.behaviour.js │ │ ├── BurnableToken.test.js │ │ ├── MintableToken.behaviour.js │ │ ├── MintableToken.test.js │ │ ├── PausableToken.test.js │ │ └── StandardToken.test.js └── truffle.js ├── daemon └── indexing │ ├── .gitignore │ ├── .npmignore │ ├── README.md │ ├── apollo │ └── index.js │ ├── lib │ ├── db.js │ └── search.js │ ├── listener │ ├── README.md │ └── listener.js │ ├── migrations │ ├── 20180815061230-add-listing-table.js │ ├── 20180820041726-add-offer-table.js │ └── sqls │ │ ├── 20180815061230-add-listing-table-down.sql │ │ ├── 20180815061230-add-listing-table-up.sql │ │ ├── 20180820041726-add-offer-table-down.sql │ │ └── 20180820041726-add-offer-table-up.sql │ ├── package-lock.json │ └── package.json ├── data_migrations ├── v0.6_listings_to_listings_registry │ ├── Listing_v0_1.json │ ├── ListingsRegistry_v0_2.json │ ├── README.md │ ├── conf-sample.json │ └── migrate_listings_to_listings_registry.js └── v0.7_listings_registry_to_storage │ ├── ListingsRegistry_new.json │ ├── ListingsRegistry_v0_6.json │ ├── README.md │ ├── conf-rinkeby.json │ ├── conf-ropsten.json │ ├── conf-sample.json │ └── migrate_listings_registry_to_storage.js ├── package-lock.json ├── package.json ├── scripts ├── build.js ├── helpers │ ├── build-contracts.js │ ├── deploy-contracts.js │ ├── minify-contracts.js │ ├── start-ganache.js │ ├── start-ipfs.js │ 
├── start-test-server.js │ ├── test-contracts.js │ ├── test-javascript.js │ ├── test-js-format.js │ ├── test-solidity-format.js │ └── test-truffle.js ├── sample-data.js ├── sample-words.json ├── test-env.js ├── test-js.js ├── test-truffle.js └── test.js ├── src ├── README.md ├── contractInterface │ ├── README.md │ ├── marketplace │ │ ├── resolver.js │ │ └── v00_adapter.js │ └── users │ │ ├── resolver.js │ │ └── v00_adapter.js ├── index.js ├── ipfsInterface │ ├── README.md │ ├── adapters │ │ ├── adapter-base.js │ │ ├── adapter-factory.js │ │ ├── dispute │ │ │ └── v1-dispute-adapter.js │ │ ├── listing-withdraw │ │ │ └── v1-listing-withdraw-adapter.js │ │ ├── listing │ │ │ └── v1-listing-adapter.js │ │ ├── offer-accept │ │ │ └── v1-offer-accept-adapter.js │ │ ├── offer-withdraw │ │ │ └── v1-offer-withdraw-adapter.js │ │ ├── offer │ │ │ └── v1-offer-adapter.js │ │ ├── profile │ │ │ └── v1-profile-adapter.js │ │ ├── resolution │ │ │ └── v1-resolution-adapter.js │ │ └── review │ │ │ └── v1-review-adapter.js │ ├── schemas │ │ ├── dispute.json │ │ ├── listing-withdraw.json │ │ ├── listing.json │ │ ├── notification.json │ │ ├── offer-accept.json │ │ ├── offer-withdraw.json │ │ ├── offer.json │ │ ├── profile.json │ │ ├── resolution.json │ │ └── review.json │ └── store.js ├── models │ ├── attestation.js │ ├── listing.js │ ├── money.js │ ├── notification.js │ ├── offer.js │ ├── review.js │ └── user.js ├── resources │ ├── attestations.js │ ├── discovery.js │ ├── marketplace.js │ ├── messaging.js │ ├── token.js │ └── users.js ├── services │ ├── contract-service.js │ └── ipfs-service.js └── utils │ ├── cookieStorage.js │ ├── id.js │ └── retries.js ├── test ├── contract-service.test.js ├── fixtures.js ├── fixtures │ ├── hawaii-house │ │ ├── hawaii-house.json │ │ ├── image-1.jpg │ │ ├── image-2.jpg │ │ ├── image-3.jpg │ │ ├── image-4.jpg │ │ └── image-5.jpg │ ├── lake-house │ │ ├── image-1.jpg │ │ ├── image-2.jpg │ │ ├── image-3.jpg │ │ ├── image-4.jpg │ │ ├── image-5.jpg │ │ ├── image-6.jpg │ │ └── lake-house.json │ ├── listing-valid.json │ ├── offer-valid.json │ ├── profile-valid.json │ ├── review-valid.json │ ├── scout │ │ ├── image-1.jpg │ │ ├── image-2.jpg │ │ ├── image-3.jpg │ │ ├── image-4.jpg │ │ └── scout.json │ ├── taylor-swift-tix │ │ ├── image-1.jpg │ │ ├── image-2.jpg │ │ ├── image-3.jpg │ │ ├── image-4.jpg │ │ ├── image-5.jpg │ │ └── taylor-swift-tix.json │ └── zinc-house │ │ ├── image-1 │ │ ├── image-1.jpg │ │ ├── image-10.jpg │ │ ├── image-2.jpg │ │ ├── image-3.jpg │ │ ├── image-4.jpg │ │ ├── image-5.jpg │ │ ├── image-7.jpg │ │ ├── image-8.jpg │ │ ├── image-9.jpg │ │ └── zinc-house.json ├── helpers │ ├── as-account.js │ └── contract-service-helper.js ├── index.js ├── ipfs_store.test.js ├── model-listing.test.js ├── resource_attestations.test.js ├── resource_marketplace.test.js ├── resource_messaging.test.js ├── resource_origin_token_test.js ├── resource_users.test.js ├── service_ipfs.test.js └── webpack.config.js ├── token ├── .gitignore ├── .npmignore ├── README.md ├── faucet │ ├── app.js │ └── public │ │ ├── index.html │ │ └── static │ │ ├── css │ │ ├── styles.css │ │ └── variables.css │ │ └── img │ │ └── faucet.png ├── lib │ ├── _contractHelper.js │ ├── config.js │ ├── marketplace.js │ ├── owner_whitelist.js │ └── token.js ├── package-lock.json ├── package.json └── scripts │ ├── README.md │ └── token_cli.js └── webpack.config.js /.babelrc: -------------------------------------------------------------------------------- 1 | { 2 | "presets": ["babel-preset-env"], 3 | "plugins": 
["transform-object-rest-spread"] 4 | } 5 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # Solidity syntax highlighting 2 | # see: https://github.com/github/linguist/pull/3973 3 | *.sol linguist-language=Solidity 4 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ⚠️ **WARNING** ⚠️ 2 | 3 | This repository has been deprecated! 4 | 5 | Please file all new issues in [OriginProtocol/origin](https://github.com/OriginProtocol/origin/issues/new). 6 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ⚠️ **WARNING** ⚠️ 2 | 3 | This repository has been deprecated! 4 | 5 | Please create all new pull requests in [OriginProtocol/origin](https://github.com/OriginProtocol/origin/compare). 6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # See http://help.github.com/ignore-files/ for more about ignoring files. 2 | 3 | # Truffle build artifacts 4 | # https://ethereum.stackexchange.com/questions/19486/storing-a-truffle-contract-interface-in-version-control 5 | /build 6 | 7 | # compiled output 8 | /dist 9 | 10 | # dependencies 11 | /node_modules 12 | 13 | # IDEs and editors 14 | .idea/ 15 | /.idea 16 | /.idea 17 | .project 18 | .classpath 19 | .c9/ 20 | *.launch 21 | .settings/ 22 | *.sublime-workspace 23 | 24 | # IDE - VSCode 25 | .vscode/* 26 | !.vscode/settings.json 27 | !.vscode/tasks.json 28 | !.vscode/launch.json 29 | !.vscode/extensions.json 30 | 31 | # misc 32 | /.sass-cache 33 | /connect.lock 34 | /coverage 35 | /coverage.json 36 | /libpeerconnection.log 37 | npm-debug.log 38 | testem.log 39 | /typings 40 | .DS_Store 41 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | # See http://help.github.com/ignore-files/ for more about ignoring files. 
2 | 3 | # dependencies 4 | /node_modules 5 | /daemon 6 | 7 | # IDEs and editors 8 | /.idea 9 | .project 10 | .classpath 11 | .c9/ 12 | *.launch 13 | .settings/ 14 | *.sublime-workspace 15 | 16 | # IDE - VSCode 17 | .vscode/* 18 | !.vscode/settings.json 19 | !.vscode/tasks.json 20 | !.vscode/launch.json 21 | !.vscode/extensions.json 22 | 23 | # misc 24 | /.sass-cache 25 | /connect.lock 26 | /coverage 27 | /libpeerconnection.log 28 | npm-debug.log 29 | testem.log 30 | /typings 31 | -------------------------------------------------------------------------------- /.solcover.js: -------------------------------------------------------------------------------- 1 | require('babel-register') 2 | require('babel-polyfill') 3 | 4 | module.exports = { 5 | copyPackages: ['openzeppelin-solidity'], 6 | dir: './contracts', 7 | // for Mocha tests 8 | testCommand: 'mocha -r babel-register -r babel-polyfill -t 10000 --exit ../contracts/test-alt/', 9 | // for Truffle tests (this runs out of gas) 10 | // testCommand: 'npx truffle test --network coverage', 11 | } 12 | -------------------------------------------------------------------------------- /.soliumignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | contracts/contracts/Migrations.sol 3 | -------------------------------------------------------------------------------- /.soliumrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "solium:recommended", 3 | "plugins": [ 4 | "security" 5 | ], 6 | "rules": { 7 | "security/no-inline-assembly": "off", 8 | "security/no-tx-origin": "off", 9 | "security/no-block-members": "off", 10 | "security/no-low-level-calls": "off", 11 | "security/no-send": "off", 12 | "max-len": "off", 13 | "no-empty-blocks": "off", 14 | "quotes": [ 15 | "error", 16 | "double" 17 | ], 18 | "indentation": [ 19 | "error", 20 | 4 21 | ] 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | node_js: 3 | - "lts/*" 4 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to Origin.js 2 | 3 | Thanks for helping out! 👍 4 | 5 | Before you submit a PR, you'll want to make sure that: 6 | 7 | 1. Any changes are tested. 8 | 2. All tests pass. (See each package's readme for instructions on testing that package.) 9 | 3. The formatting is correct. Just run `npm run format` in a package folder. 10 | 11 | If this is a new feature, make sure you've discussed it with [our #engineering channel on Discord](https://www.originprotocol.com/discord). 12 | 13 | 14 | ### Coding style: JavaScript 15 | 16 | We use [NPM style](https://docs.npmjs.com/misc/coding-style), as automated by the [prettier](https://prettier.io) tool. 2 space indents, no semicolons. 17 | 18 | ### Coding style: Solidity 19 | 20 | We use two space indents. Just copy the surrounding style and use your good judgement.
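For example, a minimal `.prettierrc.js` along these lines would enforce the JavaScript style described above (an illustrative sketch, not a file that exists in this repo; the `singleQuote` setting is an assumption based on the existing JavaScript sources):

```js
// Illustrative prettier config matching the JavaScript style described in CONTRIBUTING.md.
// Not an actual file in this repository.
module.exports = {
  tabWidth: 2,       // 2 space indents
  semi: false,       // no semicolons
  singleQuote: true  // assumption: single quotes, as used in the repo's existing JS
}
```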
-------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 Origin Protocol 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![origin_github_banner](https://user-images.githubusercontent.com/673455/37314301-f8db9a90-2618-11e8-8fee-b44f38febf38.png) 2 | 3 | This repo is no longer active. Please go to https://github.com/OriginProtocol/origin 4 | -------------------------------------------------------------------------------- /artifacts/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/artifacts/.gitkeep -------------------------------------------------------------------------------- /contracts/.gitignore: -------------------------------------------------------------------------------- 1 | # Truffle build artifacts 2 | # https://ethereum.stackexchange.com/questions/19486/storing-a-truffle-contract-interface-in-version-control 3 | /build 4 | 5 | .idea/ 6 | /.idea 7 | 8 | # dependencies 9 | /node_modules 10 | 11 | secret/ 12 | -------------------------------------------------------------------------------- /contracts/contracts/Migrations.sol: -------------------------------------------------------------------------------- 1 | pragma solidity ^0.4.24; 2 | 3 | contract Migrations { 4 | address public owner; 5 | uint public last_completed_migration; 6 | 7 | modifier restricted() { 8 | if (msg.sender == owner) _; 9 | } 10 | 11 | constructor() public { 12 | owner = msg.sender; 13 | } 14 | 15 | function setCompleted(uint completed) public restricted { 16 | last_completed_migration = completed; 17 | } 18 | 19 | function upgrade(address new_address) public restricted { 20 | Migrations upgraded = Migrations(new_address); 21 | upgraded.setCompleted(last_completed_migration); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /contracts/contracts/identity/ClaimHolder.sol: -------------------------------------------------------------------------------- 1 | pragma solidity ^0.4.24; 2 | 3 | import "./ERC735.sol"; 4 | import "./KeyHolder.sol"; 5 | import 
"./ClaimHolderLibrary.sol"; 6 | 7 | 8 | contract ClaimHolder is KeyHolder, ERC735 { 9 | 10 | ClaimHolderLibrary.Claims claims; 11 | 12 | function addClaim( 13 | uint256 _topic, 14 | uint256 _scheme, 15 | address _issuer, 16 | bytes _signature, 17 | bytes _data, 18 | string _uri 19 | ) 20 | public 21 | returns (bytes32 claimRequestId) 22 | { 23 | return ClaimHolderLibrary.addClaim( 24 | keyHolderData, 25 | claims, 26 | _topic, 27 | _scheme, 28 | _issuer, 29 | _signature, 30 | _data, 31 | _uri 32 | ); 33 | } 34 | 35 | function addClaims( 36 | uint256[] _topic, 37 | address[] _issuer, 38 | bytes _signature, 39 | bytes _data, 40 | uint256[] _offsets 41 | ) 42 | public 43 | { 44 | ClaimHolderLibrary.addClaims( 45 | keyHolderData, 46 | claims, 47 | _topic, 48 | _issuer, 49 | _signature, 50 | _data, 51 | _offsets 52 | ); 53 | } 54 | 55 | function removeClaim(bytes32 _claimId) public returns (bool success) { 56 | return ClaimHolderLibrary.removeClaim(keyHolderData, claims, _claimId); 57 | } 58 | 59 | function getClaim(bytes32 _claimId) 60 | public 61 | view 62 | returns( 63 | uint256 topic, 64 | uint256 scheme, 65 | address issuer, 66 | bytes signature, 67 | bytes data, 68 | string uri 69 | ) 70 | { 71 | return ClaimHolderLibrary.getClaim(claims, _claimId); 72 | } 73 | 74 | function getClaimIdsByTopic(uint256 _topic) 75 | public 76 | view 77 | returns(bytes32[] claimIds) 78 | { 79 | return claims.byTopic[_topic]; 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /contracts/contracts/identity/ClaimHolderPresigned.sol: -------------------------------------------------------------------------------- 1 | pragma solidity ^0.4.24; 2 | 3 | import "./ClaimHolderRegistered.sol"; 4 | 5 | /** 6 | * NOTE: This contract exists as a convenience for deploying an identity with 7 | * some 'pre-signed' claims. If you don't care about that, just use ClaimHolder 8 | * instead. 
9 | */ 10 | 11 | 12 | contract ClaimHolderPresigned is ClaimHolderRegistered { 13 | 14 | constructor( 15 | address _userRegistryAddress, 16 | uint256[] _topic, 17 | address[] _issuer, 18 | bytes _signature, 19 | bytes _data, 20 | uint256[] _offsets 21 | ) 22 | ClaimHolderRegistered(_userRegistryAddress) 23 | public 24 | { 25 | ClaimHolderLibrary.addClaims( 26 | keyHolderData, 27 | claims, 28 | _topic, 29 | _issuer, 30 | _signature, 31 | _data, 32 | _offsets 33 | ); 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /contracts/contracts/identity/ClaimHolderRegistered.sol: -------------------------------------------------------------------------------- 1 | pragma solidity ^0.4.24; 2 | 3 | import "./ClaimHolder.sol"; 4 | import "./V00_UserRegistry.sol"; 5 | 6 | 7 | contract ClaimHolderRegistered is ClaimHolder { 8 | 9 | constructor ( 10 | address _userRegistryAddress 11 | ) 12 | public 13 | { 14 | V00_UserRegistry userRegistry = V00_UserRegistry(_userRegistryAddress); 15 | userRegistry.registerUser(); 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /contracts/contracts/identity/ClaimVerifier.sol: -------------------------------------------------------------------------------- 1 | pragma solidity ^0.4.24; 2 | 3 | import "./ClaimHolder.sol"; 4 | 5 | 6 | contract ClaimVerifier { 7 | 8 | event ClaimValid(ClaimHolder _identity, uint256 topic); 9 | event ClaimInvalid(ClaimHolder _identity, uint256 topic); 10 | 11 | ClaimHolder public trustedClaimHolder; 12 | 13 | constructor(address _trustedClaimHolder) public { 14 | trustedClaimHolder = ClaimHolder(_trustedClaimHolder); 15 | } 16 | 17 | function checkClaim(ClaimHolder _identity, uint256 topic) 18 | public 19 | returns (bool claimValid) 20 | { 21 | if (claimIsValid(_identity, topic)) { 22 | emit ClaimValid(_identity, topic); 23 | return true; 24 | } else { 25 | emit ClaimInvalid(_identity, topic); 26 | return false; 27 | } 28 | } 29 | 30 | function claimIsValid(ClaimHolder _identity, uint256 topic) 31 | public 32 | view 33 | returns (bool claimValid) 34 | { 35 | uint256 foundTopic; 36 | uint256 scheme; 37 | address issuer; 38 | bytes memory sig; 39 | bytes memory data; 40 | 41 | // Construct claimId (identifier + claim type) 42 | bytes32 claimId = keccak256(abi.encodePacked(trustedClaimHolder, topic)); 43 | 44 | // Fetch claim from user 45 | ( foundTopic, scheme, issuer, sig, data, ) = _identity.getClaim(claimId); 46 | 47 | bytes32 dataHash = keccak256(abi.encodePacked(_identity, topic, data)); 48 | bytes32 prefixedHash = keccak256(abi.encodePacked("\x19Ethereum Signed Message:\n32", dataHash)); 49 | 50 | // Recover address of data signer 51 | address recovered = getRecoveredAddress(sig, prefixedHash); 52 | 53 | // Take hash of recovered address 54 | bytes32 hashedAddr = keccak256(abi.encodePacked(recovered)); 55 | 56 | // Does the trusted identity have the key which signed the user's claim?
57 | return trustedClaimHolder.keyHasPurpose(hashedAddr, 3); 58 | } 59 | 60 | function getRecoveredAddress(bytes sig, bytes32 dataHash) 61 | public 62 | pure 63 | returns (address addr) 64 | { 65 | bytes32 ra; 66 | bytes32 sa; 67 | uint8 va; 68 | 69 | // Check the signature length 70 | if (sig.length != 65) { 71 | return (0); 72 | } 73 | 74 | // Divide the signature in r, s and v variables 75 | assembly { 76 | ra := mload(add(sig, 32)) 77 | sa := mload(add(sig, 64)) 78 | va := byte(0, mload(add(sig, 96))) 79 | } 80 | 81 | if (va < 27) { 82 | va += 27; 83 | } 84 | 85 | address recoveredAddress = ecrecover(dataHash, va, ra, sa); 86 | 87 | return (recoveredAddress); 88 | } 89 | 90 | } 91 | -------------------------------------------------------------------------------- /contracts/contracts/identity/ERC725.sol: -------------------------------------------------------------------------------- 1 | pragma solidity ^0.4.24; 2 | 3 | 4 | contract ERC725 { 5 | 6 | uint256 constant MANAGEMENT_KEY = 1; 7 | uint256 constant ACTION_KEY = 2; 8 | uint256 constant CLAIM_SIGNER_KEY = 3; 9 | uint256 constant ENCRYPTION_KEY = 4; 10 | 11 | event KeyAdded(bytes32 indexed key, uint256 indexed purpose, uint256 indexed keyType); 12 | event KeyRemoved(bytes32 indexed key, uint256 indexed purpose, uint256 indexed keyType); 13 | event ExecutionRequested(uint256 indexed executionId, address indexed to, uint256 indexed value, bytes data); 14 | event Executed(uint256 indexed executionId, address indexed to, uint256 indexed value, bytes data); 15 | event Approved(uint256 indexed executionId, bool approved); 16 | 17 | function getKey(bytes32 _key) public view returns(uint256[] purposes, uint256 keyType, bytes32 key); 18 | function keyHasPurpose(bytes32 _key, uint256 _purpose) public view returns (bool exists); 19 | function getKeysByPurpose(uint256 _purpose) public view returns(bytes32[] keys); 20 | function addKey(bytes32 _key, uint256 _purpose, uint256 _keyType) public returns (bool success); 21 | function removeKey(bytes32 _key, uint256 _purpose) public returns (bool success); 22 | function execute(address _to, uint256 _value, bytes _data) public returns (uint256 executionId); 23 | function approve(uint256 _id, bool _approve) public returns (bool success); 24 | } 25 | -------------------------------------------------------------------------------- /contracts/contracts/identity/ERC735.sol: -------------------------------------------------------------------------------- 1 | pragma solidity ^0.4.24; 2 | 3 | 4 | contract ERC735 { 5 | 6 | event ClaimRequested(uint256 indexed claimRequestId, uint256 indexed topic, uint256 scheme, address indexed issuer, bytes signature, bytes data, string uri); 7 | event ClaimAdded(bytes32 indexed claimId, uint256 indexed topic, uint256 scheme, address indexed issuer, bytes signature, bytes data, string uri); 8 | event ClaimRemoved(bytes32 indexed claimId, uint256 indexed topic, uint256 scheme, address indexed issuer, bytes signature, bytes data, string uri); 9 | event ClaimChanged(bytes32 indexed claimId, uint256 indexed topic, uint256 scheme, address indexed issuer, bytes signature, bytes data, string uri); 10 | 11 | struct Claim { 12 | uint256 topic; 13 | uint256 scheme; 14 | address issuer; // msg.sender 15 | bytes signature; // this.address + topic + data 16 | bytes data; 17 | string uri; 18 | } 19 | 20 | function getClaim(bytes32 _claimId) public view returns(uint256 topic, uint256 scheme, address issuer, bytes signature, bytes data, string uri); 21 | function getClaimIdsByTopic(uint256 
_topic) public view returns(bytes32[] claimIds); 22 | function addClaim(uint256 _topic, uint256 _scheme, address issuer, bytes _signature, bytes _data, string _uri) public returns (bytes32 claimRequestId); 23 | function removeClaim(bytes32 _claimId) public returns (bool success); 24 | } 25 | -------------------------------------------------------------------------------- /contracts/contracts/identity/KeyHolder.sol: -------------------------------------------------------------------------------- 1 | pragma solidity ^0.4.24; 2 | 3 | import "./ERC725.sol"; 4 | import "./KeyHolderLibrary.sol"; 5 | 6 | 7 | contract KeyHolder is ERC725 { 8 | KeyHolderLibrary.KeyHolderData keyHolderData; 9 | 10 | constructor() public { 11 | KeyHolderLibrary.init(keyHolderData); 12 | } 13 | 14 | function getKey(bytes32 _key) 15 | public 16 | view 17 | returns(uint256[] purposes, uint256 keyType, bytes32 key) 18 | { 19 | return KeyHolderLibrary.getKey(keyHolderData, _key); 20 | } 21 | 22 | function getKeyPurposes(bytes32 _key) 23 | public 24 | view 25 | returns(uint256[] purposes) 26 | { 27 | return KeyHolderLibrary.getKeyPurposes(keyHolderData, _key); 28 | } 29 | 30 | function getKeysByPurpose(uint256 _purpose) 31 | public 32 | view 33 | returns(bytes32[] _keys) 34 | { 35 | return KeyHolderLibrary.getKeysByPurpose(keyHolderData, _purpose); 36 | } 37 | 38 | function addKey(bytes32 _key, uint256 _purpose, uint256 _type) 39 | public 40 | returns (bool success) 41 | { 42 | return KeyHolderLibrary.addKey(keyHolderData, _key, _purpose, _type); 43 | } 44 | 45 | function approve(uint256 _id, bool _approve) 46 | public 47 | returns (bool success) 48 | { 49 | return KeyHolderLibrary.approve(keyHolderData, _id, _approve); 50 | } 51 | 52 | function execute(address _to, uint256 _value, bytes _data) 53 | public 54 | returns (uint256 executionId) 55 | { 56 | return KeyHolderLibrary.execute(keyHolderData, _to, _value, _data); 57 | } 58 | 59 | function removeKey(bytes32 _key, uint256 _purpose) 60 | public 61 | returns (bool success) 62 | { 63 | return KeyHolderLibrary.removeKey(keyHolderData, _key, _purpose); 64 | } 65 | 66 | function keyHasPurpose(bytes32 _key, uint256 _purpose) 67 | public 68 | view 69 | returns(bool exists) 70 | { 71 | return KeyHolderLibrary.keyHasPurpose(keyHolderData, _key, _purpose); 72 | } 73 | 74 | } 75 | -------------------------------------------------------------------------------- /contracts/contracts/identity/OriginIdentity.sol: -------------------------------------------------------------------------------- 1 | pragma solidity ^0.4.24; 2 | 3 | import "./ClaimHolder.sol"; 4 | 5 | // This will be deployed exactly once and represents Origin Protocol's 6 | // own identity for use in signing attestations. 
7 | 8 | 9 | contract OriginIdentity is ClaimHolder {} 10 | -------------------------------------------------------------------------------- /contracts/contracts/identity/V00_UserRegistry.sol: -------------------------------------------------------------------------------- 1 | pragma solidity ^0.4.24; 2 | 3 | import "./ClaimHolderPresigned.sol"; 4 | 5 | /// @title UserRegistry 6 | /// @dev Used to keep registry of user identities 7 | /// @author Matt Liu , Josh Fraser , Stan James 8 | 9 | 10 | contract V00_UserRegistry { 11 | /* 12 | * Events 13 | */ 14 | 15 | event NewUser(address _address, address _identity); 16 | 17 | /* 18 | * Storage 19 | */ 20 | 21 | // Mapping from ethereum wallet to ERC725 identity 22 | mapping(address => address) public users; 23 | 24 | /* 25 | * Public functions 26 | */ 27 | 28 | /// @dev registerUser(): Add a user to the registry 29 | function registerUser() 30 | public 31 | { 32 | users[tx.origin] = msg.sender; 33 | emit NewUser(tx.origin, msg.sender); 34 | } 35 | 36 | /// @dev clearUser(): Remove user from the registry 37 | function clearUser() 38 | public 39 | { 40 | users[msg.sender] = 0; 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /contracts/contracts/multisig/IMultiSigWallet.sol: -------------------------------------------------------------------------------- 1 | pragma solidity ^0.4.24; 2 | 3 | 4 | /** 5 | * @title Interface for interacting with Gnosis MultiSigWallet. 6 | * @dev Avoids creating an explicit dependency on Gnosis MultiSigWallet, which 7 | * doesn't have an official npm package. This allows us to just use the ABI 8 | * to call the contract methods. The contract is deployed through the DApp & 9 | * desktop app, so we only need to make limited calls to it. Further signatures 10 | * also happen in the Gnosis apps. 11 | */ 12 | contract IMultiSigWallet { 13 | mapping (address => bool) public isOwner; 14 | uint public required; 15 | 16 | /// @dev Allows an owner to submit and confirm a transaction. 17 | /// @param destination Transaction target address. 18 | /// @param value Transaction ether value. 19 | /// @param data Transaction data payload. 20 | /// @return Returns transaction ID. 21 | function submitTransaction(address destination, uint value, bytes data) 22 | public 23 | returns (uint transactionId); 24 | } 25 | -------------------------------------------------------------------------------- /contracts/contracts/token/TokenMigration.sol: -------------------------------------------------------------------------------- 1 | pragma solidity ^0.4.24; 2 | 3 | import "../../../node_modules/openzeppelin-solidity/contracts/ownership/Ownable.sol"; 4 | import "../../../node_modules/openzeppelin-solidity/contracts/token/ERC20/ERC20.sol"; 5 | import "./OriginToken.sol"; 6 | 7 | 8 | /** 9 | * @title Migrates balances from one token contract to another 10 | * @dev Migrates all balances from one token contract to another. Both contracts 11 | * must be pausable (to prevent changes during migration), and the target 12 | * contract must support minting tokens.
13 | */ 14 | contract TokenMigration is Ownable { 15 | OriginToken public fromToken; 16 | OriginToken public toToken; 17 | mapping (address => bool) public migrated; 18 | bool public finished; 19 | 20 | event Migrated(address indexed account, uint256 balance); 21 | event MigrationFinished(); 22 | 23 | modifier notFinished() { 24 | require(!finished, "migration already finished"); 25 | _; 26 | } 27 | 28 | // @dev Public constructor 29 | constructor(OriginToken _fromToken, OriginToken _toToken) public { 30 | owner = msg.sender; 31 | fromToken = _fromToken; 32 | toToken = _toToken; 33 | } 34 | 35 | // @dev Migrates a set of accounts, which should be limited in size so that 36 | // the transaction is under the gas limit. 37 | function migrateAccounts(address[] _holders) public onlyOwner notFinished { 38 | for (uint i = 0; i < _holders.length; i++) { 39 | migrateAccount(_holders[i]); 40 | } 41 | } 42 | 43 | // @dev Migrates the balance for a single address by minting the same number 44 | // of new tokens the address had with the old token. 45 | function migrateAccount(address _holder) public onlyOwner notFinished { 46 | require(!migrated[_holder], "holder already migrated"); 47 | uint256 balance = fromToken.balanceOf(_holder); 48 | if (balance > 0) { 49 | toToken.mint(_holder, balance); 50 | migrated[_holder] = true; 51 | emit Migrated(_holder, balance); 52 | } 53 | } 54 | 55 | // @dev Finishes migration and transfers token ownership to new owner. 56 | function finish(address _newTokenOwner) public onlyOwner notFinished { 57 | require( 58 | fromToken.totalSupply() == toToken.totalSupply(), 59 | "total token supplies do not match" 60 | ); 61 | require( 62 | _newTokenOwner != address(this), 63 | "this contract cannot own the token contract" 64 | ); 65 | finished = true; 66 | toToken.transferOwnership(_newTokenOwner); 67 | emit MigrationFinished(); 68 | } 69 | 70 | // TODO: revisit whether we want to migrate approvals 71 | } 72 | -------------------------------------------------------------------------------- /contracts/contracts/token/WhitelistedPausableToken.sol: -------------------------------------------------------------------------------- 1 | pragma solidity ^0.4.24; 2 | 3 | import "../../../node_modules/openzeppelin-solidity/contracts/token/ERC20/PausableToken.sol"; 4 | 5 | 6 | /** 7 | * @title Contract for enforcing a list of addresses allowed to send or receive tokens 8 | * @dev Until the whitelist expiration expires, this contract only permits 9 | * token transfers in which an allowed transactor is either the sender or 10 | * recipient. Once the whitelist expiration passes, it becomes impossible to 11 | * re-enable the whitelist. 12 | * 13 | * This contract inherits from PausableToken to enforce both pausing and 14 | * whitelists for transfer calls. 15 | */ 16 | contract WhitelistedPausableToken is PausableToken { 17 | // UNIX timestamp (in seconds) after which this whitelist no longer applies 18 | uint256 public whitelistExpiration; 19 | // While the whitelist is active, either the sender or recipient must be 20 | // in allowedTransactors. 
21 | mapping (address => bool) public allowedTransactors; 22 | 23 | event SetWhitelistExpiration(uint256 expiration); 24 | event AllowedTransactorAdded(address sender); 25 | event AllowedTransactorRemoved(address sender); 26 | 27 | // 28 | // Functions for maintaining whitelist 29 | // 30 | 31 | modifier allowedTransfer(address _from, address _to) { 32 | require( 33 | // solium-disable-next-line operator-whitespace 34 | !whitelistActive() || 35 | allowedTransactors[_from] || 36 | allowedTransactors[_to], 37 | "neither sender nor recipient are allowed" 38 | ); 39 | _; 40 | } 41 | 42 | function whitelistActive() public view returns (bool) { 43 | return block.timestamp < whitelistExpiration; 44 | } 45 | 46 | function addAllowedTransactor(address _transactor) public onlyOwner { 47 | emit AllowedTransactorAdded(_transactor); 48 | allowedTransactors[_transactor] = true; 49 | } 50 | 51 | function removeAllowedTransactor(address _transactor) public onlyOwner { 52 | emit AllowedTransactorRemoved(_transactor); 53 | delete allowedTransactors[_transactor]; 54 | } 55 | 56 | /** 57 | * @dev Set the whitelist expiration, after which the whitelist no longer 58 | * applies. 59 | */ 60 | function setWhitelistExpiration(uint256 _expiration) public onlyOwner { 61 | // allow only if whitelist expiration hasn't yet been set, or if the 62 | // whitelist expiration hasn't passed yet 63 | require( 64 | whitelistExpiration == 0 || whitelistActive(), 65 | "an expired whitelist cannot be extended" 66 | ); 67 | // prevent possible mistakes in calling this function 68 | require( 69 | _expiration >= block.timestamp + 1 days, 70 | "whitelist expiration not far enough into the future" 71 | ); 72 | emit SetWhitelistExpiration(_expiration); 73 | whitelistExpiration = _expiration; 74 | } 75 | 76 | // 77 | // ERC20 transfer functions that have been overridden to enforce the 78 | // whitelist. 
79 | // 80 | 81 | function transfer( 82 | address _to, 83 | uint256 _value 84 | ) 85 | public 86 | allowedTransfer(msg.sender, _to) 87 | returns (bool) 88 | { 89 | return super.transfer(_to, _value); 90 | } 91 | 92 | function transferFrom( 93 | address _from, 94 | address _to, 95 | uint256 _value 96 | ) 97 | public 98 | allowedTransfer(_from, _to) 99 | returns (bool) 100 | { 101 | return super.transferFrom(_from, _to, _value); 102 | } 103 | } 104 | -------------------------------------------------------------------------------- /contracts/migrations/10_verify_mainnet.js: -------------------------------------------------------------------------------- 1 | const OriginToken = artifacts.require('./token/OriginToken.sol') 2 | const V00_Marketplace = artifacts.require('./V00_Marketplace.sol') 3 | 4 | const tokenMultiSig = '0xe011fa2a6df98c69383457d87a056ed0103aa352' 5 | const marketplaceMultiSig = '0x8a1a4f77f9f0eb35fb9930696038be6220986c1b' 6 | 7 | module.exports = function(deployer, network) { 8 | return deployer.then(() => { 9 | if (network === 'mainnet' || process.env['SIMULATE_MAINNET']) { 10 | return verifyMainnetContracts() 11 | } 12 | }) 13 | } 14 | 15 | async function verifyMainnetContracts() { 16 | await verifyTokenContract() 17 | await verifyMarketplaceContract() 18 | } 19 | 20 | async function verifyTokenContract() { 21 | const accounts = await new Promise((resolve, reject) => { 22 | web3.eth.getAccounts((error, result) => { 23 | if (error) { 24 | reject(error) 25 | } 26 | resolve(result) 27 | }) 28 | }) 29 | const oldOwner = accounts[0] 30 | const token = await OriginToken.deployed() 31 | const marketplace = await V00_Marketplace.deployed() 32 | 33 | console.log('Validating token contract') 34 | 35 | assertEquals(await token.name(), 'OriginToken', 'name') 36 | assertEquals(await token.decimals(), 18, 'decimals') 37 | assertEquals(await token.symbol(), 'OGN', 'symbol') 38 | assertEquals( 39 | (await token.owner()).toLowerCase(), 40 | tokenMultiSig.toLowerCase(), 41 | 'owner' 42 | ) 43 | assertEquals(await token.paused(), false, 'not paused') 44 | assertEquals( 45 | await token.callSpenderWhitelist(marketplace.address), 46 | true, 47 | 'marketplace whitelisted for approveAndCallWithSender' 48 | ) 49 | assertEquals( 50 | await token.allowedTransactors(oldOwner), 51 | false, 52 | 'old owner not allowed to transfer tokens' 53 | ) 54 | assertEquals( 55 | await token.allowedTransactors(marketplace.address), 56 | true, 57 | 'marketplace allowed to transfer tokens' 58 | ) 59 | assertEquals( 60 | await token.allowedTransactors(tokenMultiSig), 61 | true, 62 | 'multi-sig allowed to transfer tokens' 63 | ) 64 | assertEquals( 65 | await token.allowedTransactors('0x7aD0fa0E2380a5e0208B25AC69216Bd7Ff206bF8'), 66 | true, 67 | 'other wallet allowed to transfer tokens' 68 | ) 69 | assertEquals( 70 | await token.whitelistExpiration(), 71 | Date.parse('28 Feb 2019 00:00:00 PST') / 1000, 72 | 'whitelist expires on 2/28/2019' 73 | ) 74 | } 75 | 76 | async function verifyMarketplaceContract() { 77 | const token = await OriginToken.deployed() 78 | const marketplace = await V00_Marketplace.deployed() 79 | 80 | console.log('Validating marketplace contract') 81 | 82 | assertEquals( 83 | await marketplace.tokenAddr(), 84 | token.address, 85 | 'marketplace tokenAddr points to OriginToken' 86 | ) 87 | assertEquals( 88 | await marketplace.owner(), 89 | marketplaceMultiSig, 90 | 'marketplace contract owned by multi-sig' 91 | ) 92 | assertEquals( 93 | await marketplace.allowedAffiliates(marketplace.address), 94 | 
false, 95 | 'marketplace affiliate whitelist enabled' 96 | ) 97 | assertEquals( 98 | await marketplace.allowedAffiliates('0x7aD0fa0E2380a5e0208B25AC69216Bd7Ff206bF8'), 99 | true, 100 | 'marketplace affiliate address is whitelisted' 101 | ) 102 | } 103 | 104 | function assertEquals(got, expected, message) { 105 | if (got != expected) { 106 | throw new Error(`${message}: contract value ${got} != expected ${expected}`) 107 | } 108 | console.log(` * ${message}: pass`) 109 | } 110 | -------------------------------------------------------------------------------- /contracts/migrations/11_update_claim_holder_library.js: -------------------------------------------------------------------------------- 1 | // This migration is needed to update our ClaimHolderLibrary. See: https://github.com/OriginProtocol/origin-js/pull/598 2 | var ClaimHolderLibrary = artifacts.require("./ClaimHolderLibrary.sol") 3 | 4 | module.exports = function(deployer, network) { 5 | return deployer.then(() => { 6 | return deployContracts(deployer) 7 | }) 8 | } 9 | 10 | async function deployContracts(deployer) { 11 | await deployer.deploy(ClaimHolderLibrary) 12 | } 13 | -------------------------------------------------------------------------------- /contracts/migrations/1_initial_migration.js: -------------------------------------------------------------------------------- 1 | var Migrations = artifacts.require("./Migrations.sol"); 2 | 3 | module.exports = function(deployer) { 4 | deployer.deploy(Migrations); 5 | }; 6 | -------------------------------------------------------------------------------- /contracts/migrations/2_deploy_marketplace_contracts.js: -------------------------------------------------------------------------------- 1 | const OriginToken = artifacts.require('./token/OriginToken.sol') 2 | const V00_Marketplace = artifacts.require('./V00_Marketplace.sol') 3 | 4 | module.exports = function(deployer) { 5 | return deployer.then(() => { 6 | return deployContracts(deployer) 7 | }) 8 | } 9 | 10 | async function deployContracts(deployer) { 11 | 12 | // Initial supply of 1B tokens, in natural units. 
13 | await deployer.deploy(OriginToken, '1000000000000000000000000000') 14 | 15 | await deployer.deploy(V00_Marketplace, OriginToken.address) 16 | 17 | //register the marketplace as a possible caller upon token approval 18 | const token = await OriginToken.deployed() 19 | const contractOwner = await token.owner() 20 | await token.addCallSpenderWhitelist(V00_Marketplace.address, {from:contractOwner}) 21 | } 22 | -------------------------------------------------------------------------------- /contracts/migrations/3_create_sample_listings.js: -------------------------------------------------------------------------------- 1 | const V00_Marketplace = artifacts.require('./V00_Marketplace.sol') 2 | 3 | module.exports = function(deployer, network) { 4 | return deployer.then(() => { 5 | if (network === 'mainnet' || network === 'rinkeby' || network === 'ropsten') { 6 | console.log(`Skipping sample listings creation on ${network}`) 7 | } else { 8 | return deployContracts(deployer) 9 | } 10 | }) 11 | } 12 | 13 | async function createListing(marketplace, hash, from) { 14 | await marketplace.createListing(hash, '0', from, { gas: 4612388, from }) 15 | } 16 | 17 | async function deployContracts(deployer) { 18 | 19 | const accounts = await new Promise((resolve, reject) => { 20 | web3.eth.getAccounts((error, result) => { 21 | if (error) { 22 | reject(error) 23 | } 24 | resolve(result) 25 | }) 26 | }) 27 | 28 | const Seller = accounts[1] 29 | 30 | const marketplace00 = await V00_Marketplace.deployed() 31 | 32 | await createListing( 33 | marketplace00, 34 | '0x551e4fd07429fd2d072ca0d6668ff1c45a03a24f03191986be9b4c30a242c2ed', 35 | Seller 36 | ) 37 | await createListing( 38 | marketplace00, 39 | '0xb1ddde9f48791e6519b6b19e4318398df8e432a4dd68e694a6e52c87810cb6cb', 40 | Seller 41 | ) 42 | await createListing( 43 | marketplace00, 44 | '0xe83be3434158c69d42c1269260d3c44eeb37c316017d3e22352dea86e6b5afaf', 45 | Seller 46 | ) 47 | await createListing( 48 | marketplace00, 49 | '0x0f70fe3695f72660752fde3eb6d6aa02290758bc7cf6111f4e1e6bcb53c42633', 50 | Seller 51 | ) 52 | await createListing( 53 | marketplace00, 54 | '0x4cf7707c33291cb383d5001d7fc9d7ec9963658b2a55be1ba81db05381f293e7', 55 | Seller 56 | ) 57 | } 58 | -------------------------------------------------------------------------------- /contracts/migrations/4_deploy_identity_contracts.js: -------------------------------------------------------------------------------- 1 | var ClaimHolder = artifacts.require("./ClaimHolder.sol") 2 | var ClaimHolderLibrary = artifacts.require("./ClaimHolderLibrary.sol") 3 | var ClaimHolderPresigned = artifacts.require("./ClaimHolderPresigned.sol") 4 | var ClaimHolderRegistered = artifacts.require("./ClaimHolderRegistered.sol") 5 | var KeyHolder = artifacts.require("./KeyHolder.sol") 6 | var KeyHolderLibrary = artifacts.require("./KeyHolderLibrary.sol") 7 | var UserRegistry = artifacts.require("./V00_UserRegistry.sol") 8 | var OriginIdentity = artifacts.require("./OriginIdentity.sol") 9 | 10 | module.exports = function(deployer, network) { 11 | return deployer.then(() => { 12 | return deployContracts(deployer) 13 | }) 14 | } 15 | 16 | async function deployContracts(deployer) { 17 | await deployer.deploy(UserRegistry) 18 | 19 | await deployer.deploy(KeyHolderLibrary) 20 | await deployer.link(KeyHolderLibrary, KeyHolder) 21 | await deployer.link(KeyHolderLibrary, ClaimHolderLibrary) 22 | await deployer.deploy(ClaimHolderLibrary) 23 | 24 | await deployer.link(ClaimHolderLibrary, ClaimHolder) 25 | await 
deployer.link(KeyHolderLibrary, ClaimHolder) 26 | 27 | await deployer.link(ClaimHolderLibrary, ClaimHolderRegistered) 28 | await deployer.link(KeyHolderLibrary, ClaimHolderRegistered) 29 | 30 | await deployer.link(ClaimHolderLibrary, ClaimHolderPresigned) 31 | await deployer.link(KeyHolderLibrary, ClaimHolderPresigned) 32 | 33 | await deployer.link(ClaimHolderLibrary, OriginIdentity) 34 | await deployer.link(KeyHolderLibrary, OriginIdentity) 35 | await deployer.deploy(OriginIdentity) 36 | } 37 | -------------------------------------------------------------------------------- /contracts/migrations/5_create_sample_issuers.js: -------------------------------------------------------------------------------- 1 | var OriginIdentity = artifacts.require("./OriginIdentity.sol"); 2 | var Web3 = require("web3") 3 | 4 | const ISSUER_TEST = "0x7E5F4552091A69125d5DfCb7b8C2659029395Bdf" 5 | const ISSUER_DEV = "0x99C03fBb0C995ff1160133A8bd210D0E77bCD101" 6 | const ISSUER_PROD = "0x8EAbA82d8D1046E4F242D4501aeBB1a6d4b5C4Aa" 7 | const keyPurpose = 3 8 | const keyType = 1 9 | 10 | module.exports = function(deployer, network) { 11 | return deployer.then(() => { 12 | return add_sample_issuer(network) 13 | }) 14 | } 15 | 16 | async function add_sample_issuer(network) { 17 | let accounts = await new Promise((resolve, reject) => { 18 | web3.eth.getAccounts((error, result) => { 19 | if (error) { 20 | reject(error) 21 | } 22 | resolve(result) 23 | }) 24 | }) 25 | 26 | let defaultAccount = accounts[0] 27 | let originIdentity = await OriginIdentity.deployed() 28 | 29 | if (network === "development") { 30 | await originIdentity.addKey( 31 | Web3.utils.soliditySha3(ISSUER_TEST), 32 | keyPurpose, 33 | keyType, 34 | { from: defaultAccount, gas: 4000000 } 35 | ) 36 | 37 | return await originIdentity.addKey( 38 | Web3.utils.soliditySha3(ISSUER_DEV), 39 | keyPurpose, 40 | keyType, 41 | { from: defaultAccount, gas: 4000000 } 42 | ) 43 | } else { 44 | return await originIdentity.addKey( 45 | Web3.utils.soliditySha3(ISSUER_PROD), 46 | keyPurpose, 47 | keyType, 48 | { from: defaultAccount, gas: 4000000 } 49 | ) 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /contracts/migrations/6_transfer_tokens_for_dev.js: -------------------------------------------------------------------------------- 1 | const OriginToken = artifacts.require('./token/OriginToken.sol') 2 | const assert = require('assert') 3 | 4 | // NOTE: this file will only have an effect for local blockchains 5 | 6 | module.exports = function(deployer, network) { 7 | return deployer.then(() => { 8 | if (network === 'development') { 9 | console.log('Transferring OGN to test accounts') 10 | return transferTokensToTestAccounts(deployer, network) 11 | } else { 12 | console.log('Skipping') 13 | } 14 | }) 15 | } 16 | 17 | // Distribute tokens to test accounts when using a local blockchain. 18 | async function transferTokensToTestAccounts(deployer, network) { 19 | // The testnets have the token faucet, so we don't need this there. 20 | assert (network === 'development') 21 | 22 | const accounts = await new Promise((resolve, reject) => { 23 | web3.eth.getAccounts((error, result) => { 24 | if (error) { 25 | reject(error) 26 | } 27 | resolve(result) 28 | }) 29 | }) 30 | 31 | const lastAccount = accounts[accounts.length - 1] 32 | const token = await OriginToken.deployed() 33 | const contractOwner = await token.owner() 34 | const decimals = await token.decimals() 35 | 36 | // The last account will receive the bulk of the tokens.
This is to give a 37 | // more representative UX for the other accounts 38 | await token.transfer( 39 | lastAccount, 40 | await token.balanceOf(accounts[0]), 41 | { from: contractOwner } 42 | ) 43 | 44 | // Everyone else gets a fixed number of tokens to test with. 45 | const tokensPerAccount = 200 46 | for (let i = 0; i < accounts.length - 1; i++) { 47 | await token.transfer( 48 | accounts[i], 49 | tokensPerAccount * 10**decimals, 50 | { from: lastAccount } 51 | ) 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /contracts/migrations/7_create_token_whitelist.js: -------------------------------------------------------------------------------- 1 | const OriginToken = artifacts.require('./token/OriginToken.sol') 2 | const V00_Marketplace = artifacts.require('./V00_Marketplace.sol') 3 | 4 | // Store whitelist expiration as a UNIX timestamp. 5 | const whitelistExpiration = Date.parse('28 Feb 2019 00:00:00 PST') / 1000 6 | 7 | module.exports = function(deployer, network) { 8 | if (network !== 'development' || process.env['SIMULATE_MAINNET']) { 9 | return createTokenWhitelist(network) 10 | } 11 | } 12 | 13 | async function createTokenWhitelist(network) { 14 | const token = await OriginToken.deployed() 15 | const tokenOwner = await token.owner() 16 | 17 | // The token contract owner is also the owner of the test faucet, which needs 18 | // to send tokens to arbitrary addresses. 19 | await token.addAllowedTransactor(tokenOwner, { from: tokenOwner }) 20 | console.log(`Added token contract owner ${tokenOwner} to whitelist`) 21 | 22 | // Marketplace must be able to send OGN to any address (for refunds) and 23 | // receive OGN from any address (for offers with commissions). 24 | const marketplace = await V00_Marketplace.deployed() 25 | await token.addAllowedTransactor(marketplace.address, { from: tokenOwner }) 26 | console.log(`Added marketplace ${marketplace.address} to whitelist`) 27 | 28 | if (network === 'mainnet' || process.env['SIMULATE_MAINNET']) { 29 | const addresses = [ 30 | '0x7aD0fa0E2380a5e0208B25AC69216Bd7Ff206bF8', // affiliate 31 | '0xe011fa2a6df98c69383457d87a056ed0103aa352', // ERC20 multi-sig 32 | ] 33 | for (const address of addresses) { 34 | await token.addAllowedTransactor(address, { from: tokenOwner }) 35 | console.log(`Added address ${address} to whitelist`) 36 | } 37 | } 38 | 39 | // Activate the whitelist. 40 | await token.setWhitelistExpiration(whitelistExpiration, { from: tokenOwner }) 41 | console.log(`Enabled token whitelist, expiring at UNIX timestamp ${whitelistExpiration}`) 42 | } 43 | -------------------------------------------------------------------------------- /contracts/migrations/8_whitelist_affiliate.js: -------------------------------------------------------------------------------- 1 | const V00_Marketplace = artifacts.require('./V00_Marketplace.sol') 2 | 3 | module.exports = function(deployer, network) { 4 | return deployer.then(() => { 5 | return whitelistAffiliate(deployer, network) 6 | }) 7 | } 8 | 9 | async function whitelistAffiliate(_, network) { 10 | const accounts = await new Promise((resolve, reject) => { 11 | web3.eth.getAccounts((error, result) => { 12 | if (error) { 13 | reject(error) 14 | } 15 | resolve(result) 16 | }) 17 | }) 18 | 19 | const marketplace = await V00_Marketplace.deployed() 20 | const from = await marketplace.owner() 21 | 22 | // These need to remain synced with the dockerfiles in origin-box. 
23 | const affiliates = { 24 | rinkeby: '0xc1a33cda27c68e47e370ff31cdad7d6522ea93d5', 25 | origin: '0xc1a33cda27c68e47e370ff31cdad7d6522ea93d5', 26 | development: '0x821aea9a577a9b44299b9c15c88cf3087f3b5544', 27 | mainnet: '0x7aD0fa0E2380a5e0208B25AC69216Bd7Ff206bF8' 28 | } 29 | 30 | if (process.env['SIMULATE_MAINNET']) { 31 | console.log('simulating mainnet') 32 | network = 'mainnet' 33 | } 34 | const affiliate = affiliates[network] 35 | if (affiliate) { 36 | console.log(`whitelisting affiliate ${affiliate}`) 37 | await marketplace.addAffiliate( 38 | affiliate, 39 | '0x0000000000000000000000000000000000000000000000000000000000000000', 40 | { from } 41 | ) 42 | } else { 43 | console.log(`WARNING: no affiliate whitelisted for network ${network}`) 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /contracts/migrations/9_transfer_ownership_and_tokens_for_mainnet.js: -------------------------------------------------------------------------------- 1 | const OriginToken = artifacts.require('./token/OriginToken.sol') 2 | const V00_Marketplace = artifacts.require('./V00_Marketplace.sol') 3 | 4 | // TODO: extract these addresses into a common file that can be imported from 5 | // the various places that require these addresses 6 | const tokenMultiSig = '0xe011fa2a6df98c69383457d87a056ed0103aa352' 7 | const marketplaceMultiSig = '0x8a1a4f77f9f0eb35fb9930696038be6220986c1b' 8 | 9 | module.exports = function(deployer, network) { 10 | return deployer.then(() => { 11 | if (network === 'mainnet' || process.env['SIMULATE_MAINNET']) { 12 | return transferTokensAndContractsToMultiSig() 13 | } 14 | }) 15 | } 16 | 17 | async function transferTokensAndContractsToMultiSig() { 18 | const accounts = await new Promise((resolve, reject) => { 19 | web3.eth.getAccounts((error, result) => { 20 | if (error) { 21 | reject(error) 22 | } 23 | resolve(result) 24 | }) 25 | }) 26 | const owner = accounts[0] 27 | const token = await OriginToken.deployed() 28 | 29 | // Transfer all tokens to multi-sig wallet. 30 | const balance = await token.balanceOf(owner) 31 | const decimals = await token.decimals() 32 | await token.transfer(tokenMultiSig, balance, { from: owner }) 33 | const balanceTokens = balance / 10**decimals 34 | console.log(`transferred ${balanceTokens} OGN to ${tokenMultiSig}`) 35 | 36 | // Contract owner is a throwaway account, so remove it from the transactor 37 | // whitelist. 38 | await token.removeAllowedTransactor(owner, { from: owner }) 39 | 40 | // Transfer token contract to multi-sig wallet. 41 | await token.transferOwnership(tokenMultiSig, { from: owner }) 42 | console.log(`token contract owner set to ${tokenMultiSig}`) 43 | 44 | // Transfer marketplace contract to multi-sig wallet. 45 | const marketplace = await V00_Marketplace.deployed() 46 | await marketplace.transferOwnership(marketplaceMultiSig, { from: owner }) 47 | console.log(`marketplace contract owner set to ${marketplaceMultiSig}`) 48 | } 49 | 50 | -------------------------------------------------------------------------------- /contracts/migrations/README.md: -------------------------------------------------------------------------------- 1 | # Migrations 2 | 3 | These scripts define how contracts are deployed onto the blockchain. 4 | Once a migration has been run, it cannot be undone. Truffle keeps track 5 | of which migrations have already been run (via `Migrations.sol`) and 6 | will only run _new_ migrations. You can force all new migrations with 7 | the `--reset` modifier. 
([link](https://truffleframework.com/docs/truffle/getting-started/running-migrations#command)) 8 | -------------------------------------------------------------------------------- /contracts/releases/0.6.0/0.6.0_rinkeby.log: -------------------------------------------------------------------------------- 1 | Using network 'rinkeby'. 2 | 3 | Running migration: 1_initial_migration.js 4 | Deploying Migrations... 5 | ... 0x0885b7021b73169a25dacdb895d1a028bab4791c58d8edb4df373f3123ba5209 6 | Migrations: 0xed747bfc08ec4f97f04278b7940acefbd8d7f4cc 7 | Saving successful migration to network... 8 | ... 0x010ef25e82c54a56cf8b55527720b9e9c11fe82eda74e9d9149c1661979293be 9 | Saving artifacts... 10 | Running migration: 2_deploy_contracts.js 11 | Running step... 12 | Deploying PurchaseLibrary... 13 | ... 0x2be7f975ba7507a67604ad519f99f56f0e45dd05f8e528fe3ad38c25e6cc7c47 14 | PurchaseLibrary: 0x0b3fc419df0ee2caa30b512fa098c7e0c0582d0c 15 | Linking PurchaseLibrary to ListingsRegistry 16 | Linking PurchaseLibrary to Listing 17 | Deploying UserRegistry... 18 | ... 0xd8285c96e319db762b13129fe202bf28e5c0f42d47ce94ae5448deefae3d43a9 19 | UserRegistry: 0xa080e08a6355debebbc29b9e35d8c001ad6eb3cd 20 | Deploying ListingsRegistry... 21 | ... 0xc18d795c699d8e6d804b9e83ac2f3b2ce218667f249852cc4ed886c4d646abd2 22 | ListingsRegistry: 0xf9555a250a36f59c6b74aabec018b8f55da72ec4 23 | Deploying KeyHolderLibrary... 24 | ... 0xc2fc5a949183df415a9b5e7a29644689982a6cbf0a39ba5739ab75cf1485b15f 25 | KeyHolderLibrary: 0xb67eaa8e3044c2cfbe7a4bd9192111712db0d09b 26 | Linking KeyHolderLibrary to KeyHolder 27 | Linking KeyHolderLibrary to ClaimHolderLibrary 28 | Deploying ClaimHolderLibrary... 29 | ... 0x312336e48d659b34b5719648c8b4655255ba82ae1a07b3dc9928b4588e2c56e9 30 | ClaimHolderLibrary: 0x7c410ddc563cbb03f9d21fd1616c597e7c66318f 31 | Linking ClaimHolderLibrary to ClaimHolder 32 | Linking KeyHolderLibrary to ClaimHolder 33 | Linking ClaimHolderLibrary to ClaimHolderRegistered 34 | Linking KeyHolderLibrary to ClaimHolderRegistered 35 | Linking ClaimHolderLibrary to ClaimHolderPresigned 36 | Linking KeyHolderLibrary to ClaimHolderPresigned 37 | Linking ClaimHolderLibrary to OriginIdentity 38 | Linking KeyHolderLibrary to OriginIdentity 39 | Deploying OriginIdentity... 40 | ... 0x8215c58d6df2d026f86a72c84db6b8df889885dc1154e3a8777be73e9f5c45dd 41 | OriginIdentity: 0xc8467dc9155f9d7045ea478c85dc074aecb893c6 42 | Saving successful migration to network... 43 | ... 0x7af8c2eea29e1deee826228fec79f39ba55ab2522c1a678409c25060ebfa262d 44 | Saving artifacts... 45 | Running migration: 3_create_sample_listings.js 46 | Running step... 47 | default_account: 0x1a5c29c94d03c4c8f7414564cbd57295d61e898f 48 | a_seller_account: 0x564aae0251d49d1f8d4d8d9e5da08f8cceff9ef2 49 | a_buyer_account: 0x47d4e5afd1624ea63cd7ace59d5dc1d87cff3bb5 50 | another_buyer_account: 0xb7cc090637f6af85e3ed8cc623a5efe95b90b9f9 51 | ... 0x506f96280576400dbf5408918658a38fcf5bc6e48daf472c6d34bf14f14b0564 52 | ... 0x82b59071c9938df411b11c176a2874214ee01e347ff18afc9c3894fb5e57ff59 53 | ... 0x361456cd2d0eccdf9aab7dcf1a3e710d0a6f6c909e7ae6ec43b59d68aec287ea 54 | ... 0xe193bdb6da94c91a8a9da9f0fc433ee1d7199b1b44834ea9a56b13b621514f7e 55 | ... 0x7bd7a8539cb02b2fa2057831ce4d49868a1fce9facbcea6191075abac275607c 56 | Saving successful migration to network... 57 | ... 0x8b0f922176a5dfe75cb0183e1b628d3aecf3408373114a7f2499a3de4bf64264 58 | Saving artifacts... 59 | Running migration: 4_add_sample_issuers.js 60 | Running step... 61 | ... 
0xa81b0b0fb36b0e35a26b4f10a971cb06f78fefccf8e70629796cedf2ca87d080 62 | ... 0xfbb19c13527aa5d156d63f11b17d22dbee15271c1b3db5449c9fb50fb90ae72a 63 | Saving successful migration to network... 64 | ... 0xa656ced07ec1236e03fcdcde4d46d548dad1b2f9a1afda95741ab6a96ed2231d 65 | Saving artifacts... 66 | -------------------------------------------------------------------------------- /contracts/releases/0.7.0/0.7.0_rinkeby.log: -------------------------------------------------------------------------------- 1 | Using network 'rinkeby'. 2 | 3 | Running migration: 1_initial_migration.js 4 | Deploying Migrations... 5 | ... 0xfef13dcdb8581ee85e1cc60bfeac86a25e168439257d4c7f68a5a304520f1d06 6 | Migrations: 0xa4dd200833725502d01a62b7fc19d2ad2ef86579 7 | Saving successful migration to network... 8 | ... 0x6c343e3f350e5910b3cb5bd62bed18f484555ed90945c340efcdef008f992112 9 | Saving artifacts... 10 | Running migration: 2_deploy_contracts.js 11 | Running step... 12 | Deploying PurchaseLibrary... 13 | ... 0x33249f338020ccfa40640acf48b3cfcc1eba845e3f280489e256eb06799879d8 14 | PurchaseLibrary: 0x97207ce0e1d00cb386ec8c056ca24d3ee90222dd 15 | Linking PurchaseLibrary to ListingsRegistry 16 | Linking PurchaseLibrary to UnitListing 17 | Linking PurchaseLibrary to FractionalListing 18 | Deploying ListingsRegistryStorage... 19 | ... 0xc8475313274193d18d08259f1a898bf0ee3eb13318ddf50b4415b11979a38261 20 | ListingsRegistryStorage: 0x12593d9f73d8cc7056dc073617380694e92d3a49 21 | Deploying ListingsRegistry... 22 | ... 0x0926828673caec35d70c3edf28cfa8b1086d145c27d75d789210f18cdbdd1cc4 23 | ListingsRegistry: 0x4f041f3ec338f85048831d1dff15433d3cb3983f 24 | Deploying UserRegistry... 25 | ... 0xe5215d142166c204c7a3e686302faead9994ed332871e4c942f63fed78c94386 26 | ... 0x6b35773c40a675cb6161446b0cf8151481caeb01fb3d5947d2093e55728813ae 27 | UserRegistry: 0xa8c4743d0e63c91ebe2f391d62a755535a458174 28 | Deploying KeyHolderLibrary... 29 | ... 0x013adf215c67f630eeaa7f5151cd475b2f1cf80cca1d8fc022e3cc2b36202b3e 30 | KeyHolderLibrary: 0x2d8d9390c4d26529f2f1777a9fff427993b112ac 31 | Linking KeyHolderLibrary to KeyHolder 32 | Linking KeyHolderLibrary to ClaimHolderLibrary 33 | Deploying ClaimHolderLibrary... 34 | ... 0x9d75a56f82e7db8bb15a63fa79f27588ef7cc789f921dbb22f0132780c23d3d3 35 | ClaimHolderLibrary: 0xe7f86637fd14c291aebdf5c152fd0a6811024a6e 36 | Linking ClaimHolderLibrary to ClaimHolder 37 | Linking KeyHolderLibrary to ClaimHolder 38 | Linking ClaimHolderLibrary to ClaimHolderRegistered 39 | Linking KeyHolderLibrary to ClaimHolderRegistered 40 | Linking ClaimHolderLibrary to ClaimHolderPresigned 41 | Linking KeyHolderLibrary to ClaimHolderPresigned 42 | Linking ClaimHolderLibrary to OriginIdentity 43 | Linking KeyHolderLibrary to OriginIdentity 44 | Deploying OriginIdentity... 45 | ... 0xb5dcb1bab30037d677c52c03b4a6b11919bf635e5c2cbeab303999dd4c92a2b6 46 | OriginIdentity: 0x64bfb1657a382ebdfaf1f582b00706d99e911393 47 | Saving successful migration to network... 48 | ... 0x3aa7bb564bdb85f1ba34bee9e5edd8378518707e1dc5bf43ee176961e5a29896 49 | Saving artifacts... 50 | Running migration: 3_create_sample_listings.js 51 | Running step... 52 | default_account: 0xff2ba846ab52edbd724a5ef674abf5a763849b61 53 | a_seller_account: 0x3003f9dcfdc17e63cfe7023130b804829b369882 54 | a_buyer_account: 0xc1d1510b22f8119c3186637198821c03f1dbb2be 55 | another_buyer_account: 0x743e2d6dfc80dcae9c05511cdf8595fd3a6ef725 56 | ... 0x6d1b83a921ed43d30e56993ca97886c4ffcea16c7fe876bd17cac9ef417b9746 57 | ... 
0x36f48fdf02e2de1632ab76dd3a1ecb197fab0090425bbe6126679a7ce68b6942 58 | ... 0x5f7437924c9a4787cc443bfad7588242c31659b40fb638e3ce13de7a4115d969 59 | ... 0x2b220bfbaf6fefc40b4e9013173d952b11d17718af12639cc94d1c68ad4f427e 60 | ... 0x45aa213a67e88f63c44f7b9a0b3f293151d779fb0c0845338ad61f34f36db8f6 61 | Saving successful migration to network... 62 | ... 0xe97e91661b598b9ebc20b438ce4790d556d21a47f17abaad49966897fd5d3c71 63 | Saving artifacts... 64 | Running migration: 4_add_sample_issuers.js 65 | Running step... 66 | ... 0x256e534005a16213d73033863f065f3d54247bcfe00b2fd056998da54e5da64e 67 | Saving successful migration to network... 68 | ... 0xf164a794911f8635eda64aff9d078e40eba92c0f278e75610a251dc4ddf60f59 69 | Saving artifacts... 70 | -------------------------------------------------------------------------------- /contracts/releases/0.7.0/0.7.0_ropsten.log: -------------------------------------------------------------------------------- 1 | Using network 'ropsten'. 2 | 3 | Running migration: 1_initial_migration.js 4 | Deploying Migrations... 5 | ... 0xf5be4b8c40ac5883fc10bcd9943250e78f3d4d0ef0b33252e8804a9603987815 6 | Migrations: 0xbba53b5f0ba938310e67ffd0aef84c088987f6ff 7 | Saving successful migration to network... 8 | ... 0xe9f312e942e823c86b91ea13a78917eed5e410b23538231774508ef532c41c20 9 | Saving artifacts... 10 | Running migration: 2_deploy_contracts.js 11 | Running step... 12 | Deploying PurchaseLibrary... 13 | ... 0x34d2765cc5c4dd6cb6fc7bb89d83619a1815b8dbe600a5e8df5c8252497d97f8 14 | PurchaseLibrary: 0x50945af445717052aca9ce81b7762804e83a3f89 15 | Linking PurchaseLibrary to ListingsRegistry 16 | Linking PurchaseLibrary to UnitListing 17 | Linking PurchaseLibrary to FractionalListing 18 | Deploying ListingsRegistryStorage... 19 | ... 0xdb874e3b4e2eb6e0ac23812246aeb9dbe27295eca010bbbbb1b89825de50fed3 20 | ListingsRegistryStorage: 0x2293f5592885de85c68b03f69522023957ad5133 21 | Deploying ListingsRegistry... 22 | ... 0x2474dd4571e69a3e10004c6d08f27b5655c31808d0164f8a41a2520de5565093 23 | ListingsRegistry: 0x67057a6ddbc10d1c75a8812f770854f8627fc7b6 24 | Deploying UserRegistry... 25 | ... 0xffa0c58ceb20c688e29b87f8d74b43b8217cba4290672f83897fa895cb402fdb 26 | ... 0x48af395eeff94e191fad9e22406db6b8770e35367435d74a11386d987cbd1824 27 | UserRegistry: 0x66dc9c44e49f3fb275a3c63d02d78a819c58f458 28 | Deploying KeyHolderLibrary... 29 | ... 0x5633858cab56908677d6b68ff5ae209eadace14ac7a9161a652d25a329183ea0 30 | KeyHolderLibrary: 0x4a80ed225115cb80dbeff1bba38d09c43b06c9c0 31 | Linking KeyHolderLibrary to KeyHolder 32 | Linking KeyHolderLibrary to ClaimHolderLibrary 33 | Deploying ClaimHolderLibrary... 34 | ... 0xe8cb70e99c4b1b09d821397b0aad314e885f132abdc5c984b5052bc224da1590 35 | ClaimHolderLibrary: 0x8ed1562d9ad4bcf2a291721c31ab12a96f094b2b 36 | Linking ClaimHolderLibrary to ClaimHolder 37 | Linking KeyHolderLibrary to ClaimHolder 38 | Linking ClaimHolderLibrary to ClaimHolderRegistered 39 | Linking KeyHolderLibrary to ClaimHolderRegistered 40 | Linking ClaimHolderLibrary to ClaimHolderPresigned 41 | Linking KeyHolderLibrary to ClaimHolderPresigned 42 | Linking ClaimHolderLibrary to OriginIdentity 43 | Linking KeyHolderLibrary to OriginIdentity 44 | Deploying OriginIdentity... 45 | ... 0x3b922ccbc71a8beafa0c3c5d0238eb6fc9d373e7533a2313008918ea9c30628e 46 | OriginIdentity: 0xe1bf90c1fcac71cf718a3f239f71023b0f64281d 47 | Saving successful migration to network... 48 | ... 0x3aa7609083d390960fbcb1d1f59d862d391149aa1d43434e52190651ad029af8 49 | Saving artifacts... 
50 | Running migration: 3_create_sample_listings.js 51 | Running step... 52 | default_account: 0xff2ba846ab52edbd724a5ef674abf5a763849b61 53 | a_seller_account: 0x3003f9dcfdc17e63cfe7023130b804829b369882 54 | a_buyer_account: 0xc1d1510b22f8119c3186637198821c03f1dbb2be 55 | another_buyer_account: 0x743e2d6dfc80dcae9c05511cdf8595fd3a6ef725 56 | ... 0xb4e1ae7a7be627999e62400b8049fae40a75710b84b4086cc973c2504e594545 57 | ... 0xcf197a86449e42d5e9979aa6123d904e84166916d03698c5c7bec72695ed2eb1 58 | ... 0xdc8be01fd72832cecaaf8b8c47d5cce6d6dd66186473ba017fb1b85cf566a14b 59 | ... 0x2b87d8ac2e978f7275f1b74ad2b1b55c090f4a0f9e612548cdfd415fa02cfe39 60 | ... 0xd063dca6aa15247a41bcdb5a05c79f30c16199fbf8152e459d871bb27877a4ed 61 | Saving successful migration to network... 62 | ... 0x0fae27899f07c5abeca4b73a5014894d8436a77cc12696bddf69795ba0724619 63 | Saving artifacts... 64 | Running migration: 4_add_sample_issuers.js 65 | Running step... 66 | ... 0x666f5f75717e1802ccf5bc34590bdba43c2cd57871f45d8942fc1472b9822f5d 67 | Saving successful migration to network... 68 | ... 0x68ec294111b7d100d1b7118e946be889a0d73aa5677c672a87eec6158c29da85 69 | Saving artifacts... 70 | -------------------------------------------------------------------------------- /contracts/releases/0.7.0/README.md: -------------------------------------------------------------------------------- 1 | ⚠️ Note that the addresses of identity contracts were modified by hand (by Stan) so as to preserve existing attestations from previous releases. 2 | -------------------------------------------------------------------------------- /contracts/releases/0.8.4/0.8.4_mainnet.log: -------------------------------------------------------------------------------- 1 | Configured truffle to use PrivateKey provider for Mainnet. 2 | Using network 'mainnet'. 3 | 4 | Running migration: 11_update_claim_holder_library.js 5 | Running step... 6 | Replacing ClaimHolderLibrary... 7 | ... 0x4b873babbc32e2b9442c1f6fb741f11dccb7e35f08a43f90cb62a3743a6af886 8 | ClaimHolderLibrary: 0x79b152da4ad8fb774700639bed68a965b0440644 9 | Saving successful migration to network... 10 | ... 0xf99c0ddadb71c53a08a75253d1430503761ee6527eac6152364ded4be429a9ef 11 | Saving artifacts... 12 | -------------------------------------------------------------------------------- /contracts/releases/0.8.4/0.8.4_rinkeby.log: -------------------------------------------------------------------------------- 1 | Compiling ./contracts/Migrations.sol... 2 | Compiling ./contracts/identity/ClaimHolder.sol... 3 | Compiling ./contracts/identity/ClaimHolderLibrary.sol... 4 | Compiling ./contracts/identity/ClaimHolderPresigned.sol... 5 | Compiling ./contracts/identity/ClaimHolderRegistered.sol... 6 | Compiling ./contracts/identity/ClaimVerifier.sol... 7 | Compiling ./contracts/identity/ERC725.sol... 8 | Compiling ./contracts/identity/ERC735.sol... 9 | Compiling ./contracts/identity/KeyHolder.sol... 10 | Compiling ./contracts/identity/KeyHolderLibrary.sol... 11 | Compiling ./contracts/identity/OriginIdentity.sol... 12 | Compiling ./contracts/identity/V00_UserRegistry.sol... 13 | Compiling ./contracts/marketplace/v00/Marketplace.sol... 14 | Compiling ./contracts/multisig/IMultiSigWallet.sol... 15 | Compiling ./contracts/token/OriginToken.sol... 16 | Compiling ./contracts/token/TokenMigration.sol... 17 | Compiling ./contracts/token/WhitelistedPausableToken.sol... 18 | Compiling ./../node_modules/openzeppelin-solidity/contracts/lifecycle/Pausable.sol... 
19 | Compiling ./../node_modules/openzeppelin-solidity/contracts/math/SafeMath.sol... 20 | Compiling ./../node_modules/openzeppelin-solidity/contracts/ownership/Ownable.sol... 21 | Compiling ./../node_modules/openzeppelin-solidity/contracts/token/ERC20/BasicToken.sol... 22 | Compiling ./../node_modules/openzeppelin-solidity/contracts/token/ERC20/BurnableToken.sol... 23 | Compiling ./../node_modules/openzeppelin-solidity/contracts/token/ERC20/DetailedERC20.sol... 24 | Compiling ./../node_modules/openzeppelin-solidity/contracts/token/ERC20/ERC20.sol... 25 | Compiling ./../node_modules/openzeppelin-solidity/contracts/token/ERC20/ERC20Basic.sol... 26 | Compiling ./../node_modules/openzeppelin-solidity/contracts/token/ERC20/MintableToken.sol... 27 | Compiling ./../node_modules/openzeppelin-solidity/contracts/token/ERC20/PausableToken.sol... 28 | Compiling ./../node_modules/openzeppelin-solidity/contracts/token/ERC20/StandardToken.sol... 29 | Writing artifacts to ./build/contracts 30 | 31 | Using network 'rinkeby'. 32 | 33 | Running migration: 11_update_claim_holder_library.js 34 | Running step... 35 | Replacing ClaimHolderLibrary... 36 | ... 0xefe7eefc273f78224e00c6fe96c8efb1acc797865f6921c37f547599c6e5de0e 37 | ClaimHolderLibrary: 0x75601c2dc926b0e3ce41b1ae719290c62aba978e 38 | Saving successful migration to network... 39 | ... 0x3ddbabb698ca2eba32218345c08b83cf037a46b86267015249af813eaf4d27f3 40 | Saving artifacts... 41 | -------------------------------------------------------------------------------- /contracts/test-alt/Arbitrator.js: -------------------------------------------------------------------------------- 1 | import assert from 'assert' 2 | import helper from './_helper' 3 | 4 | // Account 0: Token owner. Marketplace owner 5 | // Account 1: Seller 6 | // Account 2: Buyer 7 | // Account 3: Dispute resolver 8 | 9 | describe('ArbitrableExample.sol', async function() { 10 | let accounts, deploy, web3 11 | let Arbitrator, Arbitrable 12 | 13 | before(async function() { 14 | ({ deploy, accounts, web3 } = await helper(`${__dirname}/..`)) 15 | 16 | Arbitrator = await deploy('CentralizedArbitrator', { 17 | from: accounts[0], 18 | path: `${__dirname}/contracts/arbitration/`, 19 | args: [0] 20 | }) 21 | 22 | Arbitrable = await deploy('ArbitrableExample', { 23 | from: accounts[0], 24 | path: `${__dirname}/contracts/arbitration/`, 25 | args: [Arbitrator._address] 26 | }) 27 | }) 28 | 29 | it('should allow a dispute to be created', async function() { 30 | const result = await Arbitrable.methods.startDispute().send({ 31 | value: web3.utils.toWei('0.1', 'ether') 32 | }) 33 | assert(result.events.Dispute) 34 | }) 35 | 36 | it('should allow a ruling to be given', async function() { 37 | const result = await Arbitrator.methods.giveRuling(0, 1).send() 38 | const { data, topics } = result.events['0'].raw 39 | const ruling = Arbitrable._jsonInterface.find(i => { 40 | return i.signature === topics[0] 41 | }) 42 | const decoded = web3.eth.abi.decodeLog(ruling.inputs, data, topics) 43 | assert.equal(ruling.name, 'Ruling') 44 | assert.equal(decoded._ruling, '1') 45 | }) 46 | 47 | it('should allow evidence to be submitted', async function() { 48 | const result = await Arbitrable.methods.submitEvidence(0, 'evidence').send() 49 | assert(result.events.Evidence) 50 | }) 51 | }) 52 | -------------------------------------------------------------------------------- /contracts/test-alt/ClaimHolderPresigned.js: -------------------------------------------------------------------------------- 1 | import assert from 
'assert' 2 | import helper, { contractPath } from './_helper' 3 | 4 | const signature_1 = 5 | '0xeb6123e537e17e2c67b67bbc0b93e6b25ea9eae276c4c2ab353bd7e853ebad2446cc7e91327f3737559d7a9a90fc88529a6b72b770a612f808ab0ba57a46866e1c' 6 | const signature_2 = 7 | '0x061ef9cdd7707d90d7a7d95b53ddbd94905cb05dfe4734f97744c7976f2776145fef298fd0e31afa43a103cd7f5b00e3b226b0d62e4c492d54bec02eb0c2a0901b' 8 | 9 | const dataHash_1 = 10 | '0x4f32f7a7d40b4d65a917926cbfd8fd521483e7472bcc4d024179735622447dc9' 11 | const dataHash_2 = 12 | '0xa183d4eb3552e730c2dd3df91384426eb88879869b890ad12698320d8b88cb48' 13 | 14 | describe('ClaimHolderPresigned', async function() { 15 | let accounts, deploy, soliditySha3, userRegistry, claimHolderPresigned, 16 | attestation_1, attestation_2 17 | 18 | before(async function() { 19 | ({ 20 | deploy, 21 | accounts, 22 | web3: { 23 | utils: { soliditySha3 } 24 | } 25 | } = await helper(`${__dirname}/..`)) 26 | 27 | attestation_1 = { 28 | topic: 1, 29 | scheme: 1, 30 | issuer: accounts[1], 31 | signature: signature_1, 32 | data: dataHash_1, 33 | uri: '' 34 | } 35 | attestation_2 = { 36 | topic: 2, 37 | scheme: 1, 38 | issuer: accounts[2], 39 | signature: signature_2, 40 | data: dataHash_2, 41 | uri: '' 42 | } 43 | 44 | userRegistry = await deploy('V00_UserRegistry', { 45 | from: accounts[3], 46 | path: `${contractPath}/identity/` 47 | }) 48 | claimHolderPresigned = await deploy('ClaimHolderPresigned', { 49 | from: accounts[0], 50 | path: `${contractPath}/identity/`, 51 | args: [ 52 | userRegistry._address, 53 | [attestation_1.topic, attestation_2.topic], 54 | [attestation_1.issuer, attestation_2.issuer], 55 | attestation_1.signature + attestation_2.signature.slice(2), 56 | attestation_1.data + attestation_2.data.slice(2), 57 | [32, 32] 58 | ] 59 | }) 60 | }) 61 | 62 | it('should deploy identity with attestations', async function() { 63 | // Check attestation 1 64 | const claimId_1 = soliditySha3( 65 | attestation_1.issuer, 66 | attestation_1.topic 67 | ) 68 | const fetchedClaim_1 = await claimHolderPresigned.methods.getClaim(claimId_1).call() 69 | assert.ok(fetchedClaim_1) 70 | let { 71 | topic, 72 | scheme, 73 | issuer, 74 | signature, 75 | data, 76 | uri 77 | } = fetchedClaim_1 78 | assert.equal(Number(topic), attestation_1.topic) 79 | assert.equal(Number(scheme), attestation_1.scheme) 80 | assert.equal(issuer, attestation_1.issuer) 81 | assert.equal(signature, attestation_1.signature) 82 | assert.equal(data, attestation_1.data) 83 | assert.equal(uri, attestation_1.uri) 84 | 85 | // Check attestation 2 86 | const claimId_2 = soliditySha3( 87 | attestation_2.issuer, 88 | attestation_2.topic 89 | ) 90 | const fetchedClaim_2 = await claimHolderPresigned.methods.getClaim(claimId_2).call() 91 | assert.ok(fetchedClaim_2); 92 | ({ 93 | topic, 94 | scheme, 95 | issuer, 96 | signature, 97 | data, 98 | uri 99 | } = fetchedClaim_2) 100 | assert.equal(Number(topic), attestation_2.topic) 101 | assert.equal(Number(scheme), attestation_2.scheme) 102 | assert.equal(issuer, attestation_2.issuer) 103 | assert.equal(signature, attestation_2.signature) 104 | assert.equal(data, attestation_2.data) 105 | assert.equal(uri, attestation_2.uri) 106 | 107 | // Check user registry 108 | const identityAddress = await userRegistry.methods.users(accounts[0]).call() 109 | assert.ok(identityAddress) 110 | assert.notEqual( 111 | identityAddress, 112 | '0x0000000000000000000000000000000000000000' 113 | ) 114 | }) 115 | }) 116 | -------------------------------------------------------------------------------- 
/contracts/test-alt/ClaimVerifier.js: -------------------------------------------------------------------------------- 1 | import assert from 'assert' 2 | import helper, { contractPath } from './_helper' 3 | 4 | describe('ClaimVerifier.sol', async function() { 5 | let web3, accounts, deploy, prvSigner, pubSigner 6 | let UserIdentity, ClaimIssuer, ClaimVerifier 7 | 8 | before(async function() { 9 | ({ deploy, accounts, web3 } = await helper(`${__dirname}/..`)) 10 | 11 | prvSigner = web3.utils.randomHex(32) 12 | pubSigner = web3.eth.accounts.privateKeyToAccount(prvSigner).address 13 | 14 | UserIdentity = await deploy('ClaimHolder', { 15 | from: accounts[0], 16 | path: `${contractPath}/identity/` 17 | }) 18 | ClaimIssuer = await deploy('ClaimHolder', { 19 | from: accounts[1], 20 | path: `${contractPath}/identity/` 21 | }) 22 | ClaimVerifier = await deploy('ClaimVerifier', { 23 | from: accounts[2], 24 | args: [ClaimIssuer._address], 25 | path: `${contractPath}/identity/` 26 | }) 27 | }) 28 | 29 | it('should allow verifier owner to addKey', async function() { 30 | const key = web3.utils.sha3(pubSigner) 31 | const result = await ClaimIssuer.methods 32 | .addKey(key, 3, 1) 33 | .send({ from: accounts[1] }) 34 | 35 | assert(result) 36 | }) 37 | 38 | it('should not allow new listing without identity claim', async function() { 39 | const res = await ClaimVerifier.methods 40 | .checkClaim(UserIdentity._address, 3) 41 | .send({ from: accounts[0] }) 42 | assert(res.events.ClaimInvalid) 43 | }) 44 | 45 | it('should allow identity owner to addClaim', async function() { 46 | const data = web3.utils.asciiToHex('Verified OK') 47 | const topic = 3 48 | const hashed = web3.utils.soliditySha3(UserIdentity._address, topic, data) 49 | const signed = await web3.eth.accounts.sign(hashed, prvSigner) 50 | 51 | const claimRes = await UserIdentity.methods 52 | .addClaim( 53 | topic, 54 | 2, 55 | ClaimIssuer._address, 56 | signed.signature, 57 | data, 58 | 'abc.com' 59 | ) 60 | .send({ from: accounts[0] }) 61 | 62 | assert(claimRes.events.ClaimAdded) 63 | }) 64 | 65 | it('should not allow new listing without identity claim', async function() { 66 | const res = await ClaimVerifier.methods 67 | .checkClaim(UserIdentity._address, 3) 68 | .send({ from: accounts[0] }) 69 | assert(res.events.ClaimValid) 70 | }) 71 | }) 72 | -------------------------------------------------------------------------------- /contracts/test-alt/TokenLib.js: -------------------------------------------------------------------------------- 1 | import assert from 'assert' 2 | import helper, { contractPath } from './_helper' 3 | import Token from '../../token/lib/token' 4 | 5 | // These tests are for the token library that the token CLI uses. We don't 6 | // validate the effects of various operations. That is left to the contract 7 | // tests. 
8 | describe('Token CLI Library', async function() { 9 | const supply = 1000 10 | const networkId = '999' 11 | 12 | let accounts, deploy, web3 13 | let owner, nonOwner 14 | let OriginToken 15 | let TokenLib 16 | 17 | beforeEach(async function() { 18 | ({ 19 | deploy, 20 | accounts, 21 | web3, 22 | } = await helper(`${__dirname}/..`)) 23 | owner = accounts[0] 24 | nonOwner = accounts[1] 25 | OriginToken = await deploy('OriginToken', { 26 | from: owner, 27 | path: `${contractPath}/token/`, 28 | args: [supply] 29 | }) 30 | TokenLib = new Token({ 31 | networkId, 32 | verbose: false, 33 | providers: { 34 | '999': web3.currentProvider 35 | }, 36 | contractAddress: OriginToken._address 37 | }) 38 | }) 39 | 40 | it('credits an account', async () => { 41 | const amount = 100 42 | await TokenLib.credit(networkId, nonOwner, amount) 43 | assert.equal(await TokenLib.balance(networkId, owner), supply - amount) 44 | assert.equal(await TokenLib.balance(networkId, nonOwner), amount) 45 | }) 46 | 47 | it('pauses and unpauses token transfers', async () => { 48 | await TokenLib.pause(networkId) 49 | assert(await OriginToken.methods.paused().call()) 50 | await TokenLib.unpause(networkId) 51 | assert(!(await OriginToken.methods.paused().call())) 52 | }) 53 | 54 | it('sets the owner of the token', async () => { 55 | const newOwner = accounts[1] 56 | TokenLib.validOwners = {} 57 | TokenLib.validOwners[networkId] = [ newOwner ] 58 | await TokenLib.setOwner(networkId, newOwner) 59 | assert.equal(newOwner, await TokenLib.owner(networkId)) 60 | }) 61 | 62 | it('does not set owner to non-whitelisted address', async () => { 63 | const invalidOwner = accounts[5] 64 | TokenLib.validOwners = {} 65 | TokenLib.validOwners[networkId] = [ owner ] 66 | try { 67 | await TokenLib.setOwner(networkId, invalidOwner) 68 | assert(false) 69 | } catch(e) { 70 | assert(e.message.match(/not a valid owner/)) 71 | } 72 | }) 73 | 74 | it('allows owner to be any address with an empty whitelist', async () => { 75 | const newOwner = accounts[1] 76 | TokenLib.validOwners = {} 77 | TokenLib.validOwners[networkId] = [ ] 78 | await TokenLib.setOwner(networkId, newOwner) 79 | assert.equal(newOwner, await TokenLib.owner(networkId)) 80 | }) 81 | 82 | it('sends a multi-sig transaction', async () => { 83 | const owners = accounts.slice(0, 3) 84 | const MultiSigWallet = await deploy('MultiSigWallet', { 85 | from: owner, 86 | path: `${__dirname}/contracts/`, 87 | args: [owners, 2] 88 | }) 89 | 90 | // Make the multi-sig wallet the contract owner. 91 | TokenLib.validOwners = {} 92 | TokenLib.validOwners[networkId] = [ MultiSigWallet._address ] 93 | await TokenLib.setOwner(networkId, MultiSigWallet._address) 94 | 95 | // Send pause contract call with 1 of 3 signatures. 96 | TokenLib.config.multisig = MultiSigWallet._address 97 | await TokenLib.pause(networkId) 98 | assert(!(await OriginToken.methods.paused().call())) 99 | 100 | // Confirm the multi-sig transaction with a second signature, which will 101 | // then pause the token. 
102 | await MultiSigWallet.methods.confirmTransaction(0).send({ from: owners[1] }) 103 | assert(await OriginToken.methods.paused().call()) 104 | }) 105 | }) 106 | -------------------------------------------------------------------------------- /contracts/test-alt/UserRegistry.js: -------------------------------------------------------------------------------- 1 | import assert from 'assert' 2 | import helper, { contractPath } from './_helper' 3 | 4 | describe('UserRegistry', async function() { 5 | let deploy, accounts, userRegistry 6 | 7 | beforeEach(async () => { 8 | ({ 9 | deploy, 10 | accounts 11 | } = await helper(`${__dirname}/..`)) 12 | }) 13 | 14 | it('should be able to register a user', async function() { 15 | userRegistry = await deploy('V00_UserRegistry', { 16 | from: accounts[0], 17 | path: `${contractPath}/identity/` 18 | }) 19 | 20 | const register = await userRegistry.methods.registerUser().send({ from: accounts[1] }) 21 | const identityAddress = await userRegistry.methods.users(accounts[1]).call() 22 | const newUserEvents = await userRegistry.getPastEvents('NewUser') 23 | const newUserEvent = newUserEvents.length && newUserEvents[0] 24 | assert.equal(identityAddress, accounts[1]) 25 | assert.equal(newUserEvent.returnValues['_address'], accounts[1]) 26 | assert.equal(newUserEvent.returnValues['_identity'], accounts[1]) 27 | }) 28 | }) 29 | -------------------------------------------------------------------------------- /contracts/test-alt/_gasPriceInDollars.js: -------------------------------------------------------------------------------- 1 | export default function({ gasPriceGwei = 8, pricePerEth = 500 }) { 2 | return function(gas) { 3 | const pricePerGwei = pricePerEth / 1000000000, 4 | priceInUsd = gas * gasPriceGwei * pricePerGwei 5 | return priceInUsd ? '$' + (Math.round(priceInUsd * 100) / 100).toFixed(2) : '' 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /contracts/test-alt/contracts/OriginArbitrator.sol: -------------------------------------------------------------------------------- 1 | pragma solidity ^0.4.24; 2 | 3 | /** 4 | * @title A Marketplace contract for managing listings, offers, payments, escrow and arbitration 5 | * @author Nick Poulden 6 | * 7 | * Listings may be priced in Eth or ERC20. 
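 * (OriginArbitrator, defined below, keeps a disputeID => {listingID, offerID, refund, marketplace} mapping and, when the arbitration contract rules, forwards the ruling to Marketplace.executeRuling.)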
8 | */ 9 | 10 | import './arbitration/Arbitrable.sol'; 11 | 12 | contract Marketplace { 13 | function executeRuling(uint listingID, uint offerID, uint _ruling, uint _refund) public; 14 | } 15 | 16 | contract OriginArbitrator is Arbitrable { 17 | 18 | struct DisputeMap { 19 | uint listingID; 20 | uint offerID; 21 | uint refund; 22 | address marketplace; 23 | } // Maps back from disputeID to listing + offer 24 | mapping(uint => DisputeMap) public disputes; // disputeID => DisputeMap 25 | 26 | Arbitrator public arbitrator; // Address of arbitration contract 27 | 28 | constructor(Arbitrator _arbitrator) Arbitrable(_arbitrator, "", "") public { 29 | arbitrator = Arbitrator(_arbitrator); 30 | } 31 | 32 | function createDispute(uint listingID, uint offerID, uint refund) public returns (uint) { 33 | uint disputeID = arbitrator.createDispute(3, '0x00'); // 4 choices 34 | 35 | disputes[disputeID] = DisputeMap({ 36 | listingID: listingID, 37 | offerID: offerID, 38 | marketplace: msg.sender, 39 | refund: refund 40 | }); 41 | emit Dispute(arbitrator, disputeID, "Buyer wins;Seller wins"); 42 | return disputeID; 43 | } 44 | 45 | // @dev Called from arbitration contract 46 | function executeRuling(uint _disputeID, uint _ruling) internal { 47 | DisputeMap storage dispute = disputes[_disputeID]; 48 | Marketplace marketplace = Marketplace(dispute.marketplace); 49 | 50 | marketplace.executeRuling( 51 | dispute.listingID, 52 | dispute.offerID, 53 | _ruling, 54 | dispute.refund 55 | ); 56 | 57 | delete disputes[_disputeID]; // Save some gas by deleting dispute 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /contracts/test-alt/contracts/Spender.sol: -------------------------------------------------------------------------------- 1 | pragma solidity ^0.4.24; 2 | 3 | import "../../../node_modules/openzeppelin-solidity/contracts/token/ERC20/ERC20.sol"; 4 | 5 | contract Spender { 6 | ERC20 token; 7 | 8 | // These are read back by the contract tests. 
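  // Spender.transferTokens pulls `amount` tokens from `_sender` via transferFrom and then records every argument it received, so the tests can verify that values of each type round-trip intact.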
9 | address public sender; 10 | bytes32 public storedBytes32; 11 | bool public storedBool; 12 | uint8 public storedUint8; 13 | uint32 public storedUint32; 14 | uint256 public storedUint256; 15 | int8 public storedInt8; 16 | int256 public storedInt256; 17 | 18 | constructor(address _token) public { 19 | token = ERC20(_token); 20 | } 21 | 22 | function transferTokens( 23 | address _sender, 24 | uint256 amount, 25 | bytes32 _bytes32, 26 | bool _bool, 27 | uint8 _uint8, 28 | uint32 _uint32, 29 | uint256 _uint256, 30 | int8 _int8, 31 | int256 _int256 32 | ) 33 | public 34 | payable 35 | { 36 | require(token.transferFrom(_sender, this, amount), "transferFrom failed"); 37 | sender = _sender; 38 | storedBytes32 = _bytes32; 39 | storedBool = _bool; 40 | storedUint8 = _uint8; 41 | storedUint32 = _uint32; 42 | storedUint256 = _uint256; 43 | storedInt8 = _int8; 44 | storedInt256 = _int256; 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /contracts/test-alt/contracts/Token.sol: -------------------------------------------------------------------------------- 1 | pragma solidity ^0.4.24; 2 | 3 | import '../../node_modules/openzeppelin-solidity/contracts/token/ERC20/StandardToken.sol'; 4 | 5 | contract Token is StandardToken { 6 | string public name; 7 | string public symbol; 8 | uint8 public decimals; 9 | 10 | constructor(string _name, string _symbol, uint8 _decimals, uint _supply) public { 11 | name = _name; 12 | symbol = _symbol; 13 | decimals = _decimals; 14 | totalSupply_ = _supply; 15 | balances[msg.sender] = _supply; 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /contracts/test-alt/contracts/arbitration/ArbitrableExample.sol: -------------------------------------------------------------------------------- 1 | pragma solidity ^0.4.24; 2 | 3 | import './Arbitrable.sol'; 4 | 5 | contract ArbitrableExample is Arbitrable { 6 | uint dispute; 7 | uint ruling; 8 | 9 | constructor(Arbitrator _arbitrator) Arbitrable(_arbitrator, "", "") public { 10 | } 11 | 12 | function executeRuling(uint _disputeID, uint _ruling) internal { 13 | dispute = _disputeID; 14 | ruling = _ruling; 15 | } 16 | 17 | function startDispute() payable { 18 | uint disputeID = arbitrator.createDispute(1, '0x00'); 19 | emit Dispute(arbitrator, disputeID, "Buyer wins;Seller wins"); 20 | } 21 | 22 | function submitEvidence(uint _disputeID, string _evidence) payable { 23 | emit Evidence(arbitrator, _disputeID, msg.sender, _evidence); 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /contracts/test/openzeppelin/README.md: -------------------------------------------------------------------------------- 1 | # Imported OpenZeppelin ERC-20 token tests 2 | 3 | This directory contains OpenZeppelin's token tests with the minimum set of 4 | modifications to work against the Origin token. 5 | 6 | One of the goals was to reduce the work needed to merge in relevant changes 7 | from the upstream repo by isolating changes. This is accomplished by ensuring 8 | that each test file imported from OpenZeppelin has these changes (at most): 9 | 10 | 1. A comment directing `eslint` to ignore formatting issues, because 11 | OpenZeppelin tests don't comply with our normal formatting guidelines. 12 | 2. Import mock constructor needed to deploy the token for the test. 13 | 14 | Each imported test file should have a 2 line diff from the original. 15 | 16 | These files were imported from OpenZeppelin **1.12.0**. 
This might be newer than 17 | the version we're importing, but this gets us increased test coverage with no 18 | downside. 19 | -------------------------------------------------------------------------------- /contracts/test/openzeppelin/helpers/assertJump.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable semi,no-unused-vars,no-extra-semi */ 2 | async function assertJump(promise) { 3 | try { 4 | await promise 5 | assert.fail('Expected invalid opcode not received') 6 | } catch (error) { 7 | const invalidOpcodeReceived = error.message.search('invalid opcode') >= 0 8 | assert( 9 | invalidOpcodeReceived, 10 | `Expected "invalid opcode", got ${error} instead` 11 | ) 12 | } 13 | } 14 | 15 | module.exports = { 16 | assertJump 17 | } 18 | -------------------------------------------------------------------------------- /contracts/test/openzeppelin/helpers/assertRevert.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable semi,no-unused-vars,no-extra-semi */ 2 | async function assertRevert(promise) { 3 | try { 4 | await promise 5 | } catch (error) { 6 | const revertFound = error.message.search('revert') >= 0 7 | assert(revertFound, `Expected "revert", got ${error} instead`) 8 | return 9 | } 10 | assert.fail('Expected revert not received') 11 | } 12 | 13 | module.exports = { 14 | assertRevert 15 | } 16 | -------------------------------------------------------------------------------- /contracts/test/openzeppelin/helpers/expectEvent.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable semi,no-unused-vars,no-extra-semi */ 2 | const should = require('chai').should() 3 | 4 | async function inLogs(logs, eventName, eventArgs = {}) { 5 | const event = logs.find(e => e.event === eventName) 6 | should.exist(event) 7 | for (const [k, v] of Object.entries(eventArgs)) { 8 | should.exist(event.args[k]) 9 | event.args[k].should.eq(v) 10 | } 11 | return event 12 | } 13 | 14 | async function inTransaction(tx, eventName, eventArgs = {}) { 15 | const { logs } = await tx 16 | return inLogs(logs, eventName, eventArgs) 17 | } 18 | 19 | module.exports = { 20 | inLogs, 21 | inTransaction 22 | } 23 | -------------------------------------------------------------------------------- /contracts/test/openzeppelin/helpers/originTokenMocks.js: -------------------------------------------------------------------------------- 1 | const OriginToken = artifacts.require('OriginToken') 2 | 3 | export const StandardTokenMock = { 4 | new: async function(owner, initialSupply) { 5 | return await OriginToken.new(initialSupply, { from: owner }) 6 | } 7 | } 8 | 9 | export const BasicTokenMock = StandardTokenMock 10 | 11 | export const BurnableTokenMock = StandardTokenMock 12 | 13 | export const MintableTokenMock = { 14 | new: async function(opts) { 15 | return await OriginToken.new(0, opts) 16 | } 17 | } 18 | 19 | export const PausableTokenMock = StandardTokenMock 20 | -------------------------------------------------------------------------------- /contracts/test/openzeppelin/token/ERC20/BasicToken.test.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable semi,no-unused-vars,no-extra-semi */ 2 | const { assertRevert } = require('../../helpers/assertRevert') 3 | import { BasicTokenMock as BasicToken } from '../../helpers/originTokenMocks' 4 | 5 | contract('StandardToken', function([_, owner, recipient, anotherAccount]) { 6 | const 
ZERO_ADDRESS = '0x0000000000000000000000000000000000000000' 7 | 8 | beforeEach(async function() { 9 | this.token = await BasicToken.new(owner, 100) 10 | }) 11 | 12 | describe('total supply', function() { 13 | it('returns the total amount of tokens', async function() { 14 | const totalSupply = await this.token.totalSupply() 15 | 16 | assert.equal(totalSupply, 100) 17 | }) 18 | }) 19 | 20 | describe('balanceOf', function() { 21 | describe('when the requested account has no tokens', function() { 22 | it('returns zero', async function() { 23 | const balance = await this.token.balanceOf(anotherAccount) 24 | 25 | assert.equal(balance, 0) 26 | }) 27 | }) 28 | 29 | describe('when the requested account has some tokens', function() { 30 | it('returns the total amount of tokens', async function() { 31 | const balance = await this.token.balanceOf(owner) 32 | 33 | assert.equal(balance, 100) 34 | }) 35 | }) 36 | }) 37 | 38 | describe('transfer', function() { 39 | describe('when the recipient is not the zero address', function() { 40 | const to = recipient 41 | 42 | describe('when the sender does not have enough balance', function() { 43 | const amount = 101 44 | 45 | it('reverts', async function() { 46 | await assertRevert(this.token.transfer(to, amount, { from: owner })) 47 | }) 48 | }) 49 | 50 | describe('when the sender has enough balance', function() { 51 | const amount = 100 52 | 53 | it('transfers the requested amount', async function() { 54 | await this.token.transfer(to, amount, { from: owner }) 55 | 56 | const senderBalance = await this.token.balanceOf(owner) 57 | assert.equal(senderBalance, 0) 58 | 59 | const recipientBalance = await this.token.balanceOf(to) 60 | assert.equal(recipientBalance, amount) 61 | }) 62 | 63 | it('emits a transfer event', async function() { 64 | const { logs } = await this.token.transfer(to, amount, { 65 | from: owner 66 | }) 67 | 68 | assert.equal(logs.length, 1) 69 | assert.equal(logs[0].event, 'Transfer') 70 | assert.equal(logs[0].args.from, owner) 71 | assert.equal(logs[0].args.to, to) 72 | assert(logs[0].args.value.eq(amount)) 73 | }) 74 | }) 75 | }) 76 | 77 | describe('when the recipient is the zero address', function() { 78 | const to = ZERO_ADDRESS 79 | 80 | it('reverts', async function() { 81 | await assertRevert(this.token.transfer(to, 100, { from: owner })) 82 | }) 83 | }) 84 | }) 85 | }) 86 | -------------------------------------------------------------------------------- /contracts/test/openzeppelin/token/ERC20/BurnableToken.behaviour.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable semi,no-unused-vars,no-extra-semi */ 2 | const { assertRevert } = require('../../helpers/assertRevert') 3 | const { inLogs } = require('../../helpers/expectEvent') 4 | 5 | const BigNumber = web3.BigNumber 6 | const ZERO_ADDRESS = '0x0000000000000000000000000000000000000000' 7 | 8 | require('chai') 9 | .use(require('chai-bignumber')(BigNumber)) 10 | .should() 11 | 12 | function shouldBehaveLikeBurnableToken([owner], initialBalance) { 13 | describe('as a basic burnable token', function() { 14 | const from = owner 15 | 16 | describe('when the given amount is not greater than balance of the sender', function() { 17 | const amount = 100 18 | 19 | beforeEach(async function() { 20 | ;({ logs: this.logs } = await this.token.burn(amount, { from })) 21 | }) 22 | 23 | it('burns the requested amount', async function() { 24 | const balance = await this.token.balanceOf(from) 25 | balance.should.be.bignumber.equal(initialBalance - 
amount) 26 | }) 27 | 28 | it('emits a burn event', async function() { 29 | const event = await inLogs(this.logs, 'Burn') 30 | event.args.burner.should.eq(owner) 31 | event.args.value.should.be.bignumber.equal(amount) 32 | }) 33 | 34 | it('emits a transfer event', async function() { 35 | const event = await inLogs(this.logs, 'Transfer') 36 | event.args.from.should.eq(owner) 37 | event.args.to.should.eq(ZERO_ADDRESS) 38 | event.args.value.should.be.bignumber.equal(amount) 39 | }) 40 | }) 41 | 42 | describe('when the given amount is greater than the balance of the sender', function() { 43 | const amount = initialBalance + 1 44 | 45 | it('reverts', async function() { 46 | await assertRevert(this.token.burn(amount, { from })) 47 | }) 48 | }) 49 | }) 50 | } 51 | 52 | module.exports = { 53 | shouldBehaveLikeBurnableToken 54 | } 55 | -------------------------------------------------------------------------------- /contracts/test/openzeppelin/token/ERC20/BurnableToken.test.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable semi,no-unused-vars,no-extra-semi */ 2 | const { shouldBehaveLikeBurnableToken } = require('./BurnableToken.behaviour') 3 | import { BurnableTokenMock } from '../../helpers/originTokenMocks' 4 | 5 | contract('BurnableToken', function([owner]) { 6 | const initialBalance = 1000 7 | 8 | beforeEach(async function() { 9 | this.token = await BurnableTokenMock.new(owner, initialBalance) 10 | }) 11 | 12 | shouldBehaveLikeBurnableToken([owner], initialBalance) 13 | }) 14 | -------------------------------------------------------------------------------- /contracts/test/openzeppelin/token/ERC20/MintableToken.test.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable semi,no-unused-vars,no-extra-semi */ 2 | const { shouldBehaveLikeMintableToken } = require('./MintableToken.behaviour') 3 | import { MintableTokenMock as MintableToken } from '../../helpers/originTokenMocks' 4 | 5 | contract('MintableToken', function([owner, anotherAccount]) { 6 | const minter = owner 7 | 8 | beforeEach(async function() { 9 | this.token = await MintableToken.new({ from: owner }) 10 | }) 11 | 12 | shouldBehaveLikeMintableToken([owner, anotherAccount, minter]) 13 | }) 14 | -------------------------------------------------------------------------------- /daemon/indexing/.gitignore: -------------------------------------------------------------------------------- 1 | # See http://help.github.com/ignore-files/ for more about ignoring files. 2 | 3 | # dependencies 4 | /node_modules 5 | 6 | -------------------------------------------------------------------------------- /daemon/indexing/.npmignore: -------------------------------------------------------------------------------- 1 | # See https://docs.npmjs.com/misc/developers#keeping-files-out-of-your-package for more about ignoring files. 2 | 3 | # dependencies 4 | /node_modules 5 | -------------------------------------------------------------------------------- /daemon/indexing/README.md: -------------------------------------------------------------------------------- 1 | ![origin_github_banner](https://user-images.githubusercontent.com/673455/37314301-f8db9a90-2618-11e8-8fee-b44f38febf38.png) 2 | 3 | # Origin Indexing Servers 4 | 5 | This directory contains code for Origin Indexing Servers: 6 | - listener: Server that listens to events emitted by Origin contracts and indexes them. 
7 | - apollo: GraphQL server for indexed data 8 | - lib: library for indexing data in various backends. Currently Postgres and Elasticsearch are supported. 9 | 10 | _⚠️ Note that this is a standalone npm package apart from origin-js, and is not included in the distributed origin-js package. Once we move to a monorepo, it will be in a separate directory alongside origin-js._ 11 | 12 | ## To start the listener 13 | 14 | Use origin-box to start an origin-js container. 15 | 16 | docker-compose up origin-js 17 | 18 | If you want to index data in Postgres: 19 | 20 | docker-compose up postgres # start the postgres container. 21 | 22 | Run the database migrations: 23 | 24 | docker exec -ti -e DATABASE_URL=postgres://origin:origin@postgres/origin -w /app/daemon/indexing origin-js node node_modules/db-migrate/bin/db-migrate up 25 | 26 | If you want to index data in Elasticsearch, start the elasticsearch container. 27 | 28 | docker-compose up elasticsearch 29 | 30 | Start the listener in the origin-js container. Use the --elasticsearch and/or --db options to pick the indexer(s). 31 | 32 | docker exec -w /app/daemon/indexing origin-js node listener/listener.js --elasticsearch --db 33 | 34 | You should see messages in the console indicating events are being indexed. 35 | 36 | 37 | ## To start the Apollo GraphQL server 38 | 39 | You will need to update the origin-box docker-compose.yml. For the origin-js image, proxy port 4000 to 4000 for the Apollo server. [TODO: update origin-box config] 40 | 41 | Use origin-box to start an origin-js container. 42 | 43 | docker-compose up origin-js 44 | 45 | Start the Apollo server in the origin-js container. 46 | 47 | docker exec -w /app/daemon/indexing origin-js node apollo/index.js 48 | 49 | The server should start and you can point your browser to http://localhost:4000 to access the GraphQL playground. 50 | 51 | 52 | -------------------------------------------------------------------------------- /daemon/indexing/listener/README.md: -------------------------------------------------------------------------------- 1 | # Origin Event Listener 2 | 3 | The Origin Event Listener follows the blockchain, spotting origin.js events and passing them on to whatever systems need that data. These events are annotated with the full information about the origin resources (listings/offers) that fired them. 4 | 5 | The data from the listener can be used to build and keep up to date an offline index of all Origin Protocol data on the chain. 6 | 7 | The listener may notify you about the same event more than once. Make sure your webhook endpoint is idempotent and can handle receiving the same data multiple times! (A minimal receiver sketch appears below, under Running.) 8 | 9 | To allow the listener to be compatible with [infura.io](https://infura.io/), it does not use subscriptions, only API queries. 10 | 11 | # Running 12 | 13 | First you'll need a blockchain network to listen to. To get a local network running, you can start up the origin box, or you can run `npm start run` from the origin.js directory.
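The idempotency note above matters in practice: the listener can deliver the same event more than once, and its retry behavior on webhook errors makes duplicates even more likely. The snippet below is a minimal sketch (not part of this repo) of a deduplicating receiver for the `--webhook` option; it assumes only that the listener POSTs each annotated event as a JSON body, and it keys duplicates on a hash of the raw payload rather than on any particular field.

```js
// webhook-receiver.js -- hypothetical example, not shipped with the listener.
// Dedupes on a hash of the raw request body so no assumptions are made about
// the event's JSON schema. Swap the Set for a persistent store in real use.
const http = require('http')
const crypto = require('crypto')

const seen = new Set()

http.createServer((req, res) => {
  if (req.method !== 'POST') {
    res.writeHead(405)
    return res.end()
  }
  let body = ''
  req.on('data', chunk => { body += chunk })
  req.on('end', () => {
    const key = crypto.createHash('sha256').update(body).digest('hex')
    if (seen.has(key)) {
      console.log('duplicate event ignored')
    } else {
      seen.add(key)
      console.log('new event received')
      // ...index or forward the event here...
    }
    // Always answer 200 so the listener does not keep retrying a handled event.
    res.writeHead(200)
    res.end()
  })
}).listen(8080, () => console.log('webhook receiver listening on :8080'))
```

Point the listener at it with something like `node daemon/indexing/listener/listener.js --webhook=http://localhost:8080/`.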
14 | 15 | A simple way to see the listener in action: 16 | 17 | node daemon/indexing/listener/listener.js 18 | 19 | ## Command line options 20 | 21 | Output: 22 | 23 | `--verbose` Output JSON for all event information to stdout 24 | 25 | `--webhook=yoururl` Post JSON for each event to the URL 26 | 27 | `--elasticsearch` Experimental support for recording listings directly into Elasticsearch 28 | 29 | `--db` Experimental support for recording listings directly into Postgres (see instructions for setting up the db [here](../README.md)) 30 | 31 | Events: 32 | 33 | `--continue-file=path` Will start following events at the block number defined in the file, and will keep this file updated as it listens to events. The continue file is JSON, in the format `{"lastLogBlock":222, "version":1}`. 34 | 35 | 36 | # How the listener works 37 | 38 | The listener checks every few seconds for a new block number. If it sees one, it requests all Origin-related events from the last block it saw an event on up to the new block. 39 | 40 | For each of those events, the listener decodes it, annotates it with some useful fields, then runs a rule based on the event/contract to load additional information about the event through origin.js. For example, a `ListingCreated` event on a marketplace contract will have the results of `origin.marketplace.get` added to it. The code that uses the event listener output doesn't need to talk to the blockchain or IPFS at all. 41 | 42 | After being annotated with more information, the event is then output to the destinations set by the command line options. 43 | 44 | ## Error handling 45 | 46 | - If there is an error loading information about an origin.js object, the listener will skip that event and continue to the next. Because of the design of the Origin Protocol, there are no guarantees that the associated IPFS data for a resource will be valid, or even there at all. Anyone can put whatever they want there. 47 | 48 | - When an error is raised while outputting to a specific output handler (webhook, db, etc.), the listener will attempt retries with increasing delays, up to two minutes. These retries will block all further event processing until the event goes through. If the maximum number of retries for one event is exceeded, the listener will quit, allowing it to be restarted from outside. 49 | 50 | - When an error is raised while getting event or block number information, the same retry strategy as for output errors is used (increasing delays). -------------------------------------------------------------------------------- /daemon/indexing/migrations/20180815061230-add-listing-table.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var dbm; 4 | var type; 5 | var seed; 6 | var fs = require('fs'); 7 | var path = require('path'); 8 | var Promise; 9 | 10 | /** 11 | * We receive the dbmigrate dependency from dbmigrate initially. 12 | * This enables us to not have to rely on NODE_PATH.
13 | */ 14 | exports.setup = function(options, seedLink) { 15 | dbm = options.dbmigrate; 16 | type = dbm.dataType; 17 | seed = seedLink; 18 | Promise = options.Promise; 19 | }; 20 | 21 | exports.up = function(db) { 22 | var filePath = path.join(__dirname, 'sqls', '20180815061230-add-listing-table-up.sql'); 23 | return new Promise( function( resolve, reject ) { 24 | fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){ 25 | if (err) return reject(err); 26 | console.log('received data: ' + data); 27 | 28 | resolve(data); 29 | }); 30 | }) 31 | .then(function(data) { 32 | return db.runSql(data); 33 | }); 34 | }; 35 | 36 | exports.down = function(db) { 37 | var filePath = path.join(__dirname, 'sqls', '20180815061230-add-listing-table-down.sql'); 38 | return new Promise( function( resolve, reject ) { 39 | fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){ 40 | if (err) return reject(err); 41 | console.log('received data: ' + data); 42 | 43 | resolve(data); 44 | }); 45 | }) 46 | .then(function(data) { 47 | return db.runSql(data); 48 | }); 49 | }; 50 | 51 | exports._meta = { 52 | "version": 1 53 | }; 54 | -------------------------------------------------------------------------------- /daemon/indexing/migrations/20180820041726-add-offer-table.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var dbm; 4 | var type; 5 | var seed; 6 | var fs = require('fs'); 7 | var path = require('path'); 8 | var Promise; 9 | 10 | /** 11 | * We receive the dbmigrate dependency from dbmigrate initially. 12 | * This enables us to not have to rely on NODE_PATH. 13 | */ 14 | exports.setup = function(options, seedLink) { 15 | dbm = options.dbmigrate; 16 | type = dbm.dataType; 17 | seed = seedLink; 18 | Promise = options.Promise; 19 | }; 20 | 21 | exports.up = function(db) { 22 | var filePath = path.join(__dirname, 'sqls', '20180820041726-add-offer-table-up.sql'); 23 | return new Promise( function( resolve, reject ) { 24 | fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){ 25 | if (err) return reject(err); 26 | console.log('received data: ' + data); 27 | 28 | resolve(data); 29 | }); 30 | }) 31 | .then(function(data) { 32 | return db.runSql(data); 33 | }); 34 | }; 35 | 36 | exports.down = function(db) { 37 | var filePath = path.join(__dirname, 'sqls', '20180820041726-add-offer-table-down.sql'); 38 | return new Promise( function( resolve, reject ) { 39 | fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){ 40 | if (err) return reject(err); 41 | console.log('received data: ' + data); 42 | 43 | resolve(data); 44 | }); 45 | }) 46 | .then(function(data) { 47 | return db.runSql(data); 48 | }); 49 | }; 50 | 51 | exports._meta = { 52 | "version": 1 53 | }; 54 | -------------------------------------------------------------------------------- /daemon/indexing/migrations/sqls/20180815061230-add-listing-table-down.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE listing; 2 | -------------------------------------------------------------------------------- /daemon/indexing/migrations/sqls/20180815061230-add-listing-table-up.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE listing ( 2 | id VARCHAR(32) PRIMARY KEY, 3 | seller_address CHAR(42) NOT NULL, -- Seller's ETH address. 20 bytes, in hexa notation. 4 | active BOOLEAN NOT NULL DEFAULT true, 5 | ipfs_hash CHAR(68) NOT NULL, -- JSON's data IPFS hash. 
32 bytes in hexa notation (not base58 encoded). 6 | data JSONB NOT NULL 7 | ); 8 | 9 | CREATE INDEX listing_idx_seller_address_active ON listing(seller_address, active); 10 | -------------------------------------------------------------------------------- /daemon/indexing/migrations/sqls/20180820041726-add-offer-table-down.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE offer; 2 | -------------------------------------------------------------------------------- /daemon/indexing/migrations/sqls/20180820041726-add-offer-table-up.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE offer ( 2 | listing_id VARCHAR(32) NOT NULL, 3 | offer_id INT NOT NULL, 4 | status SMALLINT NOT NULL, 5 | seller_address CHAR(42) NOT NULL, -- Seller's ETH address. 20 bytes in hexa notation. 6 | buyer_address CHAR(42) NOT NULL, -- Buyer's ETH address. 20 bytes in hexa notation. 7 | ipfs_hash CHAR(68) NOT NULL, -- JSON's data IPFS hash. 32 bytes in hexa notation (not base58 encoded). 8 | data JSONB NOT NULL, 9 | PRIMARY KEY(listing_id, offer_id, status) 10 | ); 11 | 12 | CREATE INDEX offer_idx_seller_address_status ON offer(seller_address, status); 13 | CREATE INDEX offer_idx_buyer_address_status ON offer(buyer_address, status); 14 | -------------------------------------------------------------------------------- /daemon/indexing/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "origin-indexing", 3 | "description": "Indexing servers.", 4 | "version": "0.1.0", 5 | "dependencies": { 6 | "apollo-server": "^2.1.0", 7 | "db-migrate": "^0.11.1", 8 | "db-migrate-pg": "^0.4.0", 9 | "elasticsearch": "^15.1.1", 10 | "graphql": "^0.13.2", 11 | "http": "0.0.0", 12 | "origin": "^0.8.5", 13 | "pg": "^7.4.3", 14 | "url": "^0.11.0", 15 | "web3": "^1.0.0-beta.35" 16 | }, 17 | "devDependencies": {} 18 | } 19 | -------------------------------------------------------------------------------- /data_migrations/v0.6_listings_to_listings_registry/Listing_v0_1.json: -------------------------------------------------------------------------------- 1 | 
[{"constant":true,"inputs":[{"name":"_index","type":"uint256"}],"name":"getListing","outputs":[{"name":"","type":"uint256"},{"name":"","type":"address"},{"name":"","type":"bytes32"},{"name":"","type":"uint256"},{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"_index","type":"uint256"},{"name":"_unitsToBuy","type":"uint256"}],"name":"buyListing","outputs":[],"payable":true,"stateMutability":"payable","type":"function"},{"constant":true,"inputs":[],"name":"listingsLength","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"owner_address","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"_ipfsHash","type":"bytes32"},{"name":"_price","type":"uint256"},{"name":"_unitsAvailable","type":"uint256"}],"name":"create","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[],"name":"testingAddSampleListings","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[{"name":"","type":"uint256"}],"name":"listings","outputs":[{"name":"lister","type":"address"},{"name":"ipfsHash","type":"bytes32"},{"name":"price","type":"uint256"},{"name":"unitsAvailable","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"inputs":[],"payable":false,"stateMutability":"nonpayable","type":"constructor"},{"anonymous":false,"inputs":[{"indexed":false,"name":"_from","type":"address"}],"name":"UpdateListings","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"name":"_index","type":"uint256"}],"name":"NewListing","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"name":"_index","type":"uint256"},{"indexed":false,"name":"_unitsToBuy","type":"uint256"},{"indexed":false,"name":"_value","type":"uint256"}],"name":"ListingPurchased","type":"event"}] -------------------------------------------------------------------------------- /data_migrations/v0.6_listings_to_listings_registry/README.md: -------------------------------------------------------------------------------- 1 | Script to migrate listings that are stored in the original listings contract to using a listings registry to store the listings 2 | 3 | **Operating manual** 4 | 5 | After filling out the configuration, execute the script in read mode: 6 | ``` 7 | amberts-MacBook-98:v0.1_migration ambertho$ node migrate.js -c ./conf-test.json -d ./rinkeby_listings.json -a read 8 | Reading listings from: Rinkeby - Gateway: https://rinkeby.infura.io/ 9 | -------------------------------------------- 10 | Source contract address: 0x94dE52186b535cB06cA31dEb1fBd4541A824aC6d 11 | Found 289 listings. 12 | Retrieved 289 listings from source contract. 13 | Wrote 289 listings to data file: ./rinkeby_listings.json 14 | ``` 15 | 16 | yay! now we have a json file backup of the listings. 17 | 18 | then, execute the script in write mode: 19 | `amberts-MacBook-98:v0.1_migration ambertho$ node migrate.js -c ./conf-test.json -d ./ropsten_listings.json -a write` 20 | 21 | If there are duplicates the script will flag them: 22 | 23 | ``` 24 | Creating listings on: Local - Gateway: http://localhost:8545/ 25 | -------------------------------------------- 26 | Read 49 listings from data file. 
27 | Destination contract address: 0x8f0483125fcb9aaaefa9209d8e9d7b9c8b9fb90f 28 | Gas multiplier: 1.2 29 | # Confirmations to wait: 6 30 | Creating listings using account: 0x627306090abab3a6e1400e9345bc60c78a8bef57 31 | -------------------------------------------- 32 | Starting # listings in ListingsRegistry: 288 33 | Checking for duplicates... 34 | Found duplicate listings: 0,1,2,3,4 35 | Please remove these entries from the datafile before running the migration. 36 | ``` 37 | 38 | Pretty self-explanatory: just remove the duplicate listings (optionally back up the datafile first) and rerun the script. 39 | 40 | ``` 41 | Checking for duplicates... 42 | No duplicate listings found. 43 | 44 | Press any key to start the migration. 45 | ``` 46 | The script will start scrolling output as it sends transactions, checks confirmations, and prints the results: 47 | 48 | ``` 49 | <<<<<<<<<<<<<< polling >>>>>>>>>>>>>>> 50 | current block: 371 51 | recieved receipts for: 1 listings. 52 | submitted: 0 | mined: 0 | confirmed: 44 53 | -------------------------------------------- 54 | Listings have 6 confirmations. 55 | Ending # listings in ListingsRegistry: 332 (44 created) 56 | -------------------------------------------- 57 | Results: 58 | 0 listing currently in submitted state: 59 | 0 listings currently in mined state: 60 | 44 listings migrated: 5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48 61 | 0 errors: 62 | -------------------------------------------- 63 | Checking data... 64 | Checksums of the created listings and the ones in the datafile match. 65 | ``` 66 | 67 | If the script needs to be stopped mid-run or errors out, CTRL+C will print the output. The writes are pretty much atomic as far as I can tell (if you get a transaction receipt, the listing will get created). The next time the script is run, the duplicates check will tell you which listings have already been written. 68 | 69 | There's a special case here: if the script needs to be run multiple times, only the first run will have the accurate number of starting listings (since subsequent runs will pick up the listings that have already been migrated). In that case, "startingNumListings" on this line (https://github.com/OriginProtocol/origin-js/blob/issue/148_script_data_migration/scripts/v0.1_migration/migrate.js#L382) has to be set to the original number from the first run (if the ListingsRegistry was freshly created, it will be 0). Not doing so won't affect the actual migration, but the data integrity check at the end of the script uses the starting number of listings to query for the migrated data. 70 | -------------------------------------------------------------------------------- /data_migrations/v0.6_listings_to_listings_registry/conf-sample.json: -------------------------------------------------------------------------------- 1 | { 2 | "srcNetworkName": "Ropsten or Rinkeby", 3 | "dstNetworkName": "Local, Ropsten, or Rinkeby", 4 | "srcGateway": "https://ropsten.infura.io, https://rinkeby.infura.io etc.", 5 | "dstGateway": "http://localhost:8545, https://ropsten.infura.io, etc.", 6 | "srcListingAddress_v0_1": "0xAddress. 
Rinkeby: 0x94dE52186b535cB06cA31dEb1fBd4541A824aC6d, Ropsten: 0xE66c9c6168d14bE4C3c145f91890740CbDf9EC8B", 7 | "dstListingsRegistryAddress_v0_2":"0xAddress", 8 | "mnemonic": "the mnemonic goes here", 9 | "gasSafetyMarginMultiplier": 1.2, 10 | "numConfirmations": 6 11 | } -------------------------------------------------------------------------------- /data_migrations/v0.7_listings_registry_to_storage/conf-rinkeby.json: -------------------------------------------------------------------------------- 1 | { 2 | "srcNetworkName": "Rinkeby", 3 | "dstNetworkName": "Rinkeby", 4 | "srcGateway": "https://rinkeby.infura.io", 5 | "dstGateway": "https://rinkeby.infura.io", 6 | "listingsRegistryAddress": "0xf9555a250a36f59c6b74aabec018b8f55da72ec4", 7 | "newListingsRegistryAddress":"0xAddress", 8 | "mnemonic": "the mnemonic goes here", 9 | "gasSafetyMarginMultiplier": 1.2, 10 | "numConfirmations": 1 11 | } 12 | -------------------------------------------------------------------------------- /data_migrations/v0.7_listings_registry_to_storage/conf-ropsten.json: -------------------------------------------------------------------------------- 1 | { 2 | "srcNetworkName": "Ropsten", 3 | "dstNetworkName": "Ropsten", 4 | "srcGateway": "https://ropsten.infura.io", 5 | "dstGateway": "https://ropsten.infura.io", 6 | "listingsRegistryAddress": "0x2861f28756e14cf0733383be8aa66ba3b65f1b4e", 7 | "newListingsRegistryAddress":"0xAddress", 8 | "mnemonic": "the mnemonic goes here", 9 | "gasSafetyMarginMultiplier": 1.2, 10 | "numConfirmations": 1 11 | } 12 | -------------------------------------------------------------------------------- /data_migrations/v0.7_listings_registry_to_storage/conf-sample.json: -------------------------------------------------------------------------------- 1 | { 2 | "srcNetworkName": "Ropsten or Rinkeby", 3 | "dstNetworkName": "Local, Ropsten, or Rinkeby", 4 | "srcGateway": "https://ropsten.infura.io, https://rinkeby.infura.io etc.", 5 | "dstGateway": "http://localhost:8545, https://ropsten.infura.io, etc.", 6 | "listingsRegistryAddress": "0xAddress. Rinkeby: 0xf9555a250a36f59c6b74aabec018b8f55da72ec4, Ropsten: 0xE66c9c6168d14bE4C3c145f91890740CbDf9EC8B", 7 | "newListingsRegistryAddress":"0xAddress", 8 | "mnemonic": "the mnemonic goes here", 9 | "gasSafetyMarginMultiplier": 1.2, 10 | "numConfirmations": 1 11 | } 12 | -------------------------------------------------------------------------------- /scripts/build.js: -------------------------------------------------------------------------------- 1 | const chalk = require('chalk') 2 | const fs = require('fs-extra') 3 | const startGanache = require('./helpers/start-ganache') 4 | const buildContracts = require('./helpers/build-contracts') 5 | const deployContracts = require('./helpers/deploy-contracts') 6 | const startIpfs = require('./helpers/start-ipfs') 7 | const startTestServer = require('./helpers/start-test-server') 8 | const watch = require('node-watch') 9 | const webpack = require('webpack') 10 | const webpackConfig = require('../webpack.config.js') 11 | 12 | const args = process.argv.slice(2) 13 | const shouldWatch = args.length && args[0] === 'serve' 14 | const noGanache = args.length && args[1] === 'no-ganache' 15 | 16 | /** 17 | * Copies compiled contracts from the latest release to 18 | * the contracts build directory. 19 | */ 20 | const copyReleaseCompiledContracts = (dstDir) => { 21 | // Get list of release directories. 
22 | let dirs = fs.readdirSync('contracts/releases') 23 | dirs = dirs.filter(dir => (/^\d+\.\d+\.\d+$/.test(dir))) 24 | 25 | // Get latest release directory. 26 | const latestVersion = dirs.sort().reverse()[0] 27 | 28 | // Create build directory if it does not exist. 29 | if (!fs.pathExistsSync(dstDir)) { 30 | fs.mkdirpSync(dstDir) 31 | } 32 | 33 | // Copy compiled contract files from latest release to the build directory. 34 | const srcDir = `contracts/releases/${latestVersion}/build/contracts` 35 | fs.copySync(srcDir, dstDir) 36 | console.log(chalk.green(`Copied compiled contracts from ${srcDir} to ${dstDir}`)) 37 | } 38 | 39 | const start = async () => { 40 | const compiler = webpack(webpackConfig) 41 | 42 | // If the contract build directory does not exist or is empty, 43 | // copy the compiled contract files from the latest release into it. 44 | const dstDir = 'contracts/build/contracts' 45 | if (fs.pathExistsSync(dstDir) && fs.readdirSync(dstDir).length > 0) { 46 | console.log(chalk.blue('Contracts build directory already exists and is not empty, skipping copy.')) 47 | } else { 48 | copyReleaseCompiledContracts(dstDir) 49 | } 50 | if (shouldWatch) { 51 | if (!noGanache) { 52 | console.log( 53 | chalk`\n{bold.hex('#1a82ff') ⬢ Starting Local Blockchain }\n` 54 | ) 55 | await startGanache() 56 | } 57 | console.log(chalk`\n{bold.hex('#26d198') ⬢ Deploying Smart Contracts }\n`) 58 | await deployContracts() 59 | console.log(chalk`\n{bold.hex('#6e3bea') ⬢ Starting Local IPFS }\n`) 60 | await startIpfs() 61 | 62 | // watch contracts 63 | watch('./contracts/contracts', { recursive: true }, (evt, name) => { 64 | console.log('%s changed.', name) 65 | deployContracts() 66 | }) 67 | 68 | // watch js 69 | compiler.watch({}, (err, stats) => { 70 | if (err || stats.hasErrors()) { 71 | console.error(err) 72 | } else { 73 | console.log( 74 | stats.toString({ 75 | hash: false, 76 | modules: false, 77 | version: false 78 | }) 79 | ) 80 | } 81 | }) 82 | 83 | console.log(chalk`\n{bold.hex('#1a82ff') ⬢ Starting Test Server }\n`) 84 | startTestServer() 85 | } else { 86 | console.log(chalk`\n{bold.hex('#1a82ff') ⬢ Compiling Smart Contracts }\n`) 87 | await buildContracts() 88 | console.log(chalk`\n{bold.hex('#26d198') ⬢ Compiling Webpack }\n`) 89 | compiler.run(err => { 90 | if (err) { 91 | console.log(err) 92 | } else { 93 | console.log('webpack compiled successfully') 94 | } 95 | }) 96 | } 97 | } 98 | 99 | start() 100 | -------------------------------------------------------------------------------- /scripts/helpers/build-contracts.js: -------------------------------------------------------------------------------- 1 | const { spawn } = require('child_process') 2 | const minifyContracts = require('./minify-contracts') 3 | 4 | const buildContracts = () => { 5 | return new Promise((resolve, reject) => { 6 | const truffleCompile = spawn('../node_modules/.bin/truffle', ['compile'], { 7 | cwd: './contracts' 8 | }) 9 | truffleCompile.stdout.pipe(process.stdout) 10 | truffleCompile.stderr.on('data', data => { 11 | reject(String(data)) 12 | }) 13 | truffleCompile.on('exit', code => { 14 | if (code === 0) { 15 | console.log('Truffle compile finished OK.') 16 | } 17 | minifyContracts() 18 | resolve() 19 | }) 20 | }) 21 | } 22 | 23 | module.exports = buildContracts 24 | -------------------------------------------------------------------------------- /scripts/helpers/deploy-contracts.js: -------------------------------------------------------------------------------- 1 | const { spawn } = require('child_process') 2 | const 
minifyContracts = require('./minify-contracts') 3 | 4 | const deployContracts = () => { 5 | return new Promise((resolve, reject) => { 6 | const truffleMigrate = spawn( 7 | '../node_modules/.bin/truffle', 8 | ['migrate', '--reset', '--compile-all'], 9 | { cwd: './contracts' } 10 | ) 11 | truffleMigrate.stdout.pipe(process.stdout) 12 | truffleMigrate.stderr.on('data', data => { 13 | reject(String(data)) 14 | }) 15 | truffleMigrate.on('exit', code => { 16 | if (code === 0) { 17 | console.log('Truffle migrate finished OK.') 18 | } 19 | minifyContracts() 20 | resolve() 21 | }) 22 | }) 23 | } 24 | 25 | module.exports = deployContracts 26 | -------------------------------------------------------------------------------- /scripts/helpers/minify-contracts.js: -------------------------------------------------------------------------------- 1 | /** 2 | * It's currently not possible to configure the contract JSON outputted by 3 | * Truffle. This script re-writes the contract JSON, pulling out only the 4 | * pieces needed by origin-js and significantly reducing the overall bundle 5 | * size 6 | */ 7 | 8 | const fs = require('fs') 9 | const contractDir = __dirname + '/../../contracts/build/contracts' 10 | 11 | module.exports = function() { 12 | const files = fs.readdirSync(contractDir) 13 | files.forEach(file => { 14 | const filePath = `${contractDir}/${file}` 15 | const contractJSON = fs.readFileSync(filePath).toString() 16 | const { abi, bytecode, contractName, networks, ast } = JSON.parse( 17 | contractJSON 18 | ) 19 | const simplifiedJSON = { abi, bytecode, contractName, networks, ast } 20 | fs.writeFileSync(filePath, JSON.stringify(simplifiedJSON, null, 4)) 21 | }) 22 | } 23 | -------------------------------------------------------------------------------- /scripts/helpers/start-ganache.js: -------------------------------------------------------------------------------- 1 | const Ganache = require('ganache-core') 2 | 3 | const PORT = 8545 4 | 5 | const startGanache = () => { 6 | return new Promise((resolve, reject) => { 7 | const server = Ganache.server({ 8 | total_accounts: 10, 9 | default_balance_ether: 100, 10 | network_id: 999, 11 | seed: 123, 12 | blocktime: 0, 13 | mnemonic: 14 | 'candy maple cake sugar pudding cream honey rich smooth crumble sweet treat' 15 | }) 16 | server.listen(PORT, err => { 17 | if (err) { 18 | return reject(err) 19 | } 20 | console.log(`Ganache listening on port ${PORT}`) 21 | resolve() 22 | }) 23 | }) 24 | } 25 | 26 | module.exports = startGanache 27 | -------------------------------------------------------------------------------- /scripts/helpers/start-ipfs.js: -------------------------------------------------------------------------------- 1 | const bs58 = require('bs58') 2 | const fs = require('fs') 3 | const ipfsAPI = require('ipfs-api') 4 | const HttpIPFS = require('ipfs/src/http') 5 | const ReadableStream = require('stream').Readable 6 | 7 | const fixturesDir = __dirname + '/../../test/fixtures' 8 | 9 | const startIpfs = () => 10 | new Promise((resolve, reject) => { 11 | const httpAPI = new HttpIPFS(undefined, { 12 | Addresses: { 13 | API: '/ip4/0.0.0.0/tcp/5002', 14 | Gateway: '/ip4/0.0.0.0/tcp/8080' 15 | } 16 | }) 17 | console.log('Start IPFS') 18 | httpAPI.start(true, async err => { 19 | if (err) { 20 | return reject(err) 21 | } 22 | console.log('Started IPFS') 23 | await populateIpfs() 24 | 25 | resolve() 26 | }) 27 | }) 28 | 29 | /** 30 | * Populate IPFS with sample listings from the fixtures directory. 
31 | */ 32 | const populateIpfs = async () => { 33 | const ipfs = ipfsAPI('localhost', '5002', { protocol: 'http' }) 34 | 35 | console.log('Populating IPFS...') 36 | 37 | // fs.readdirSync always returns results sorted on unix based platforms 38 | // so the IPFS hashes will always be the same 39 | const listingDirectories = fs.readdirSync(fixturesDir) 40 | 41 | for (const listingDirectoryName of listingDirectories) { 42 | // Iterate over each directory in the fixtures dir 43 | const listingDirectory = fixturesDir + '/' + listingDirectoryName 44 | const stat = fs.statSync(listingDirectory) 45 | 46 | // Only process directories in the fixtures directory 47 | if (stat.isDirectory()) { 48 | // Grab the schema filename 49 | const schemaFilename = fs.readdirSync(listingDirectory).find(file => { 50 | return file.endsWith('json') 51 | }) 52 | if (!schemaFilename) { 53 | // No schema, don't proceed 54 | throw new Error(`Schema not found in ${listingDirectory}`) 55 | } 56 | 57 | // Get all the images from the listing directory 58 | const imagePaths = fs 59 | .readdirSync(listingDirectory) 60 | .filter(file => { 61 | return file.endsWith('jpg') || file.endsWith('png') 62 | }) 63 | .map(imageFilename => { 64 | return listingDirectory + '/' + imageFilename 65 | }) 66 | 67 | // Read the listing data 68 | const dataJson = fs.readFileSync(listingDirectory + '/' + schemaFilename) 69 | const data = JSON.parse(dataJson) 70 | // Preserve order of uploaded images to maintain IPFS hash 71 | // This is necessary because the hashes are hardcoded in contract migrations 72 | data.media = [] 73 | for (const imagePath of imagePaths) { 74 | const imageUpload = await ipfs.util.addFromFs(imagePath) 75 | const contentType = imagePath.endsWith('jpg') 76 | ? 'image/jpeg' 77 | : 'image/png' 78 | const medium = { 79 | url: `ipfs://${imageUpload[0]['hash']}`, 80 | contentType: contentType 81 | } 82 | data.media.push(medium) 83 | } 84 | 85 | // Update listing data to IPFS 86 | const stream = new ReadableStream() 87 | stream.push(JSON.stringify(data)) 88 | stream.push(null) 89 | const resp = await ipfs.add(stream) 90 | 91 | // Log some data. 
92 | // TODO(franck): re-use ContractService.getBytes32FromIpfsHash 93 | const ipfsHash = resp[0].hash 94 | const bytes32 = 95 | '0x' + 96 | bs58 97 | .decode(ipfsHash) 98 | .slice(2) 99 | .toString('hex') 100 | console.log(`Uploaded fixture listing ${listingDirectoryName} to IPFS`) 101 | console.log(` IPFS Hash=${ipfsHash}`) 102 | console.log(` Bytes32 =${bytes32}`) 103 | } 104 | } 105 | } 106 | 107 | module.exports = startIpfs 108 | -------------------------------------------------------------------------------- /scripts/helpers/start-test-server.js: -------------------------------------------------------------------------------- 1 | const { spawn } = require('child_process') 2 | 3 | // Serve webpack dev server for browser testing 4 | const startTestServer = () => { 5 | console.log('Serving origin.js tests from http://localhost:8081') 6 | const webpackDevServer = spawn('./node_modules/.bin/webpack-dev-server', [ 7 | '--hot', 8 | '--config', 9 | 'test/webpack.config.js', 10 | '--host', 11 | '0.0.0.0' 12 | ]) 13 | webpackDevServer.stderr.pipe(process.stderr) 14 | } 15 | 16 | module.exports = startTestServer 17 | -------------------------------------------------------------------------------- /scripts/helpers/test-contracts.js: -------------------------------------------------------------------------------- 1 | const { spawn } = require('child_process') 2 | 3 | // When run with no arguments, this script runs all contract tests. It 4 | // optionally takes a single argument that specifies the path of a single 5 | // contract test to run. That path is relative to 'contracts/test'. 6 | const testContracts = () => { 7 | return new Promise((resolve, reject) => { 8 | const args = [ 9 | '-r', 10 | 'babel-register', 11 | '-r', 12 | 'babel-polyfill', 13 | '-t', 14 | '10000', 15 | '--exit', 16 | 'contracts/test-alt/' 17 | ] 18 | const contractTest = spawn('./node_modules/.bin/mocha', args) 19 | contractTest.stdout.pipe(process.stdout) 20 | contractTest.stderr.on('data', data => { 21 | reject(String(data)) 22 | }) 23 | contractTest.on('exit', code => { 24 | if (code === 0) { 25 | resolve() 26 | } else { 27 | reject('Contract tests failed') 28 | } 29 | }) 30 | }) 31 | } 32 | 33 | module.exports = testContracts 34 | -------------------------------------------------------------------------------- /scripts/helpers/test-javascript.js: -------------------------------------------------------------------------------- 1 | const { spawn } = require('child_process') 2 | 3 | const testJavascript = () => { 4 | return new Promise((resolve, reject) => { 5 | const mocha = spawn('./node_modules/.bin/mocha', [ 6 | '--compilers', 7 | 'js:babel-core/register', 8 | '--require', 9 | 'babel-polyfill', 10 | '--timeout', 11 | '10000', 12 | '--exit' 13 | ]) 14 | mocha.stdout.pipe(process.stdout) 15 | mocha.stderr.pipe(process.stderr) 16 | 17 | mocha.on('exit', code => { 18 | if (code !== 0) { 19 | return reject() 20 | } 21 | resolve() 22 | }) 23 | }) 24 | } 25 | 26 | module.exports = testJavascript 27 | -------------------------------------------------------------------------------- /scripts/helpers/test-js-format.js: -------------------------------------------------------------------------------- 1 | const { spawn } = require('child_process') 2 | 3 | const testJSFormat = () => { 4 | return new Promise((resolve, reject) => { 5 | const eslint = spawn('./node_modules/.bin/eslint', [ 6 | 'src/**/*.js', 7 | 'test/**/*test.js', 8 | 'contracts/test/**/*.js', 9 | 'scripts/**/*.js' 10 | ]) 11 | eslint.stdout.on('data', data => { 12 | 
reject(`JS formatter inspection failed:\n${String(data)}`) 13 | }) 14 | eslint.on('exit', code => { 15 | if (code === 0) { 16 | console.log('JS formatter inspection passed.') 17 | } 18 | resolve() 19 | }) 20 | }) 21 | } 22 | 23 | module.exports = testJSFormat 24 | -------------------------------------------------------------------------------- /scripts/helpers/test-solidity-format.js: -------------------------------------------------------------------------------- 1 | const { spawn } = require('child_process') 2 | 3 | const testSolidityFormat = () => { 4 | return new Promise((resolve, reject) => { 5 | const solium = spawn('./node_modules/.bin/solium', [ 6 | '-d', 7 | 'contracts/contracts/' 8 | ]) 9 | solium.stdout.pipe(process.stdout) 10 | solium.on('exit', code => { 11 | if (code === 0) { 12 | console.log('Solidity formatter inspection passed.') 13 | } else { 14 | reject(`Solidity formatter inspection failed.`) 15 | } 16 | resolve() 17 | }) 18 | }) 19 | } 20 | 21 | module.exports = testSolidityFormat 22 | -------------------------------------------------------------------------------- /scripts/helpers/test-truffle.js: -------------------------------------------------------------------------------- 1 | const { spawn } = require('child_process') 2 | 3 | // When run with no arguments, this script runs all contract tests. It 4 | // optionally takes a single argument that specifies the path of a single 5 | // contract test to run. That path is relative to 'contracts/test'. 6 | const testContracts = () => { 7 | return new Promise((resolve, reject) => { 8 | const testFile = process.argv[2] 9 | let truffleArgs 10 | if (testFile === undefined) { 11 | truffleArgs = ['test', '--compile-all'] 12 | } else { 13 | console.log('running ' + testFile) 14 | truffleArgs = ['test', 'test/' + testFile, '--compile-all'] 15 | } 16 | const truffleTest = spawn('../node_modules/.bin/truffle', truffleArgs, { 17 | cwd: './contracts' 18 | }) 19 | truffleTest.stdout.pipe(process.stdout) 20 | truffleTest.stderr.on('data', data => { 21 | reject(String(data)) 22 | }) 23 | truffleTest.on('exit', code => { 24 | if (code === 0) { 25 | resolve() 26 | } else { 27 | reject('Truffle contract tests failed') 28 | } 29 | }) 30 | }) 31 | } 32 | 33 | module.exports = testContracts 34 | -------------------------------------------------------------------------------- /scripts/test-env.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Start a long-running test environment. 3 | * Useful when running mocha tests in watch mode 4 | */ 5 | 6 | const chalk = require('chalk') 7 | const startGanache = require('./helpers/start-ganache') 8 | const deployContracts = require('./helpers/deploy-contracts') 9 | const startIpfs = require('./helpers/start-ipfs') 10 | 11 | const start = async () => { 12 | try { 13 | console.log(chalk`\n{bold.hex('#26d198') ⬢ Starting Local Blockchain }\n`) 14 | await startGanache() 15 | console.log(chalk`\n{bold.hex('#6e3bea') ⬢ Deploying Smart Contracts }\n`) 16 | await deployContracts() 17 | console.log(chalk`\n{bold.hex('#26d198') ⬢ Starting Local IPFS }\n`) 18 | await startIpfs() 19 | } catch (error) { 20 | console.log(chalk`\n{bold ⚠️ Env failed. 
}\n`) 21 | console.error(error) 22 | } 23 | } 24 | 25 | start() 26 | -------------------------------------------------------------------------------- /scripts/test-js.js: -------------------------------------------------------------------------------- 1 | const chalk = require('chalk') 2 | const { spawn } = require('child_process') 3 | const deployContracts = require('./helpers/deploy-contracts') 4 | const startIpfs = require('./helpers/start-ipfs') 5 | 6 | const startGanache = require('./helpers/start-ganache') 7 | 8 | const runTests = async watch => { 9 | return new Promise((resolve, reject) => { 10 | const args = ['-r', 'babel-register', '-r', 'babel-polyfill', '-t', '10000'] 11 | if (watch) { 12 | args.push('--watch') 13 | } else { 14 | args.push('--exit') 15 | } 16 | args.push('test') 17 | console.log('running mocha with args:', args.join(' ')) 18 | 19 | const contractTest = spawn('./node_modules/.bin/mocha', args) 20 | contractTest.stdout.pipe(process.stdout) 21 | contractTest.stderr.on('data', data => { 22 | reject(String(data)) 23 | }) 24 | contractTest.on('exit', code => { 25 | if (code === 0) { 26 | console.log(chalk`\n{bold ✅ JavaScript tests passed. :) }\n`) 27 | process.exit(0) 28 | } else { 29 | console.log(chalk`\n{bold ⚠️ JavaScript tests failed. :( }\n`) 30 | process.exit(1) 31 | } 32 | }) 33 | }) 34 | } 35 | 36 | const start = async () => { 37 | console.log(chalk`\n{bold.hex('#26d198') ⬢ Starting Local Blockchain }\n`) 38 | await startGanache() 39 | console.log(chalk`\n{bold.hex('#1a82ff') ⬢ Starting Local IPFS }\n`) 40 | await startIpfs() 41 | console.log(chalk`\n{bold.hex('#6e3bea') ⬢ Deploying Smart Contracts }\n`) 42 | await deployContracts() 43 | 44 | const watch = process.argv[2] && process.argv[2] == '--watch' 45 | await runTests(watch) 46 | } 47 | 48 | start() 49 | -------------------------------------------------------------------------------- /scripts/test-truffle.js: -------------------------------------------------------------------------------- 1 | const chalk = require('chalk') 2 | const startGanache = require('./helpers/start-ganache') 3 | const testTruffle = require('./helpers/test-truffle') 4 | const buildContracts = require('./helpers/build-contracts') 5 | const startIpfs = require('./helpers/start-ipfs') 6 | const watch = require('node-watch') 7 | 8 | // Simple enqueueing system to prevent interrupting a test. Rerunning in the middle of a test causes issues. 
9 | let isRunning = false 10 | let isEnqueued = false 11 | const runTests = async () => { 12 | if (!isRunning) { 13 | isRunning = true 14 | console.log(chalk`\n{bold.hex('#6e3bea') ⬢ Compiling Smart Contracts }\n`) 15 | await buildContracts() 16 | console.log(chalk`\n{bold.hex('#1a82ff') ⬢ Testing Smart Contracts }\n`) 17 | await testTruffle() 18 | isRunning = false 19 | if (isEnqueued) { 20 | isEnqueued = false 21 | runTests() 22 | } 23 | } else { 24 | isEnqueued = true 25 | } 26 | } 27 | 28 | const start = async () => { 29 | console.log(chalk`\n{bold.hex('#26d198') ⬢ Starting Local Blockchain }\n`) 30 | await startGanache() 31 | console.log(chalk`\n{bold.hex('#1a82ff') ⬢ Starting Local IPFS }\n`) 32 | await startIpfs() 33 | 34 | runTests() 35 | 36 | // watch contracts 37 | watch( 38 | ['./contracts/contracts', './contracts/test'], 39 | { recursive: true }, 40 | async (evt, name) => { 41 | console.log('%s changed.', name) 42 | runTests() 43 | } 44 | ) 45 | } 46 | 47 | start() 48 | -------------------------------------------------------------------------------- /scripts/test.js: -------------------------------------------------------------------------------- 1 | const chalk = require('chalk') 2 | const testContracts = require('./helpers/test-contracts') 3 | const startGanache = require('./helpers/start-ganache') 4 | const deployContracts = require('./helpers/deploy-contracts') 5 | const testTruffle = require('./helpers/test-truffle') 6 | const startIpfs = require('./helpers/start-ipfs') 7 | const testJavascript = require('./helpers/test-javascript') 8 | const testJSFormat = require('./helpers/test-js-format') 9 | const testSolidityFormat = require('./helpers/test-solidity-format') 10 | 11 | const start = async () => { 12 | try { 13 | console.log(chalk`\n{bold.hex('#6e3bea') ⬢ Testing JS Formatting }\n`) 14 | await testJSFormat() 15 | console.log(chalk`\n{bold.hex('#c63197') ⬢ Testing Solidity Formatting }\n`) 16 | await testSolidityFormat() 17 | console.log(chalk`\n{bold.hex('#1a82ff') ⬢ Testing Smart Contracts }\n`) 18 | await testContracts() 19 | console.log(chalk`\n{bold.hex('#26d198') ⬢ Starting Local Blockchain }\n`) 20 | await startGanache() 21 | console.log( 22 | chalk`\n{bold.hex('#1a82ff') ⬢ Testing Smart Contracts using Truffle }\n` 23 | ) 24 | await testTruffle() 25 | console.log(chalk`\n{bold.hex('#6e3bea') ⬢ Deploying Smart Contracts }\n`) 26 | await deployContracts() 27 | console.log(chalk`\n{bold.hex('#26d198') ⬢ Starting Local IPFS }\n`) 28 | await startIpfs() 29 | console.log(chalk`\n{bold.hex('#1a82ff') ⬢ Testing Javascript }\n`) 30 | await testJavascript() 31 | console.log(chalk`\n{bold ✅ Tests passed. :) }\n`) 32 | process.exit() 33 | } catch (error) { 34 | console.log(chalk`\n{bold ⚠️ Tests failed. :( }\n`) 35 | console.error(error) 36 | process.exit(1) 37 | } 38 | } 39 | 40 | start() 41 | -------------------------------------------------------------------------------- /src/README.md: -------------------------------------------------------------------------------- 1 | # origin.js source 2 | 3 | These files are the source code for origin.js. 4 | 5 | Files in `resources` are exposed to the client as the primary interface to origin.js, containing methods for creating listings, booking, etc... 6 | 7 | In general, files ending in `-service.js` are not directly exposed to the client, but are used by the resources. 
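
For orientation, here is a minimal usage sketch. It is illustrative only: the constructor option names mirror `src/index.js`, the `users.get()` call assumes the `users` resource forwards to the resolver in `src/contractInterface/users/resolver.js`, and the provider URL and account address are placeholders for a local development setup.

```js
// Illustrative sketch, not a canonical example — see src/index.js for the full
// list of constructor options and src/resources/ for each resource's interface.
const Web3 = require('web3')
const Origin = require('./index')

async function demo() {
  // Point web3 at a local node, e.g. the ganache instance started by
  // scripts/helpers/start-ganache.js (placeholder URL).
  const web3 = new Web3(new Web3.providers.HttpProvider('http://localhost:8545'))

  // All omitted options (bridge server, IPFS gateway, discovery server, ...)
  // fall back to the defaults declared at the top of src/index.js.
  const origin = new Origin({ web3 })

  // Resources are the client-facing entry points.
  // Assumed to forward to UsersResolver.get(); the address is a placeholder dev account.
  const user = await origin.users.get('0x627306090abab3a6e1400e9345bc60c78a8bef57')
  // Signature as defined in src/resources/discovery.js: (searchQuery, numberOfItems, offset, filters).
  const results = await origin.discovery.search('bike', 12, 0)

  console.log(user, results)
}

demo().catch(console.error)
```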
8 | -------------------------------------------------------------------------------- /src/contractInterface/README.md: -------------------------------------------------------------------------------- 1 | # Ethereum Interface 2 | 3 | The contractInterface directory contains logic and provides an interface for interacting with Ethereum smart contracts. This includes handling of multiple contract versions over time and maintaining backwards compatibility. 4 | 5 | ## Resolvers 6 | 7 | Resolvers provide the interface to a particular contract, bringing all the adapters for the different contract versions together. 8 | 9 | ## Adapters 10 | 11 | An adapter is an interface for a particular contract version. All adapters for a contract should surface the same interface (input and output format), internally handling the implementation details for a specific contract version. 12 | -------------------------------------------------------------------------------- /src/contractInterface/users/resolver.js: -------------------------------------------------------------------------------- 1 | import V00_UsersAdapter from './v00_adapter' 2 | import UserObject from '../../models/user' 3 | 4 | class UsersResolver { 5 | constructor({ contractService, ipfsService, blockEpoch }) { 6 | this.adapters = { 7 | '000': new V00_UsersAdapter({ contractService, ipfsService, blockEpoch }) 8 | } 9 | this.versions = ['000'] 10 | this.currentVersion = this.versions[this.versions.length - 1] 11 | this.currentAdapter = this.adapters[this.currentVersion] 12 | } 13 | 14 | async set({ profile, attestations = [], options = {} }) { 15 | return this.currentAdapter.set({ profile, attestations, options }) 16 | } 17 | 18 | async get(address) { 19 | let result = false 20 | for (let i = this.versions.length - 1; i >= 0; i--) { 21 | if (!result) { 22 | const version = this.versions[i] 23 | result = await this.adapters[version].get(address) 24 | } 25 | } 26 | if (result) { 27 | return new UserObject(result) 28 | } else { 29 | return new UserObject({ address }) 30 | } 31 | } 32 | 33 | async identityAddress(wallet) { 34 | let result = false 35 | for (let i = this.versions.length - 1; i >= 0; i--) { 36 | if (!result) { 37 | const version = this.versions[i] 38 | result = await this.adapters[version].identityAddress(wallet) 39 | } 40 | } 41 | return result 42 | } 43 | } 44 | 45 | module.exports = UsersResolver 46 | -------------------------------------------------------------------------------- /src/index.js: -------------------------------------------------------------------------------- 1 | import ContractService from './services/contract-service' 2 | import IpfsService from './services/ipfs-service' 3 | import { Attestations } from './resources/attestations' 4 | import Marketplace from './resources/marketplace' 5 | import Discovery from './resources/discovery' 6 | import Users from './resources/users' 7 | import Messaging from './resources/messaging' 8 | import Token from './resources/token' 9 | import fetch from 'cross-fetch' 10 | import store from 'store' 11 | 12 | const defaultBridgeServer = 'https://bridge.originprotocol.com' 13 | const defaultIpfsDomain = 'gateway.originprotocol.com' 14 | const defaultDiscoveryServerUrl = 'https://discovery.originprotocol.com' 15 | const defaultIpfsApiPort = '5002' 16 | const defaultIpfsGatewayPort = '443' 17 | const defaultIpfsGatewayProtocol = 'https' 18 | const defaultAttestationServerUrl = `${defaultBridgeServer}/api/attestations` 19 | const VERSION = require('.././package.json').version 20 | 21 | 
class Origin { 22 | constructor({ 23 | ipfsDomain = defaultIpfsDomain, 24 | ipfsApiPort = defaultIpfsApiPort, 25 | ipfsGatewayPort = defaultIpfsGatewayPort, 26 | ipfsGatewayProtocol = defaultIpfsGatewayProtocol, 27 | attestationServerUrl = defaultAttestationServerUrl, 28 | discoveryServerUrl = defaultDiscoveryServerUrl, 29 | affiliate, 30 | arbitrator, 31 | contractAddresses, 32 | web3, 33 | ipfsCreator, 34 | OrbitDB, 35 | ecies, 36 | messagingNamespace, 37 | blockEpoch 38 | } = {}) { 39 | this.version = VERSION 40 | 41 | this.contractService = new ContractService({ contractAddresses, web3 }) 42 | this.ipfsService = new IpfsService({ 43 | ipfsDomain, 44 | ipfsApiPort, 45 | ipfsGatewayPort, 46 | ipfsGatewayProtocol 47 | }) 48 | 49 | this.attestations = new Attestations({ 50 | serverUrl: attestationServerUrl, 51 | contractService: this.contractService, 52 | fetch, 53 | blockEpoch 54 | }) 55 | 56 | this.marketplace = new Marketplace({ 57 | contractService: this.contractService, 58 | ipfsService: this.ipfsService, 59 | affiliate, 60 | arbitrator, 61 | store, 62 | blockEpoch 63 | }) 64 | 65 | this.discovery = new Discovery({ 66 | discoveryServerUrl, 67 | fetch 68 | }) 69 | 70 | this.users = new Users({ 71 | contractService: this.contractService, 72 | ipfsService: this.ipfsService, 73 | blockEpoch 74 | }) 75 | 76 | this.messaging = new Messaging({ 77 | contractService: this.contractService, 78 | ipfsCreator, 79 | OrbitDB, 80 | ecies, 81 | messagingNamespace 82 | }) 83 | 84 | this.token = new Token({ 85 | contractService: this.contractService, 86 | ipfsService: this.ipfsService, 87 | marketplace: this.marketplace 88 | }) 89 | } 90 | } 91 | 92 | module.exports = Origin 93 | -------------------------------------------------------------------------------- /src/ipfsInterface/README.md: -------------------------------------------------------------------------------- 1 | # IPFS Interface 2 | 3 | The ipfsInterface directory contains logic and provides an interface for interacting with IPFS. This includes handling of multiple schema versions over time and maintaining backwards compatibility. 4 | 5 | ## Store 6 | TODO 7 | 8 | ## Adapters 9 | TODO 10 | 11 | ## Schemas 12 | TODO 13 | -------------------------------------------------------------------------------- /src/ipfsInterface/adapters/adapter-base.js: -------------------------------------------------------------------------------- 1 | import Ajv from 'ajv' 2 | 3 | import listingSchemaV1 from '../schemas/listing.json' 4 | import listingWithdrawnSchemaV1 from '../schemas/listing-withdraw.json' 5 | import offerSchemaV1 from '../schemas/offer.json' 6 | import offerWithdrawnSchemaV1 from '../schemas/offer-withdraw.json' 7 | import offerAcceptedSchemaV1 from '../schemas/offer-accept.json' 8 | import disputeSchemaV1 from '../schemas/dispute.json' 9 | import resolutionSchemaV1 from '../schemas/resolution.json' 10 | import profileSchemaV1 from '../schemas/profile.json' 11 | import reviewSchemaV1 from '../schemas/review.json' 12 | 13 | const ajv = new Ajv({ allErrors: true }) 14 | // To use the draft-06 JSON schema, we need to explicitly add it to ajv. 
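// (The schema files imported above declare "$schema": "http://json-schema.org/draft-06/schema#", which is why the draft-06 meta-schema is registered before addSchema() is called.)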
15 | ajv.addMetaSchema(require('ajv/lib/refs/json-schema-draft-06.json')) 16 | ajv.addSchema([ 17 | listingSchemaV1, 18 | listingWithdrawnSchemaV1, 19 | offerSchemaV1, 20 | offerWithdrawnSchemaV1, 21 | offerAcceptedSchemaV1, 22 | disputeSchemaV1, 23 | resolutionSchemaV1, 24 | profileSchemaV1, 25 | reviewSchemaV1 26 | ]) 27 | 28 | export default class AdapterBase { 29 | constructor(schemaId) { 30 | this.schemaId = schemaId 31 | } 32 | 33 | /** 34 | * Validates the data is compliant with Origin Protocol schema. 35 | * @throws {Error} If validation fails. 36 | */ 37 | validate(data) { 38 | if (data.schemaId !== this.schemaId) { 39 | throw new Error( 40 | `Unexpected schema version: ${data.schemaId} != ${ 41 | this.schemaId 42 | }` 43 | ) 44 | } 45 | 46 | const validator = ajv.getSchema(this.schemaId) 47 | if (!validator) { 48 | throw new Error(`Failed loading schema validator for ${this.schemaId}`) 49 | } 50 | if (!validator(data)) { 51 | throw new Error( 52 | `Data failed schema validation. 53 | Schema id: ${this.schemaId} 54 | Data: ${JSON.stringify(data)}. 55 | Errors: ${JSON.stringify(validator.errors)}` 56 | ) 57 | } 58 | } 59 | 60 | /** 61 | * Encodes data before storage. 62 | * This default encode method assumes data is already in proper format and just validates it. 63 | * @param data 64 | * @return {object} - Data to be written in storage. 65 | */ 66 | encode(data) { 67 | this.validate(data) 68 | return data 69 | } 70 | 71 | /** 72 | * Decodes data coming from storage. 73 | * In most cases derived class should override this default implementation which 74 | * validates the data against the schema and returns it without any alteration. 75 | * @param ipfsData 76 | */ 77 | decode(ipfsData) { 78 | this.validate(ipfsData) 79 | return Object.assign({}, ipfsData) 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /src/ipfsInterface/adapters/adapter-factory.js: -------------------------------------------------------------------------------- 1 | import DisputeAdapterV1 from './dispute/v1-dispute-adapter' 2 | import ListingAdapterV1 from './listing/v1-listing-adapter' 3 | import ListingWithdrawAdapterV1 from './listing-withdraw/v1-listing-withdraw-adapter' 4 | import OfferAdapterV1 from './offer/v1-offer-adapter' 5 | import OfferAcceptAdapterV1 from './offer-accept/v1-offer-accept-adapter' 6 | import OfferWithdrawAdapterV1 from './offer-withdraw/v1-offer-withdraw-adapter' 7 | import ProfileAdapterV1 from './profile/v1-profile-adapter' 8 | import ResolutionAdapterV1 from './resolution/v1-resolution-adapter' 9 | import ReviewAdapterV1 from './review/v1-review-adapter' 10 | 11 | const adapterConfig = { 12 | 'listing': { 13 | '1.0.0': ListingAdapterV1 14 | }, 15 | 'listing-withdraw': { 16 | '1.0.0': ListingWithdrawAdapterV1 17 | }, 18 | 'offer': { 19 | '1.0.0': OfferAdapterV1, 20 | }, 21 | 'offer-withdraw': { 22 | '1.0.0': OfferWithdrawAdapterV1 23 | }, 24 | 'offer-accept': { 25 | '1.0.0': OfferAcceptAdapterV1, 26 | }, 27 | 'dispute': { 28 | '1.0.0': DisputeAdapterV1, 29 | }, 30 | 'resolution': { 31 | '1.0.0': ResolutionAdapterV1, 32 | }, 33 | 'profile': { 34 | '1.0.0': ProfileAdapterV1, 35 | }, 36 | 'review': { 37 | '1.0.0': ReviewAdapterV1, 38 | } 39 | } 40 | 41 | /** 42 | * Returns an adapter based on a data type and version. 43 | * @param {string} schemaId - Unique ID of the schema to use. 44 | * @param {string} dataType - 'listing', 'offer', 'review', etc... 45 | * @param {string} schemaVersion - version of the schema to use. 
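 * @example (illustrative, using values from adapterConfig above and the offer schema's $id): adapterFactory('http://schema.originprotocol.com/offer_v1.0.0', 'offer', '1.0.0') returns an OfferAdapterV1 bound to that schema ID.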
46 | * @returns {SchemaAdapter} 47 | * @throws {Error} 48 | */ 49 | export default function adapterFactory(schemaId, dataType, schemaVersion) { 50 | if (!adapterConfig[dataType]) { 51 | throw new Error(`Unsupported data type: ${dataType}`) 52 | } 53 | if (!adapterConfig[dataType][schemaVersion]) { 54 | throw new Error( 55 | `Unsupported schema version ${schemaVersion} for data type ${dataType}` 56 | ) 57 | } 58 | const adapter = adapterConfig[dataType][schemaVersion] 59 | return new adapter(schemaId) 60 | } 61 | -------------------------------------------------------------------------------- /src/ipfsInterface/adapters/dispute/v1-dispute-adapter.js: -------------------------------------------------------------------------------- 1 | import AdapterBase from '../adapter-base' 2 | 3 | export default class DisputeAdapterV1 extends AdapterBase {} 4 | -------------------------------------------------------------------------------- /src/ipfsInterface/adapters/listing-withdraw/v1-listing-withdraw-adapter.js: -------------------------------------------------------------------------------- 1 | import AdapterBase from '../adapter-base' 2 | 3 | export default class ListingWithdrawAdapterV1 extends AdapterBase {} 4 | -------------------------------------------------------------------------------- /src/ipfsInterface/adapters/listing/v1-listing-adapter.js: -------------------------------------------------------------------------------- 1 | import AdapterBase from '../adapter-base' 2 | import Money from '../../../models/money' 3 | import URL from 'url-parse' 4 | 5 | export default class ListingAdapterV1 extends AdapterBase { 6 | /** 7 | * Rewrites IPFS media URLs to point to the configured IPFS gateway. 8 | * Applied after loading data from storage and decoding it. 9 | */ 10 | postProcessor(listing, ipfsService) { 11 | if (!listing.media) { 12 | return 13 | } 14 | for (const medium of listing.media) { 15 | medium.url = ipfsService.rewriteUrl(medium.url) 16 | } 17 | } 18 | 19 | /** 20 | * Uploads to IPFS content passed in as data URL. 21 | * Applied before encoding data and writing it to storage. 22 | */ 23 | async preProcessor(listing, ipfsService) { 24 | if (!listing.media) { 25 | return 26 | } 27 | 28 | // Only allow data:, dweb:, and ipfs: URLs 29 | listing.media = listing.media.filter(medium => { 30 | if (medium.url) { 31 | try { 32 | return ['data:', 'dweb:', 'ipfs:'].includes( 33 | new URL(medium.url).protocol 34 | ) 35 | } catch (error) { 36 | // Invalid URL, filter it out 37 | return false 38 | } 39 | } else { 40 | // No url. Invalid entry. 41 | return false 42 | } 43 | }) 44 | 45 | // Upload any data URL content to IPFS. 46 | const uploads = listing.media.map(async medium => { 47 | if (medium.url.startsWith('data:')) { 48 | const ipfsHash = await ipfsService.saveDataURIAsFile(medium.url) 49 | medium.url = `ipfs://${ipfsHash}` 50 | } 51 | }) 52 | return Promise.all(uploads) 53 | } 54 | 55 | /** 56 | * Populates an IpfsListing object based on listing data encoded with V1 schema. 57 | * @param {object} ipfsData - Listing data read from IPFS. 58 | * @returns {object} - Listing data. 59 | * @throws {Error} 60 | */ 61 | decode(ipfsData) { 62 | // Validate the data coming out of storage. 
63 | this.validate(ipfsData) 64 | 65 | const listing = { 66 | schemaId: ipfsData.schemaId, 67 | type: ipfsData.listingType, 68 | category: ipfsData.category, 69 | subCategory: ipfsData.subCategory, 70 | language: ipfsData.language, 71 | title: ipfsData.title, 72 | description: ipfsData.description, 73 | media: ipfsData.media, 74 | expiry: ipfsData.expiry 75 | } 76 | 77 | // Unit data. 78 | if (listing.type === 'unit') { 79 | listing.unitsTotal = ipfsData.unitsTotal 80 | listing.price = new Money(ipfsData.price) 81 | listing.commission = ipfsData.commission 82 | ? new Money(ipfsData.commission) 83 | : null 84 | } else if (listing.type === 'fractional') { 85 | // TODO(franck): fill this in. 86 | } else { 87 | throw new Error(`Unexpected listing type: ${listing.type}`) 88 | } 89 | 90 | return listing 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /src/ipfsInterface/adapters/offer-accept/v1-offer-accept-adapter.js: -------------------------------------------------------------------------------- 1 | import AdapterBase from '../adapter-base' 2 | 3 | export default class OfferAcceptAdapterV1 extends AdapterBase {} 4 | -------------------------------------------------------------------------------- /src/ipfsInterface/adapters/offer-withdraw/v1-offer-withdraw-adapter.js: -------------------------------------------------------------------------------- 1 | import AdapterBase from '../adapter-base' 2 | 3 | export default class OfferWithdrawAdapterV1 extends AdapterBase {} 4 | -------------------------------------------------------------------------------- /src/ipfsInterface/adapters/offer/v1-offer-adapter.js: -------------------------------------------------------------------------------- 1 | import AdapterBase from '../adapter-base' 2 | import Money from '../../../models/money' 3 | 4 | export default class OfferAdapterV1 extends AdapterBase { 5 | /** 6 | * Populates an IpfsOffer object based on offer data encoded using V1 schema. 7 | * @param {object} data - Listing data, expected to use schema V1. 8 | * @returns {object} - Offer data 9 | * @throws {Error} In case data validation fails. 10 | */ 11 | decode(ipfsData) { 12 | // Validate the data coming out of storage. 13 | this.validate(ipfsData) 14 | 15 | const offer = { 16 | schemaId: ipfsData.schemaId, 17 | listingType: ipfsData.listingType 18 | } 19 | 20 | // Unit data. 21 | if (offer.listingType === 'unit') { 22 | offer.unitsPurchased = ipfsData.unitsPurchased 23 | offer.totalPrice = new Money(ipfsData.totalPrice) 24 | } else if (offer.listingType === 'fractional') { 25 | // TODO(franck): fill this in. 
26 | } else { 27 | throw new Error(`Unexpected listing type: ${offer.listingType}`) 28 | } 29 | 30 | return offer 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /src/ipfsInterface/adapters/profile/v1-profile-adapter.js: -------------------------------------------------------------------------------- 1 | import AdapterBase from '../adapter-base' 2 | 3 | export default class ProfileAdapterV1 extends AdapterBase {} 4 | -------------------------------------------------------------------------------- /src/ipfsInterface/adapters/resolution/v1-resolution-adapter.js: -------------------------------------------------------------------------------- 1 | import AdapterBase from '../adapter-base' 2 | 3 | export default class ResolutionAdapterV1 extends AdapterBase {} 4 | -------------------------------------------------------------------------------- /src/ipfsInterface/adapters/review/v1-review-adapter.js: -------------------------------------------------------------------------------- 1 | import AdapterBase from '../adapter-base' 2 | 3 | export default class ReviewAdapterV1 extends AdapterBase {} 4 | -------------------------------------------------------------------------------- /src/ipfsInterface/schemas/dispute.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-06/schema#", 3 | "$id": "http://schema.originprotocol.com/dispute_v1.0.0", 4 | "title": "Origin Protocol offer dispute schema", 5 | "type": "object", 6 | "properties": { 7 | "schemaId": { 8 | "type": "string" 9 | } 10 | }, 11 | "required": ["schemaId"] 12 | } -------------------------------------------------------------------------------- /src/ipfsInterface/schemas/listing-withdraw.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-06/schema#", 3 | "$id": "http://schema.originprotocol.com/listing-withdraw_v1.0.0", 4 | "title": "Origin Protocol listing withdrawal schema", 5 | "type": "object", 6 | "properties": { 7 | "schemaId": { 8 | "type": "string" 9 | } 10 | }, 11 | "required": ["schemaId"] 12 | } -------------------------------------------------------------------------------- /src/ipfsInterface/schemas/notification.json: -------------------------------------------------------------------------------- 1 | { 2 | "$id":"http://json-schema.org/draft-06/schema", 3 | "title": "Notification", 4 | "type": "object", 5 | "required": ["id", "type", "status"], 6 | "properties": { 7 | "id": { 8 | "type": "string" 9 | }, 10 | "type": { 11 | "type": "string" 12 | }, 13 | "status": { 14 | "type": "string", 15 | "enum": [ 16 | "read", 17 | "unread" 18 | ] 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /src/ipfsInterface/schemas/offer-accept.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-06/schema#", 3 | "$id": "http://schema.originprotocol.com/offer-accept_v1.0.0", 4 | "title": "Origin Protocol offer acceptance schema", 5 | "type": "object", 6 | "properties": { 7 | "schemaId": { 8 | "type": "string" 9 | } 10 | }, 11 | "required": ["schemaId"] 12 | } -------------------------------------------------------------------------------- /src/ipfsInterface/schemas/offer-withdraw.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": 
"http://json-schema.org/draft-06/schema#", 3 | "$id": "http://schema.originprotocol.com/offer-withdraw_v1.0.0", 4 | "title": "Origin Protocol offer withdrawal schema", 5 | "type": "object", 6 | "properties": { 7 | "schemaId": { 8 | "type": "string" 9 | } 10 | }, 11 | "required": ["schemaId"] 12 | } -------------------------------------------------------------------------------- /src/ipfsInterface/schemas/offer.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-06/schema#", 3 | "$id": "http://schema.originprotocol.com/offer_v1.0.0", 4 | "title": "Origin Protocol offer schema", 5 | "definitions": { 6 | "money": { 7 | "type": "object", 8 | "title": "An amount of money in a specified currency", 9 | "properties": { 10 | "currency": { 11 | "type": "string" 12 | }, 13 | "amount": { 14 | "type": "string" 15 | } 16 | }, 17 | "required": [ 18 | "currency", 19 | "amount" 20 | ] 21 | }, 22 | "timeSlot": { 23 | "type": "object", 24 | "required": [ 25 | "startDate", 26 | "endDate" 27 | ], 28 | "properties": { 29 | "startDate": { 30 | "type": "string", 31 | "format": "date-time" 32 | }, 33 | "endDate": { 34 | "type": "string", 35 | "format": "date-time" 36 | } 37 | }, 38 | "required": [ 39 | "startDate", 40 | "endDate" 41 | ] 42 | }, 43 | "unitOffer": { 44 | "type": "object", 45 | "properties": { 46 | "unitsPurchased": { 47 | "type": "integer" 48 | } 49 | }, 50 | "required": [ 51 | "unitsPurchased" 52 | ] 53 | }, 54 | "fractionalOffer": { 55 | "type": "object", 56 | "properties": { 57 | "slots": { 58 | "type": "array", 59 | "items": { 60 | "$ref": "#/definitions/timeSlot" 61 | } 62 | } 63 | }, 64 | "required": [ 65 | "slots" 66 | ] 67 | } 68 | }, 69 | "type": "object", 70 | "properties": { 71 | "schemaId": { 72 | "type": "string" 73 | }, 74 | "listingType": { 75 | "enum": [ 76 | "unit", 77 | "fractional" 78 | ] 79 | }, 80 | "totalPrice": { 81 | "$ref": "#/definitions/money" 82 | }, 83 | "commission": { 84 | "$ref": "#/definitions/money" 85 | } 86 | }, 87 | "oneOf": [ 88 | { 89 | "$ref": "#/definitions/unitOffer" 90 | }, 91 | { 92 | "$ref": "#/definitions/fractionalOffer" 93 | } 94 | ], 95 | "required": [ 96 | "schemaId", 97 | "listingType", 98 | "totalPrice", 99 | "commission" 100 | ] 101 | } 102 | -------------------------------------------------------------------------------- /src/ipfsInterface/schemas/profile.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-06/schema#", 3 | "$id": "http://schema.originprotocol.com/profile_v1.0.0", 4 | "title": "Origin Protocol user profile schema", 5 | "type": "object", 6 | "properties": { 7 | "schemaId": { 8 | "type": "string" 9 | }, 10 | "firstName": { 11 | "type": "string" 12 | }, 13 | "lastName": { 14 | "type": "string" 15 | }, 16 | "description": { 17 | "type": "string" 18 | }, 19 | "avatar": { 20 | "type": "string" 21 | } 22 | }, 23 | "required": [ 24 | "schemaId", 25 | "firstName", 26 | "lastName" 27 | ] 28 | } 29 | -------------------------------------------------------------------------------- /src/ipfsInterface/schemas/resolution.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-06/schema#", 3 | "$id": "http://schema.originprotocol.com/resolution_v1.0.0", 4 | "title": "Origin Protocol offer resolution schema", 5 | "type": "object", 6 | "properties": { 7 | "schemaId": { 8 | "type": "string" 9 | } 10 
| }, 11 | "required": ["schemaId"] 12 | } -------------------------------------------------------------------------------- /src/ipfsInterface/schemas/review.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-06/schema#", 3 | "$id": "http://schema.originprotocol.com/review_v1.0.0", 4 | "title": "Origin Protocol review schema", 5 | "type": "object", 6 | "properties": { 7 | "schemaId": { 8 | "type": "string" 9 | }, 10 | "rating": { 11 | "type": "number" 12 | }, 13 | "text": { 14 | "type": "string" 15 | } 16 | }, 17 | "required": [ 18 | "schemaId", 19 | "rating", 20 | "text" 21 | ] 22 | } -------------------------------------------------------------------------------- /src/models/attestation.js: -------------------------------------------------------------------------------- 1 | const topicMapping = { 2 | 3: 'facebook', 3 | 4: 'twitter', 4 | 5: 'airbnb', 5 | 10: 'phone', 6 | 11: 'email' 7 | } 8 | 9 | export default class Attestation { 10 | constructor({ topic, data, signature }) { 11 | topic = Number(topic) 12 | this.topic = topic 13 | this.service = topicMapping[topic] 14 | this.data = data 15 | this.signature = signature 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /src/models/listing.js: -------------------------------------------------------------------------------- 1 | // 2 | // Listing is the main object exposed by Origin Protocol to access listing data. 3 | // 4 | export class Listing { 5 | /** 6 | * A Listing is constructed based on its on-chain and off-chain data. 7 | * @param {string} listingId - Unique listing ID. 8 | * @param {Object} chainListing - Listing data from the blockchain. 9 | * @param {Object} ipfsListing - Listing data from IPFS. 10 | */ 11 | constructor(listingId, chainListing, ipfsListing) { 12 | this.id = listingId 13 | // FIXME(franck): Exposing directly the chain data will make it difficult 14 | // to support backward compatibility of the Listing interface in the future. We should 15 | // select and possibly abstract what data from the chain gets exposed. 16 | Object.assign(this, ipfsListing, chainListing) 17 | } 18 | 19 | get unitsSold() { 20 | // Lazy caching. 21 | if (this._unitsSold !== undefined) { 22 | return this._unitsSold 23 | } 24 | this._unitsSold = Object.keys(this.offers).reduce((acc, offerId) => { 25 | if (this.offers[offerId].status === 'created') { 26 | return acc + 1 27 | } 28 | // TODO: we need to subtract 1 for every offer that is canceled 29 | return acc 30 | }, 0) 31 | return this._unitsSold 32 | } 33 | 34 | get unitsRemaining() { 35 | // Should never be negative. 36 | return Math.max(this.unitsTotal - this.unitsSold, 0) 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /src/models/money.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Helper class for handling money. 3 | * TODO(franck): 4 | * 1. Use BigNumber for handling amounts 5 | * 2. Implement operations such as mutiply, add, etc... 
6 | */ 7 | class Money { 8 | constructor({ amount, currency }) { 9 | this.currency = currency 10 | this.amount = String(amount) 11 | } 12 | } 13 | 14 | export default Money 15 | -------------------------------------------------------------------------------- /src/models/notification.js: -------------------------------------------------------------------------------- 1 | const unreadStatus = 'unread' 2 | const readStatus = 'read' 3 | const notificationStatuses = [unreadStatus, readStatus] 4 | 5 | const storeKeys = { 6 | notificationSubscriptionStart: 'notification_subscription_start', 7 | notificationStatuses: 'notification_statuses' 8 | } 9 | 10 | class Notification { 11 | constructor({ id, event, type, status, resources = {} } = {}) { 12 | this.id = id 13 | this.event = event 14 | this.type = type 15 | this.status = status 16 | this.resources = resources 17 | } 18 | } 19 | 20 | module.exports = { 21 | Notification, 22 | readStatus, 23 | unreadStatus, 24 | notificationStatuses, 25 | storeKeys 26 | } 27 | -------------------------------------------------------------------------------- /src/models/offer.js: -------------------------------------------------------------------------------- 1 | // 2 | // Offer is the main object exposed by Origin Protocol to access offer data. 3 | // 4 | export class Offer { 5 | /** 6 | * An Offer is constructed based on its on-chain and off-chain data. 7 | * @param {string} offerId - Unique offer ID. 8 | * @param {string} listingId - Unique listing ID. 9 | * @param {Object} chainOffer - Offer data from the blockchain. 10 | * @param {Object} ipfsOffer - Offer data from IPFS. 11 | */ 12 | constructor(offerId, listingId, chainOffer, ipfsOffer) { 13 | this.id = offerId 14 | this.listingId = listingId 15 | this.status = chainOffer.status // 'created', 'accepted', 'disputed', 'finalized', 'sellerReviewed' 16 | this.createdAt = chainOffer.createdAt // Time in seconds since epoch. 17 | this.buyer = chainOffer.buyer 18 | this.events = chainOffer.events 19 | this.refund = chainOffer.refund 20 | 21 | // See src/schemas/offer.json for fields stored in IPFS offer data. 22 | Object.assign(this, ipfsOffer) 23 | } 24 | 25 | /** 26 | * Gets an event based on its name. 27 | * @param {string} name - Event name, as emitted by marketplace contract. Ex: 'OfferCreated'. 28 | * @return First event object found matching the name or undefined. 29 | */ 30 | event(name) { 31 | return this.events.find(l => l.event === name) 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /src/models/review.js: -------------------------------------------------------------------------------- 1 | // 2 | // Review is the main object exposed by Origin Protocol to access buyer's review data. 3 | // 4 | export class Review { 5 | /** 6 | * Constructs a review based on blockchain and IPFS data. 7 | * @param {string} listingId 8 | * @param {string} offerId 9 | * @param {object} event - Blockchain event emitted when offer finalized. 10 | * @param {object} ipfsReview - Review data stored in IPFS. 11 | */ 12 | constructor(listingId, offerId, event, ipfsReview) { 13 | this.id = event.transactionHash 14 | this.listingId = listingId 15 | this.offerId = offerId 16 | this.reviewer = event.returnValues.party // Either buyer or seller. 17 | this.timestamp = event.timestamp // Time in seconds since Epoch. 18 | this.rating = ipfsReview.rating // Number between 1 and 5. 
19 | this.text = ipfsReview.text 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /src/models/user.js: -------------------------------------------------------------------------------- 1 | export default class User { 2 | constructor({ address, profile, attestations, identityAddress } = {}) { 3 | this.address = address 4 | this.profile = profile 5 | this.attestations = attestations 6 | this.identityAddress = identityAddress 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /src/resources/discovery.js: -------------------------------------------------------------------------------- 1 | class Discovery { 2 | constructor({ discoveryServerUrl, fetch }) { 3 | this.discoveryServerUrl = discoveryServerUrl 4 | this.fetch = fetch 5 | } 6 | 7 | async query(graphQlQuery){ 8 | const url = this.discoveryServerUrl 9 | const resp = await this.fetch(url, { 10 | method: 'POST', 11 | body: JSON.stringify({ 12 | query: graphQlQuery 13 | }), 14 | headers: { 15 | 'Content-Type': 'application/json' 16 | } 17 | }, 18 | function(error){ 19 | if (error !== undefined) 20 | throw Error(`An error occurred when reaching discovery server: ${error}`) 21 | }) 22 | 23 | if(resp.status !== 200){ 24 | //TODO: also report error message here 25 | throw Error(`Discovery server returned unexpected status code ${resp.status}`) 26 | } 27 | return await resp.json() 28 | } 29 | 30 | /** 31 | * Issues a search request to the discovery server and returns the matching listings as a promise. 32 | * The resolved value is already parsed JSON (see `query()`), so callers can read `nodes`, `totalNumberOfItems` and `stats` directly. 33 | * @param searchQuery {string} general search query 34 | * @param numberOfItems {number} maximum number of listings to return, clamped to [1, 100] 35 | * @param offset {number} pagination offset, clamped to a minimum of 0 36 | * @param filters {Array<object>} filters with properties: name, value, valueType, operator 37 | * @returns {Promise} 38 | */ 39 | async search(searchQuery, numberOfItems, offset, filters = []) { 40 | // offset must not be negative 41 | offset = Math.max(offset, 0) 42 | // clamp numberOfItems between 1 and 100 43 | numberOfItems = Math.min(Math.max(numberOfItems, 1), 100) 44 | const query = ` 45 | { 46 | listings ( 47 | searchQuery: "${searchQuery}" 48 | filters: [${filters 49 | .map(filter => { 50 | return ` 51 | { 52 | name: "${filter.name}" 53 | value: "${String(filter.value)}" 54 | valueType: ${filter.valueType} 55 | operator: ${filter.operator} 56 | } 57 | ` 58 | }) 59 | .join(',')}] 60 | page:{ 61 | offset: ${offset} 62 | numberOfItems: ${numberOfItems} 63 | } 64 | ) { 65 | nodes { 66 | id 67 | } 68 | offset 69 | numberOfItems 70 | totalNumberOfItems 71 | stats { 72 | maxPrice 73 | minPrice 74 | } 75 | } 76 | }` 77 | 78 | return this.query(query) 79 | } 80 | } 81 | 82 | module.exports = Discovery 83 | -------------------------------------------------------------------------------- /src/resources/token.js: -------------------------------------------------------------------------------- 1 | import OriginTokenContract from './../../contracts/build/contracts/OriginToken.json' 2 | 3 | // Token is a light wrapper around the OriginToken contract.
4 | class Token { 5 | /** 6 | * @constructor 7 | * @param {ContractService} contractService - Contract service 8 | * @param {Marketplace} marketplace - Marketplace (to get token address) 9 | */ 10 | constructor({ contractService, marketplace }) { 11 | this.contractService = contractService 12 | // In getContract(), we will retrieve the address of the Origin token 13 | // contract from the marketplace contract. 14 | this.getTokenAddress = async function() { 15 | return await marketplace.getTokenAddress() 16 | } 17 | } 18 | 19 | /** 20 | * Returns Origin token contract, loading it from the address stored in the 21 | * Marketplace contract. This *may* return an OriginToken contract whose 22 | * implementation is newer than the Marketplace contract. This ensures that 23 | * Origin.js has forward compatibility with token contracts, as long as we 24 | * don't change or remove existing token features. 25 | * @returns OriginToken contract 26 | */ 27 | async getContract() { 28 | if (!this.contract) { 29 | this.contractAddress = await this.getTokenAddress() 30 | const web3 = this.contractService.web3 31 | this.contract = new web3.eth.Contract( 32 | OriginTokenContract.abi, 33 | this.contractAddress 34 | ) 35 | this.decimals = await this.contract.methods.decimals().call() 36 | } 37 | } 38 | 39 | /** 40 | * Returns a balance for an address. 41 | */ 42 | async balanceOf(address) { 43 | await this.getContract() 44 | return await this.contract.methods.balanceOf(address).call() 45 | } 46 | 47 | /** 48 | * Returns true if transfers and approvals of tokens are paused at the 49 | * contract level, false if not. 50 | */ 51 | async isPaused() { 52 | await this.getContract() 53 | return await this.contract.methods.paused().call() 54 | } 55 | } 56 | 57 | export default Token 58 | -------------------------------------------------------------------------------- /src/resources/users.js: -------------------------------------------------------------------------------- 1 | import UsersResolver from '../contractInterface/users/resolver' 2 | 3 | class Users { 4 | constructor({ contractService, ipfsService, blockEpoch }) { 5 | this.resolver = new UsersResolver({ contractService, ipfsService, blockEpoch }) 6 | } 7 | 8 | /* possible options values: 9 | * - confirmationCallback(confirmationCount, transactionReceipt) -> called repeatedly after a transaction is mined 10 | * - transactionHashCallback(hash) -> called immediately when the transaction hash is received 11 | */ 12 | async set({ profile, attestations = [], options = {}}) { 13 | return this.resolver.set({ profile, attestations, options }) 14 | } 15 | 16 | async get(address) { 17 | return this.resolver.get(address) 18 | } 19 | } 20 | 21 | module.exports = Users 22 | -------------------------------------------------------------------------------- /src/utils/cookieStorage.js: -------------------------------------------------------------------------------- 1 | // implementation is a modification of cookied localstorage from: https://developer.mozilla.org/en-US/docs/Web/API/Storage/LocalStorage 2 | 3 | const DEFAULT_SECONDS_TIMEOUT = 3600 * 24 * 7 4 | 5 | export default class cookieStorage { 6 | constructor({ path, expireSeconds = DEFAULT_SECONDS_TIMEOUT }) { 7 | this.path = path 8 | this.expireSeconds = expireSeconds 9 | const cookies = typeof document === 'object' && document.cookie && document.cookie.match(/=/g) 10 | this.length = cookies ? 
cookies.length : 0 11 | } 12 | 13 | getItem(sKey) { 14 | if (!sKey || !this.hasOwnProperty(sKey)) { 15 | return null 16 | } 17 | return unescape( 18 | document.cookie.replace( 19 | new RegExp( 20 | '(?:^|.*;\\s*)' + 21 | escape(sKey).replace(/[-.+*]/g, '\\$&') + 22 | '\\s*\\=\\s*((?:[^;](?!;))*[^;]?).*' 23 | ), 24 | '$1' 25 | ) 26 | ) 27 | } 28 | 29 | key(nKeyId) { 30 | return unescape( 31 | document.cookie 32 | .replace(/\s*=(?:.(?!;))*$/, '') 33 | .split(/\s*=(?:[^;](?!;))*[^;]?;\s*/)[nKeyId] 34 | ) 35 | } 36 | 37 | setItem(sKey, sValue) { 38 | if (!sKey) { 39 | return 40 | } 41 | const expires = new Date() 42 | expires.setTime(expires.getTime() + 1000 * this.expireSeconds) 43 | document.cookie = 44 | escape(sKey) + 45 | '=' + 46 | escape(sValue) + 47 | '; expires=' + 48 | expires.toGMTString() + 49 | '; path=' + 50 | this.path 51 | this.length = document.cookie.match(/=/g).length 52 | } 53 | 54 | removeItem(sKey) { 55 | if (!sKey || !this.hasOwnProperty(sKey)) { 56 | return 57 | } 58 | document.cookie = 59 | escape(sKey) + '=; expires=Thu, 01 Jan 1970 00:00:00 GMT; path=/' 60 | this.length-- 61 | } 62 | 63 | hasOwnProperty(sKey) { 64 | return new RegExp( 65 | '(?:^|;\\s*)' + escape(sKey).replace(/[-.+*]/g, '\\$&') + '\\s*\\=' 66 | ).test(document.cookie) 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /src/utils/id.js: -------------------------------------------------------------------------------- 1 | const separator = '-' 2 | 3 | function parseListingId(listingId) { 4 | if (typeof listingId !== 'string') { 5 | throw new Error(`Listing id ${listingId} must be a string`) 6 | } 7 | const exploded = listingId.split(separator) 8 | if (exploded.length < 3) { 9 | throw new Error(`Invalid listing id: ${listingId}`) 10 | } 11 | const [network, version, listingIndex] = exploded 12 | return { network, version, listingIndex } 13 | } 14 | 15 | function generateListingId({ version, network, listingIndex }) { 16 | return [network, version, listingIndex].join(separator) 17 | } 18 | 19 | function parseOfferId(offerId) { 20 | if (typeof offerId !== 'string') { 21 | throw new Error(`Offer id ${offerId} must be a string`) 22 | } 23 | const exploded = offerId.split(separator) 24 | if (exploded.length < 4) { 25 | throw new Error(`Invalid offer id: ${offerId}`) 26 | } 27 | const [network, version, listingIndex, offerIndex] = exploded 28 | return { network, version, listingIndex, offerIndex } 29 | } 30 | 31 | function generateOfferId({ version, network, listingIndex, offerIndex }) { 32 | return [network, version, listingIndex, offerIndex].join(separator) 33 | } 34 | 35 | function parseNotificationId(notificationId) { 36 | if (typeof notificationId !== 'string') { 37 | throw new Error(`Notification id ${notificationId} must be a string`) 38 | } 39 | const exploded = notificationId.split(separator) 40 | if (exploded.length < 3) { 41 | throw new Error(`Invalid notification id: ${notificationId}`) 42 | } 43 | const [network, version, transactionHash] = exploded 44 | return { network, version, transactionHash } 45 | } 46 | 47 | function generateNotificationId({ version, network, transactionHash }) { 48 | return [network, version, transactionHash].join(separator) 49 | } 50 | 51 | module.exports = { 52 | parseListingId, 53 | generateListingId, 54 | parseOfferId, 55 | generateOfferId, 56 | parseNotificationId, 57 | generateNotificationId 58 | } 59 | -------------------------------------------------------------------------------- /src/utils/retries.js: 
-------------------------------------------------------------------------------- 1 | const MAX_RETRY_WAIT_MS = 2 * 60 * 1000 2 | 3 | /** 4 | * Retries up to maxRetries times. 5 | * @param {object} opts - Options (maxRetries, verbose) 6 | * @param {function} fn - Async function to retry. 7 | * @returns - Return value of 'fn' if it succeeded. 8 | */ 9 | async function withRetries(opts, fn) { 10 | const maxRetries = opts.maxRetries || 7 11 | const verbose = opts.verbose || false 12 | 13 | let tryCount = 0 14 | while (tryCount < maxRetries) { 15 | try { 16 | return await fn() // Do our action. 17 | } catch (e) { 18 | // Double wait time each failure 19 | let waitTime = 1000 * 2**(tryCount - 1) 20 | // Randomly jiggle wait time by 20% either way. No thundering herd. 21 | waitTime = Math.floor(waitTime * (1.2 - Math.random() * 0.4)) 22 | // Max out at two minutes 23 | waitTime = Math.min(waitTime, MAX_RETRY_WAIT_MS) 24 | if (verbose) { 25 | console.log('retryable error:', e.message) 26 | console.log(`will retry in ${waitTime / 1000} seconds`) 27 | } 28 | tryCount += 1 29 | await new Promise(resolve => setTimeout(resolve, waitTime)) 30 | } 31 | } 32 | throw new Error('number of retries exceeded') 33 | } 34 | 35 | module.exports = { withRetries } 36 | -------------------------------------------------------------------------------- /test/contract-service.test.js: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai' 2 | import ContractService from '../src/services/contract-service' 3 | import { ipfsHashes } from './fixtures' 4 | import Money from '../src/models/money' 5 | import Web3 from 'web3' 6 | 7 | const methodNames = ['getBytes32FromIpfsHash', 'getIpfsHashFromBytes32'] 8 | 9 | describe('ContractService', function() { 10 | this.timeout(5000) // default is 2000 11 | 12 | let contractService 13 | 14 | beforeEach(async () => { 15 | const provider = new Web3.providers.HttpProvider('http://localhost:8545') 16 | const web3 = new Web3(provider) 17 | contractService = new ContractService({ 18 | web3, 19 | currencies: { 20 | FOO: { address: '0x1234', decimals: 3 }, 21 | BAR: { address: '0x1234' } 22 | } 23 | }) 24 | }) 25 | 26 | methodNames.forEach(methodName => { 27 | it(`should have ${methodName} method`, () => { 28 | expect(contractService[methodName]).to.be.an.instanceof(Function) 29 | }) 30 | }) 31 | 32 | describe('getBytes32FromIpfsHash', () => { 33 | ipfsHashes.forEach(({ ipfsHash, bytes32 }) => { 34 | it(`should correctly convert from IPFS hash ${ipfsHash}`, () => { 35 | const result = contractService.getBytes32FromIpfsHash(ipfsHash) 36 | expect(result).to.equal(bytes32) 37 | }) 38 | }) 39 | }) 40 | 41 | describe('getIpfsHashFromBytes32', () => { 42 | ipfsHashes.forEach(({ ipfsHash, bytes32 }) => { 43 | it(`should correctly convert to IPFS hash ${ipfsHash}`, () => { 44 | const result = contractService.getIpfsHashFromBytes32(bytes32) 45 | expect(result).to.equal(ipfsHash) 46 | }) 47 | }) 48 | }) 49 | 50 | describe('moneyToUnits', () => { 51 | beforeEach(async () => { 52 | contractService._currencies = { 53 | FOO: { address: '0x1234', decimals: 3 }, 54 | BAR: { address: '0x1234' } 55 | } 56 | }) 57 | 58 | it(`should handle ERC20 token`, async () => { 59 | const money = new Money({ amount: 123, currency: 'FOO' }) 60 | const units = await contractService.moneyToUnits(money) 61 | expect(units).to.equal('123000') 62 | }) 63 | 64 | it(`should handle ETH`, async () => { 65 | const money = new Money({ amount: 123, currency: 'ETH' }) 66 | const 
units = await contractService.moneyToUnits(money) 67 | expect(units).to.equal('123000000000000000000') 68 | }) 69 | 70 | it(`should handle undefined currency decimals`, async () => { 71 | const money = new Money({ amount: 123, currency: 'BAR' }) 72 | const units = await contractService.moneyToUnits(money) 73 | expect(units).to.equal('123') 74 | }) 75 | }) 76 | 77 | describe('passing in contract addresses', () => { 78 | it('should allow contract addresses to be overridden', () => { 79 | const web3 = new Web3() 80 | const userAddress = '0x1234567890123456789012345678901234567890' 81 | const contractAddresses = { 82 | V00_UserRegistry: { 4: { address: userAddress } } 83 | } 84 | 85 | const contSrv = new ContractService({ web3, contractAddresses }) 86 | 87 | expect(contSrv.contracts.V00_UserRegistry.networks[4].address).to.equal( 88 | userAddress 89 | ) 90 | }) 91 | }) 92 | 93 | describe('currencies', () => { 94 | it('should include OGN', async () => { 95 | const currencies = await contractService.currencies() 96 | expect(currencies).to.be.an('object') 97 | const OGN = currencies.OGN 98 | expect(OGN).to.be.an('object') 99 | expect(OGN.address).to.be.a('string') 100 | expect(OGN.address).to.include('0x') 101 | expect(OGN.decimals).to.equal(18) 102 | }) 103 | }) 104 | }) 105 | -------------------------------------------------------------------------------- /test/fixtures.js: -------------------------------------------------------------------------------- 1 | export const ipfsHashes = [ 2 | { 3 | ipfsHash: "QmYwAPJzv5CZsnA625s3Xf2nemtYgPpHdWEz79ojWnPbdG", 4 | bytes32: 5 | "0x9d6c2be50f706953479ab9df2ce3edca90b68053c00b3004b7f0accbe1e8eedf", 6 | url: { 7 | default: 8 | "https://gateway.originprotocol.com/ipfs/QmYwAPJzv5CZsnA625s3Xf2nemtYgPpHdWEz79ojWnPbdG", 9 | local: 10 | "http://127.0.0.1:8080/ipfs/QmYwAPJzv5CZsnA625s3Xf2nemtYgPpHdWEz79ojWnPbdG" 11 | } 12 | }, 13 | { 14 | ipfsHash: "QmWHyrPWQnsz1wxHR219ooJDYTvxJPyZuDUPSDpdsAovN5", 15 | bytes32: 16 | "0x762e65d7166d5cf0eed8541a0b55a79002774f6ef086619202ef749a1bf8f3ba", 17 | url: { 18 | default: 19 | "https://gateway.originprotocol.com/ipfs/QmWHyrPWQnsz1wxHR219ooJDYTvxJPyZuDUPSDpdsAovN5", 20 | local: 21 | "http://127.0.0.1:8080/ipfs/QmWHyrPWQnsz1wxHR219ooJDYTvxJPyZuDUPSDpdsAovN5" 22 | } 23 | } 24 | ] 25 | 26 | export const listings = [ 27 | { 28 | data: { 29 | foo: "bar" 30 | }, 31 | ipfsHash: "Qmbjig3cZbUUufWqCEFzyCppqdnmQj3RoDjJWomnqYGy1f" 32 | } 33 | ] 34 | -------------------------------------------------------------------------------- /test/fixtures/hawaii-house/hawaii-house.json: -------------------------------------------------------------------------------- 1 | { 2 | "schemaId": "http://schema.originprotocol.com/listing_v1.0.0", 3 | "listingType": "unit", 4 | "category": "schema.housing", 5 | "subCategory": "schema.housing.vacationRentals", 6 | "language": "en-US", 7 | "title": "MAMALAHOA ESTATE", 8 | "description": "Built on the slopes of Hualalai Mountain in Kailua, Hawaii, the Mamalahoa Estate knows how to make a first impression. You enter the property through a grove of citrus and macadamia trees. A floating walkway takes you across a koi pond, surrounded by lush greenery and a waterfall. Once inside, the 5,391-square-foot home is comprised of a master and two guest suites, each with a private staircase leading down to the garden courtyard. A chef's kitchen with koa cabinetry looks onto a double-height living area. Flanked by sliding doors, the room opens to a veranda that overlooks two swimming pools and the Kona coastline. 
Consisting of 90 acres, the grounds also feature a driving range, tennis court, bocce courts, and a three-car garage.", 9 | "unitsTotal": 1, 10 | "price": { 11 | "currency": "ETH", 12 | "amount": "8.5" 13 | }, 14 | "commission": { 15 | "currency": "OGN", 16 | "amount": "10" 17 | } 18 | } -------------------------------------------------------------------------------- /test/fixtures/hawaii-house/image-1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/hawaii-house/image-1.jpg -------------------------------------------------------------------------------- /test/fixtures/hawaii-house/image-2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/hawaii-house/image-2.jpg -------------------------------------------------------------------------------- /test/fixtures/hawaii-house/image-3.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/hawaii-house/image-3.jpg -------------------------------------------------------------------------------- /test/fixtures/hawaii-house/image-4.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/hawaii-house/image-4.jpg -------------------------------------------------------------------------------- /test/fixtures/hawaii-house/image-5.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/hawaii-house/image-5.jpg -------------------------------------------------------------------------------- /test/fixtures/lake-house/image-1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/lake-house/image-1.jpg -------------------------------------------------------------------------------- /test/fixtures/lake-house/image-2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/lake-house/image-2.jpg -------------------------------------------------------------------------------- /test/fixtures/lake-house/image-3.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/lake-house/image-3.jpg -------------------------------------------------------------------------------- /test/fixtures/lake-house/image-4.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/lake-house/image-4.jpg -------------------------------------------------------------------------------- /test/fixtures/lake-house/image-5.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/lake-house/image-5.jpg -------------------------------------------------------------------------------- /test/fixtures/lake-house/image-6.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/lake-house/image-6.jpg -------------------------------------------------------------------------------- /test/fixtures/lake-house/lake-house.json: -------------------------------------------------------------------------------- 1 | { 2 | "schemaId": "http://schema.originprotocol.com/listing_v1.0.0", 3 | "listingType": "unit", 4 | "category": "schema.housing", 5 | "subCategory": "schema.housing.vacationRentals", 6 | "language": "en-US", 7 | "title": "Casa Wolf", 8 | "description": "Overlooking Lake Llanquihue, Casa Wulf is inspired by the terrain. The home sits on a steep slope. This lead to its three-story design, creating a natural balcony facing the water. Among the levels, the main living area is at the center, with the bedrooms above and a basement workshop below. Each floor was constructed using a different system, resulting in a range of facades. Their orientation takes advantage of the incoming sunlight and while also exposing the interiors to the surrounding landscape.", 9 | "unitsTotal": 1, 10 | "price": { 11 | "currency": "ETH", 12 | "amount": "1.5" 13 | }, 14 | "commission": { 15 | "currency": "OGN", 16 | "amount": "10" 17 | } 18 | } -------------------------------------------------------------------------------- /test/fixtures/listing-valid.json: -------------------------------------------------------------------------------- 1 | { 2 | "schemaId": "http://schema.originprotocol.com/listing_v1.0.0", 3 | "listingType": "unit", 4 | "category": "schema.forSale", 5 | "subCategory": "schema.forSale.mushrooms", 6 | "language": "en-US", 7 | "title": "my listing", 8 | "description": "my description", 9 | "expiry": "1996-12-19T16:39:57-08:00", 10 | "media": [ 11 | { 12 | "url": "ipfs://ddfsfsdsdsddf", 13 | "contentType": "image/png", 14 | "dimension": { 15 | "height": 300, 16 | "width": 600 17 | } 18 | }, 19 | { 20 | "url": "ipfs://ddfsfsdsdsddf", 21 | "contentType": "image/jpeg", 22 | "dimension": { 23 | "height": 300, 24 | "width": 600 25 | } 26 | } 27 | ], 28 | "unitsTotal": 1, 29 | "price": { 30 | "currency": "ETH", 31 | "amount": "0.033" 32 | }, 33 | "commission": { 34 | "currency": "OGN", 35 | "amount": "0" 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /test/fixtures/offer-valid.json: -------------------------------------------------------------------------------- 1 | { 2 | "schemaId": "http://schema.originprotocol.com/offer_v1.0.0", 3 | "listingType": "unit", 4 | "unitsPurchased": 1, 5 | "totalPrice": { 6 | "currency": "ETH", 7 | "amount": "0.033" 8 | }, 9 | "commission": { 10 | "currency": "OGN", 11 | "amount": "0" 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /test/fixtures/profile-valid.json: -------------------------------------------------------------------------------- 1 | { 2 | "schemaId": "http://schema.originprotocol.com/profile_v1.0.0", 3 | "firstName": "originus", 4 | "lastName": "protocolus", 5 | "description": "Semper Fidelis", 6 | "avatar": "data:,Avatar" 7 | } 8 | 
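The *-valid.json fixtures in test/fixtures/ are shaped to satisfy the draft-06 JSON Schemas under src/ipfsInterface/schemas/ shown earlier. The validation wiring itself is not part of this excerpt; purely as a sketch — assuming the ajv package and these relative paths, not necessarily what origin-js actually uses — checking a fixture against its schema could look like:

const Ajv = require('ajv')

// ajv v6 defaults to draft-07, so the draft-06 meta-schema declared by these
// schemas has to be registered explicitly.
const ajv = new Ajv({ allErrors: true })
ajv.addMetaSchema(require('ajv/lib/refs/json-schema-draft-06.json'))

const profileSchema = require('../src/ipfsInterface/schemas/profile.json')
const profile = require('./fixtures/profile-valid.json')

const validate = ajv.compile(profileSchema)
if (!validate(profile)) {
  // validate.errors lists any missing required fields or type mismatches.
  console.error(validate.errors)
}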
-------------------------------------------------------------------------------- /test/fixtures/review-valid.json: -------------------------------------------------------------------------------- 1 | { 2 | "schemaId": "http://schema.originprotocol.com/review_v1.0.0", 3 | "rating": 3, 4 | "text": "Good stuff" 5 | } -------------------------------------------------------------------------------- /test/fixtures/scout/image-1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/scout/image-1.jpg -------------------------------------------------------------------------------- /test/fixtures/scout/image-2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/scout/image-2.jpg -------------------------------------------------------------------------------- /test/fixtures/scout/image-3.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/scout/image-3.jpg -------------------------------------------------------------------------------- /test/fixtures/scout/image-4.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/scout/image-4.jpg -------------------------------------------------------------------------------- /test/fixtures/scout/scout.json: -------------------------------------------------------------------------------- 1 | { 2 | "schemaId": "http://schema.originprotocol.com/listing_v1.0.0", 3 | "listingType": "unit", 4 | "category": "schema.forSale", 5 | "subCategory": "schema.forSale.carsTrucks", 6 | "language": "en-US", 7 | "title": "1977 INTERNATIONAL SCOUT II", 8 | "description": "Introduced in 1971, the International Scout II rode on a stretched-wheelbase version of the rugged Scout chassis as a competitor to trucks like the larger Chevrolet Blazer. The highly customizable Scout was popular for work and racing, taking home a class win in the 1977 Baja 1000. 
This restored beautifully restored 1977 Scout II's customizations run more than skin deep, with a 6.0-liter GM engine and transmission to go along with the wheels and suspension lift.", 9 | "unitsTotal": 1, 10 | "price": { 11 | "currency": "ETH", 12 | "amount": "0.6" 13 | }, 14 | "commission": { 15 | "currency": "OGN", 16 | "amount": "10" 17 | } 18 | } -------------------------------------------------------------------------------- /test/fixtures/taylor-swift-tix/image-1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/taylor-swift-tix/image-1.jpg -------------------------------------------------------------------------------- /test/fixtures/taylor-swift-tix/image-2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/taylor-swift-tix/image-2.jpg -------------------------------------------------------------------------------- /test/fixtures/taylor-swift-tix/image-3.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/taylor-swift-tix/image-3.jpg -------------------------------------------------------------------------------- /test/fixtures/taylor-swift-tix/image-4.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/taylor-swift-tix/image-4.jpg -------------------------------------------------------------------------------- /test/fixtures/taylor-swift-tix/image-5.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/taylor-swift-tix/image-5.jpg -------------------------------------------------------------------------------- /test/fixtures/taylor-swift-tix/taylor-swift-tix.json: -------------------------------------------------------------------------------- 1 | { 2 | "schemaId": "http://schema.originprotocol.com/listing_v1.0.0", 3 | "listingType": "unit", 4 | "category": "schema.tickets", 5 | "subCategory": "schema.tickets.music", 6 | "language": "en-US", 7 | "title": "Taylor Swift's Reputation Tour", 8 | "description": "Taylor Swift's Reputation Stadium Tour is the fifth world concert tour by American singer-songwriter Taylor Swift, in support of her sixth studio album, Reputation.", 9 | "unitsTotal": 1, 10 | "price": { 11 | "currency": "ETH", 12 | "amount": "0.3" 13 | }, 14 | "commission": { 15 | "currency": "OGN", 16 | "amount": "10" 17 | } 18 | } -------------------------------------------------------------------------------- /test/fixtures/zinc-house/image-1: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/zinc-house/image-1 -------------------------------------------------------------------------------- /test/fixtures/zinc-house/image-1.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/zinc-house/image-1.jpg -------------------------------------------------------------------------------- /test/fixtures/zinc-house/image-10.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/zinc-house/image-10.jpg -------------------------------------------------------------------------------- /test/fixtures/zinc-house/image-2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/zinc-house/image-2.jpg -------------------------------------------------------------------------------- /test/fixtures/zinc-house/image-3.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/zinc-house/image-3.jpg -------------------------------------------------------------------------------- /test/fixtures/zinc-house/image-4.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/zinc-house/image-4.jpg -------------------------------------------------------------------------------- /test/fixtures/zinc-house/image-5.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/zinc-house/image-5.jpg -------------------------------------------------------------------------------- /test/fixtures/zinc-house/image-7.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/zinc-house/image-7.jpg -------------------------------------------------------------------------------- /test/fixtures/zinc-house/image-8.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/zinc-house/image-8.jpg -------------------------------------------------------------------------------- /test/fixtures/zinc-house/image-9.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/test/fixtures/zinc-house/image-9.jpg -------------------------------------------------------------------------------- /test/fixtures/zinc-house/zinc-house.json: -------------------------------------------------------------------------------- 1 | { 2 | "schemaId": "http://schema.originprotocol.com/listing_v1.0.0", 3 | "listingType": "unit", 4 | "category": "schema.housing", 5 | "subCategory": "schema.housing.vacationRentals", 6 | "language": "en-US", 7 | "title": "Zinc House", 8 | "description": "Overlooking Lake Llanquihue, Casa Wulf is inspired by the terrain. The home sits on a steep slope. This lead to its three-story design, creating a natural balcony facing the water. 
Among the levels, the main living area is at the center, with the bedrooms above and a basement workshop below. Each floor was constructed using a different system, resulting in a range of facades. Their orientation takes advantage of the incoming sunlight and while also exposing the interiors to the surrounding landscape.", 9 | "unitsTotal": 1, 10 | "price": { 11 | "currency": "ETH", 12 | "amount": "3.999" 13 | }, 14 | "commission": { 15 | "currency": "OGN", 16 | "amount": "10" 17 | } 18 | } -------------------------------------------------------------------------------- /test/helpers/as-account.js: -------------------------------------------------------------------------------- 1 | export default async function asAccount(web3, account, fn) { 2 | const accounts = await web3.eth.getAccounts() 3 | const accountBefore = web3.eth.defaultAccount 4 | web3.eth.defaultAccount = account 5 | const result = await fn() 6 | web3.eth.defaultAccount = accountBefore 7 | return result 8 | } 9 | -------------------------------------------------------------------------------- /test/helpers/contract-service-helper.js: -------------------------------------------------------------------------------- 1 | import ContractService from '../../src/services/contract-service' 2 | import V00_Marketplace from './../../contracts/build/contracts/V00_Marketplace.json' 3 | 4 | /* 5 | Returns a contract service instance with a clean marketplace contract 6 | 7 | This creates a clean environment for testing without side effects. 8 | */ 9 | 10 | export default async function contractServiceHelper(web3) { 11 | const accounts = await web3.eth.getAccounts() 12 | const dummyContractService = new ContractService({ web3 }) 13 | const owner = accounts[0] 14 | 15 | const originToken = await dummyContractService.deployed( 16 | dummyContractService.contracts['OriginToken'] 17 | ) 18 | 19 | // Deploy clean listings registry for testing without side effects 20 | const receipt = await dummyContractService.deploy( 21 | dummyContractService.contracts['V00_Marketplace'], 22 | [originToken.options.address], 23 | { from: owner, gas: 4000000 } 24 | ) 25 | 26 | const v00_marketplace = new web3.eth.Contract(V00_Marketplace.abi, receipt.contractAddress) 27 | await v00_marketplace.methods.addAffiliate( 28 | accounts[3], 29 | '0x0000000000000000000000000000000000000000000000000000000000000000' 30 | ).send({ from: owner }) 31 | 32 | const decimals = await dummyContractService.call( 33 | 'OriginToken', 34 | 'decimals' 35 | ) 36 | 37 | // approve usage of tokens by marketplace contract 38 | for (let i = 0; i < 10; i++) { 39 | await dummyContractService.call( 40 | 'OriginToken', 41 | 'approve', 42 | [ receipt.contractAddress, String(100 * 10**decimals) ], 43 | { from: accounts[i] } 44 | ) 45 | } 46 | 47 | await originToken.methods.addCallSpenderWhitelist(receipt.contractAddress).send({ from: accounts[0], gas: 4000000 }) 48 | 49 | return new ContractService({ 50 | web3, 51 | contractAddresses: { 52 | V00_Marketplace: { 53 | 999: { address: receipt.contractAddress } 54 | } 55 | }, 56 | currencies: { OGN: { address: originToken.options.address, decimals } } 57 | }) 58 | } 59 | -------------------------------------------------------------------------------- /test/index.js: -------------------------------------------------------------------------------- 1 | // Skip execution in Node 2 | if (module.hot) { 3 | const context = require.context( 4 | "mocha-loader!./", // Process through mocha-loader 5 | false, // Skip recursive processing 6 | /\.test\.js$/ // Pick 
only files ending with .test.js 7 | ) 8 | 9 | // Execute each test suite 10 | context.keys().forEach(context) 11 | } 12 | -------------------------------------------------------------------------------- /test/model-listing.test.js: -------------------------------------------------------------------------------- 1 | import chai from 'chai' 2 | 3 | import { Listing } from '../src/models/listing' 4 | 5 | const expect = chai.expect 6 | 7 | describe('Listing', () => { 8 | it(`unitsRemaining should equal to unitsTotal when no offer`, () => { 9 | const chainListing = { offers: [] } 10 | const ipfsListing = { unitsTotal: 10 } 11 | const listing = new Listing('FakeID', chainListing, ipfsListing) 12 | expect(listing.unitsRemaining).to.equal(10) 13 | expect(listing.unitsSold).to.equal(0) 14 | }) 15 | 16 | it(`unitsRemaining should be unitsTotal - unitsSold`, () => { 17 | const chainListing = { 18 | offers: { 19 | offerId1: { status: 'created' }, 20 | offerId2: { status: 'accepted' } 21 | } 22 | } 23 | const ipfsListing = { unitsTotal: 10 } 24 | const listing = new Listing('FakeID', chainListing, ipfsListing) 25 | expect(listing.unitsRemaining).to.equal(9) 26 | expect(listing.unitsSold).to.equal(1) 27 | }) 28 | }) 29 | -------------------------------------------------------------------------------- /test/resource_messaging.test.js: -------------------------------------------------------------------------------- 1 | import Messaging from '../src/resources/messaging' 2 | import chai, { expect } from 'chai' 3 | chai.use(require('chai-string')) 4 | 5 | import Web3 from 'web3' 6 | import contractServiceHelper from './helpers/contract-service-helper' 7 | 8 | describe('Messaging Resource', function() { 9 | let web3, contractService, messaging, ipfsCreator, OrbitDB, ecies, messagingNamespace 10 | beforeEach(async () => { 11 | const provider = new Web3.providers.HttpProvider('http://localhost:8545') 12 | web3 = new Web3(provider) 13 | // these should be all mocked 14 | ipfsCreator = new Object() 15 | OrbitDB = new Object() 16 | ecies = new Object() 17 | messagingNamespace = 'messaging_test' 18 | contractService = await contractServiceHelper(web3) 19 | messaging = new Messaging({ 20 | contractService, 21 | ipfsCreator, 22 | OrbitDB, 23 | ecies, 24 | messagingNamespace 25 | }) 26 | }) 27 | 28 | describe('init', () => { 29 | it('should be instantiated', () => { 30 | expect(typeof messaging).to.equal('object') 31 | }) 32 | }) 33 | }) 34 | -------------------------------------------------------------------------------- /test/resource_origin_token_test.js: -------------------------------------------------------------------------------- 1 | import ContractService from '../src/services/contract-service' 2 | import Token from '../src/resources/token.js' 3 | 4 | import OriginTokenContract from '../contracts/build/contracts/OriginToken.json' 5 | 6 | import assert from 'assert' 7 | 8 | import Web3 from 'web3' 9 | 10 | class MarketplaceMock { 11 | constructor(tokenAddress) { 12 | this.tokenAddress = tokenAddress 13 | } 14 | 15 | async getTokenAddress() { 16 | return this.tokenAddress 17 | } 18 | } 19 | 20 | describe('Origin Token Resource', function() { 21 | const initialSupply = 100 22 | let TokenResource, OriginToken 23 | let accounts, owner 24 | 25 | this.timeout(5000) // default is 2000 26 | 27 | beforeEach(async function() { 28 | const web3 = new Web3( 29 | new Web3.providers.HttpProvider('http://localhost:8545') 30 | ) 31 | accounts = await web3.eth.getAccounts() 32 | owner = accounts[0] 33 | 34 | const 
contractService = new ContractService({ web3 }) 35 | const deployReceipt = await contractService.deploy( 36 | contractService.contracts['OriginToken'], 37 | [initialSupply], 38 | { from: owner, gas: 4000000 } 39 | ) 40 | const marketplace = new MarketplaceMock(deployReceipt.contractAddress) 41 | TokenResource = new Token({ contractService, marketplace }) 42 | OriginToken = new web3.eth.Contract( 43 | OriginTokenContract.abi, 44 | deployReceipt.contractAddress 45 | ) 46 | }) 47 | 48 | it('returns balance of owner', async function() { 49 | assert.equal(await TokenResource.balanceOf(owner), initialSupply) 50 | }) 51 | 52 | it('returns correct balances after transfer', async function() { 53 | const other = accounts[1] 54 | const amount = 3 55 | assert.notEqual(owner, other) 56 | await OriginToken.methods.transfer(other, amount).send({ from: owner }) 57 | assert.equal(await TokenResource.balanceOf(owner), initialSupply - amount) 58 | assert.equal(await TokenResource.balanceOf(other), amount) 59 | }) 60 | 61 | it('does not start paused', async function() { 62 | assert.equal(await TokenResource.isPaused(), false) 63 | }) 64 | 65 | it('returns paused when token contract is paused', async function() { 66 | await OriginToken.methods.pause().send({ from: owner }) 67 | assert.equal(await TokenResource.isPaused(), true) 68 | }) 69 | }) 70 | -------------------------------------------------------------------------------- /test/service_ipfs.test.js: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai' 2 | import IpfsService from '../src/services/ipfs-service' 3 | import { listings, ipfsHashes } from './fixtures' 4 | 5 | const clearCache = ipfsService => { 6 | const { mapCache } = ipfsService 7 | Object.keys(mapCache.__data__).forEach(key => mapCache.del(key)) 8 | } 9 | 10 | const methodNames = [ 11 | 'saveObjAsFile', 12 | 'saveDataURIAsFile', 13 | 'saveFile', 14 | 'loadObjFromFile', 15 | 'loadFile', 16 | 'gatewayUrlForHash' 17 | ] 18 | 19 | describe('IpfsService', () => { 20 | let ipfsService 21 | 22 | beforeEach(() => { 23 | ipfsService = new IpfsService({ 24 | ipfsDomain: '127.0.0.1', 25 | ipfsApiPort: '5002', 26 | ipfsGatewayPort: '8080', 27 | ipfsGatewayProtocol: 'http' 28 | }) 29 | }) 30 | 31 | methodNames.forEach(methodName => { 32 | it(`should have ${methodName} method`, () => { 33 | expect(ipfsService[methodName]).to.be.an.instanceof(Function) 34 | }) 35 | }) 36 | 37 | describe('saveObjAsFile', () => { 38 | listings.forEach(({ data, ipfsHash }) => { 39 | it('should successfully submit file', async () => { 40 | const submittedHash = await ipfsService.saveObjAsFile(data) 41 | expect(submittedHash).to.equal(ipfsHash) 42 | 43 | const cachedData = await ipfsService.loadObjFromFile(submittedHash) 44 | expect(cachedData).to.deep.eql(data) 45 | 46 | clearCache(ipfsService) 47 | 48 | const submittedData = await ipfsService.loadObjFromFile(ipfsHash) 49 | expect(submittedData).to.deep.eql(data) 50 | }) 51 | }) 52 | }) 53 | 54 | describe('loadFile', () => { 55 | // Skipped because of https://github.com/OriginProtocol/platform/issues/27 56 | xit('should reject when listing cannot be found', done => { 57 | ipfsService 58 | .loadFile('QmWHyrPWQnsz1wxHR219ooJDYTvxJPyZuDUPSDpdsAovN5') 59 | .then(done.fail, error => { 60 | expect(error).to.be.instanceof(Error) 61 | done() 62 | }) 63 | }) 64 | }) 65 | 66 | describe('gatewayUrlForHash', () => { 67 | ipfsHashes.forEach(({ ipfsHash, url }) => { 68 | it(`should correctly create url for IPFS hash 
${ipfsHash}`, () => { 69 | const result = ipfsService.gatewayUrlForHash(ipfsHash) 70 | expect(result).to.equal(url['local']) 71 | }) 72 | }) 73 | }) 74 | }) 75 | -------------------------------------------------------------------------------- /test/webpack.config.js: -------------------------------------------------------------------------------- 1 | const path = require("path") 2 | const HtmlWebpackPlugin = require("html-webpack-plugin") 3 | const [, config] = require("../webpack.config") 4 | const { EnvironmentPlugin, NamedModulesPlugin } = require("webpack") 5 | 6 | delete config.output 7 | 8 | Object.assign(config, { 9 | devServer: { 10 | stats: "errors-only", 11 | host: "localhost", 12 | port: 8081, 13 | overlay: { 14 | errors: true, 15 | warnings: true 16 | } 17 | }, 18 | 19 | performance: { hints: false }, 20 | 21 | entry: ["babel-polyfill", path.join(__dirname, "index.js")], 22 | 23 | plugins: [ 24 | new NamedModulesPlugin(), 25 | new EnvironmentPlugin({ 26 | IPFS_DOMAIN: "", 27 | IPFS_API_PORT: "", 28 | IPFS_GATEWAY_PORT: "", 29 | IPFS_GATEWAY_PROTOCOL: "" 30 | }), 31 | new HtmlWebpackPlugin({ title: "Tests" }) 32 | ] 33 | }) 34 | 35 | module.exports = config 36 | -------------------------------------------------------------------------------- /token/.gitignore: -------------------------------------------------------------------------------- 1 | # See http://help.github.com/ignore-files/ for more about ignoring files. 2 | 3 | # dependencies 4 | /node_modules 5 | 6 | -------------------------------------------------------------------------------- /token/.npmignore: -------------------------------------------------------------------------------- 1 | # See https://docs.npmjs.com/misc/developers#keeping-files-out-of-your-package for more about ignoring files. 2 | 3 | # dependencies 4 | /node_modules 5 | -------------------------------------------------------------------------------- /token/README.md: -------------------------------------------------------------------------------- 1 | This directory contains code for interacting with Origin tokens on TestNets. 2 | - faucet: Web application that implements a faucet for Origin tokens. 3 | - scripts: Scripts and cli tools. 4 | - lib: Common code. 5 | 6 | Prerequisite 7 | ============ 8 | Use origin-box to start an origin-js container. 9 | 10 | docker-compose up origin-js 11 | 12 | To use the faucet 13 | ================= 14 | Start the server in the origin-js container 15 | 16 | docker exec -w /app/token origin-js node faucet/app.js --network_ids=999 17 | 18 | The server should start and you can point your browser to http://localhost:5000 to access the faucet web UI. 
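Besides the web UI, the faucet server exposes a /tokens route (see faucet/app.js further below) that can be called directly over HTTP. For example, assuming the defaults above (port 5000, local network id 999) and one of the development wallets:

    curl "http://localhost:5000/tokens?wallet=0xf17f52151ebef6c7334fad080c5704d77216b732&network_id=999"

Each successful request credits 100 OGN, and requests to /tokens are rate limited to one per IP every 60 seconds.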
19 | 20 | To use the cli 21 | ============== 22 | Example 1 - get the balance of an account: 23 | 24 | docker exec -w /app/token origin-js node scripts/token_cli.js --action=balance --wallet=0xf17f52151ebef6c7334fad080c5704d77216b732 25 | 26 | Example 2 - credit an account with tokens: 27 | 28 | docker exec -w /app/token origin-js node scripts/token_cli.js --action=credit --wallet=0xf17f52151ebef6c7334fad080c5704d77216b732 -------------------------------------------------------------------------------- /token/faucet/app.js: -------------------------------------------------------------------------------- 1 | const express = require('express') 2 | const { RateLimiterMemory } = require('rate-limiter-flexible') 3 | const Web3 = require('web3') 4 | 5 | const Config = require('../lib/config.js') 6 | const Token = require('../lib/token.js') 7 | 8 | const DEFAULT_SERVER_PORT = 5000 9 | const DEFAULT_NETWORK_ID = '999' // Local blockchain. 10 | 11 | // Credit 100 tokens per request. 12 | const NUM_TOKENS = 100 13 | 14 | 15 | // Starts the Express server. 16 | function runApp(config) { 17 | const app = express() 18 | const token = new Token(config) 19 | 20 | // Configure rate limiting. Allow at most 1 request per IP every 60 sec. 21 | const opts = { 22 | points: 1, // Point budget. 23 | duration: 60, // Reset points consumption every 60 sec. 24 | } 25 | const rateLimiter = new RateLimiterMemory(opts) 26 | const rateLimiterMiddleware = (req, res, next) => { 27 | // Rate limiting only applies to the /tokens route. 28 | if (req.url.startsWith('/tokens')) { 29 | rateLimiter.consume(req.connection.remoteAddress) 30 | .then(() => { 31 | // Allow request and consume 1 point. 32 | next() 33 | }) 34 | .catch((err) => { 35 | // Not enough points. Block the request. 36 | console.log(`Rejecting request due to rate limiting.`) 37 | res.status(429).send('
Too Many Requests
') 38 | }) 39 | } else { 40 | next() 41 | } 42 | } 43 | // Note: register rate limiting middleware *before* all routes 44 | // so that it gets executed first. 45 | app.use(rateLimiterMiddleware) 46 | 47 | // Configure directory for public assets. 48 | app.use(express.static(__dirname + '/public')) 49 | 50 | // Register the /tokens route for crediting tokens. 51 | app.get('/tokens', async function (req, res, next) { 52 | const networkId = req.query.network_id 53 | const wallet = req.query.wallet 54 | if (!req.query.wallet) { 55 | res.send('
Error: A wallet address must be supplied.
'); return 56 | } else if (!Web3.utils.isAddress(wallet)) { 57 | res.send(`
Error: ${wallet} is a malformed wallet address.
`) 58 | return 59 | } 60 | 61 | try { 62 | // Transfer NUM_TOKENS to specified wallet. 63 | const value = token.toNaturalUnit(NUM_TOKENS) 64 | const contractAddress = token.contractAddress(networkId) 65 | const receipt = await token.credit(networkId, wallet, value) 66 | const txHash = receipt.transactionHash 67 | console.log(`${NUM_TOKENS} OGN -> ${wallet} TxHash=${txHash}`) 68 | 69 | // Send response back to client. 70 | const resp = `Credited ${NUM_TOKENS} OGN tokens to wallet ${wallet}
` + 71 | `TxHash = ${txHash}
` + 72 | `OGN token contract address = ${contractAddress}` 73 | res.send(resp) 74 | } catch (err) { 75 | next(err) // Errors will be passed to Express. 76 | } 77 | }) 78 | 79 | // Start the server. 80 | app.listen( 81 | config.port || DEFAULT_SERVER_PORT, 82 | () => console.log(`Origin faucet app listening on port ${config.port}!`)) 83 | } 84 | 85 | // 86 | // Main 87 | // 88 | const args = Config.parseArgv() 89 | const config = { 90 | // Port server listens on. 91 | port: args['--port'] || DEFAULT_SERVER_PORT, 92 | // Network ids, comma separated. 93 | // If no network ids specified, defaults to using local blockchain. 94 | networkIds: (args['--network_ids'] || DEFAULT_NETWORK_ID).split(','), 95 | } 96 | 97 | try { 98 | config.providers = Config.createProviders(config.networkIds) 99 | } catch (err) { 100 | console.log('Config error:', err) 101 | process.exit(-1) 102 | } 103 | 104 | runApp(config) 105 | -------------------------------------------------------------------------------- /token/faucet/public/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Origin Protocol OGN - Testnet token faucet 6 | 7 | 8 | 9 | 10 | 11 |
[token/faucet/public/index.html: the HTML markup was stripped in this export and is not recoverable; only the page text survived. The page renders the faucet form: the headings "Origin Protocol OGN" and "Token Faucet", a "Wallet address:" input, and a "TestNet:" network selector.]
49 | 50 | 51 | -------------------------------------------------------------------------------- /token/faucet/public/static/css/styles.css: -------------------------------------------------------------------------------- 1 | @import 'variables.css'; 2 | 3 | 4 | html { 5 | font-size: 16px; 6 | -webkit-font-smoothing: antialiased; 7 | -moz-osx-font-smoothing: grayscale; 8 | height: 100%; 9 | } 10 | 11 | body { 12 | color: var(--dark); 13 | font-family: var(--default-font); 14 | font-size: 1.125rem; 15 | font-weight: 300; 16 | line-height: 1.5; 17 | background-image: url('/static/img/faucet.png'); 18 | background-repeat: no-repeat; 19 | background-position: 50% 0; 20 | -ms-background-size: cover; 21 | -o-background-size: cover; 22 | -moz-background-size: cover; 23 | -webkit-background-size: cover; 24 | background-size: cover; 25 | } 26 | 27 | .btn { 28 | font-weight: 900; 29 | border-radius: 30px; 30 | font-size: 1.125rem; 31 | padding-right: 30px; 32 | padding-left: 30px; 33 | width: 196px; 34 | } 35 | 36 | .submit { 37 | margin-top: 10px; 38 | } 39 | 40 | .btn.disabled, 41 | .btn:disabled { 42 | cursor: not-allowed; 43 | opacity: 0.33; 44 | } 45 | 46 | .btn-clear { 47 | border: solid 1px white; 48 | background-color: transparent; 49 | color: white; 50 | } 51 | 52 | .btn-clear:hover { 53 | background-color: white; 54 | color: var(--dark-two); 55 | } 56 | 57 | .card { 58 | top: 40%; 59 | padding: 20px; 60 | border-radius: 10px; 61 | background-color: rgba(255, 255, 255, 0.95); 62 | } 63 | 64 | .card-title { 65 | line-height: 2.25rem; 66 | font-weight: 400; 67 | } 68 | 69 | .card-title.secondary { 70 | color: var(--clear-blue); 71 | padding-bottom: 10px; 72 | } 73 | /* Common */ 74 | 75 | hr { 76 | border-bottom: 1px solid var(--light); 77 | border-top: 0; 78 | margin: 0 auto 2rem; 79 | padding: 2rem 0 0; 80 | } 81 | 82 | hr.dark { 83 | border-bottom: 1px solid var(--steel); 84 | } 85 | 86 | hr.sm { 87 | margin-bottom: 1rem; 88 | padding-top: 1rem; 89 | } 90 | 91 | h1, 92 | h2, 93 | h3 { 94 | font-family: var(--heading-font); 95 | line-height: 1.5; 96 | } 97 | 98 | h1 { 99 | font-size: 2.5rem; 100 | font-weight: 200; 101 | margin-bottom: 1.5rem; 102 | } 103 | 104 | h2 { 105 | font-size: 1.5rem; 106 | font-weight: 300; 107 | margin-bottom: 1.25rem; 108 | } 109 | 110 | h3 { 111 | font-size: 1.25rem; 112 | font-weight: 400; 113 | margin-bottom: 1.125rem; 114 | } 115 | 116 | p { 117 | color: var(--dark); 118 | font-size: 1.125rem; 119 | margin-bottom: 1rem; 120 | } 121 | -------------------------------------------------------------------------------- /token/faucet/public/static/css/variables.css: -------------------------------------------------------------------------------- 1 | @import url('https://fonts.googleapis.com/css?family=Lato:100,100i,300,300i,400,400i,700,700i,900,900i'); 2 | @import url('https://fonts.googleapis.com/css?family=Poppins:100,100i,300,300i,400,400i,700,700i,900,900i'); 3 | 4 | 5 | :root { 6 | --default-font: 'Lato', 'Helvetica Neue', 'Arial', sans-serif; 7 | --heading-font: 'Poppins', 'Helvetica Neue', 'Arial', sans-serif; 8 | 9 | --dark: #111d28; 10 | --dark-two: #213040; 11 | --light: #c2cbd3; 12 | --clear-blue: #1a82ff; 13 | --pale-clear-blue: #f5fafc; 14 | --dark-grey-blue: #2e3f53; 15 | --dark-clear-blue: #0169e6; 16 | --dark-grey: #282727; 17 | --pale-grey: #ebf0f3; 18 | --pale-grey-two: #dfe6ea; 19 | --pale-grey-three: #f6f7f8; 20 | --pale-grey-four: #fafbfc; 21 | --pale-grey-five: #f7f8f9; 22 | --pale-grey-six: #fafafa; 23 | --pale-grey-seven: #eaeef1; 24 | 
  --dusk: #455d75;
  --steel: #6f8294;
  --greenblue: #26d198;
  --pale-greenblue: #f5fcfa;
  --golden-rod: #f4c110;
  --light-greenblue: #59ffcb;
  --bluish-purple: #6e3bea;
  --bluey-grey: #98a7b4;
  --dark-blue-grey: #0c2033;
  --orange-red: #ff1a1a;
  --red: #f34755;
  --dark-red: #a2686c;
  --light-red: #fbdbdb;

  --default-radius: 5px;
}
--------------------------------------------------------------------------------
/token/faucet/public/static/img/faucet.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OriginProtocol/origin-js/10191926afa6c52a0468fa5e6532ecff4e5b08fe/token/faucet/public/static/img/faucet.png
--------------------------------------------------------------------------------
/token/lib/owner_whitelist.js:
--------------------------------------------------------------------------------
const validOwners = {
  // mainnet
  "1": [
    '0xe011fa2a6df98c69383457d87a056ed0103aa352',
    '0x8a1a4f77f9f0eb35fb9930696038be6220986c1b'
  ],

  // Ropsten
  "3": [
    // Empty list means allow all
  ],

  // Rinkeby
  "4": [
    // Empty list means allow all
  ],

  // development / local blockchain
  "999": [
    // These are the first two accounts for the default local blockchain started
    // by origin.js. They exist here only for testing.
    '0x627306090abaB3A6e1400e9345bC60c78a8BEf57',
    '0xf17f52151ebef6c7334fad080c5704d77216b732'
  ],
}

/**
 * Returns true if the given owner address is in the list of valid token owners
 * for the given network ID. This helps prevent accidentally transferring
 * ownership to an address not under your control.
 * @param {string} networkId - Ethereum network ID.
 * @param {string} newOwner - Address of the new owner.
 * @param {Object} owners - Map of network ID to whitelisted owner addresses.
 * @return {boolean} - True if newOwner is whitelisted or the whitelist is empty.
 */
const isValidOwner = (networkId, newOwner, owners = validOwners) => {
  const newOwnerLower = newOwner.toLowerCase()
  const whitelist = owners[networkId]
  if (whitelist === undefined) {
    throw new Error(`No whitelist defined for network ${networkId}`)
  }
  if (whitelist.length === 0) {
    return true
  }
  return whitelist.filter(address => address.toLowerCase() === newOwnerLower).length > 0
}

module.exports = { isValidOwner, validOwners }
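
The helper above is the natural guard to run before any token ownership transfer. A minimal usage sketch follows (not part of owner_whitelist.js): it assumes a web3 1.x instance and a deployed token contract exposing OpenZeppelin's `transferOwnership`; the function name, require path, and variable names are illustrative only.

// Hypothetical sketch: guard a token ownership transfer with the whitelist check.
const { isValidOwner } = require('./owner_whitelist')

async function safeTransferOwnership(web3, token, currentOwner, newOwner) {
  // Network ID as a string, to match the keys used in validOwners.
  const networkId = (await web3.eth.net.getId()).toString()
  if (!isValidOwner(networkId, newOwner)) {
    throw new Error(`${newOwner} is not a whitelisted owner for network ${networkId}`)
  }
  // transferOwnership comes from OpenZeppelin's Ownable, which the token inherits.
  return token.methods.transferOwnership(newOwner).send({ from: currentOwner })
}
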
--------------------------------------------------------------------------------
/token/package.json:
--------------------------------------------------------------------------------
{
  "name": "origin-faucet",
  "description": "Origin TestNet faucet.",
  "version": "0.1.0",
  "dependencies": {
    "bignumber.js": "^7.2.1",
    "express": "^4.16.3",
    "rate-limiter-flexible": "^0.17.1",
    "truffle-hdwallet-provider": "0.0.6",
    "truffle-privatekey-provider": "^0.1.0",
    "web3": "^1.0.0-beta.35"
  },
  "devDependencies": {},
  "main": "index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "author": "Origin Protocol",
  "license": "MIT"
}
--------------------------------------------------------------------------------
/webpack.config.js:
--------------------------------------------------------------------------------
var nodeExternals = require('webpack-node-externals');

/*
 * Generate the index.js used in the `origin` npm package.
 * This is used by our DApp.
 */
var serverConfig = {
  entry: ["babel-polyfill", './src/index.js'],
  output: {
    filename: './index.js',
    libraryTarget: 'commonjs2'
  },
  mode: 'development',
  devtool: 'inline-cheap-module-source-map',
  target: 'node',
  externals: [nodeExternals()],
  resolve: {
    /**
     * Overriding the default to allow jsx to be resolved automatically.
     */
    extensions: ['.js', '.json', '.jsx'],
    /**
     * Access config from anywhere via `import settings from 'settings'`
     */
  },
  module: {
    rules: [
      {
        test: /\.jsx?$/,
        exclude: /(node_modules|bower_components)/,
        loader: 'babel-loader',
        query: {
          presets: ['env', 'es2015', 'react'],
          plugins: ['transform-class-properties']
        }
      }
    ]
  }
}

/*
 * Generate the `origin.js` file that may be used standalone in the browser.
 * This is used by our docs, and included in our GitHub release.
 */
var clientConfig = {
  entry: ["babel-polyfill", './src/index.js'],
  output: {
    filename: './origin.js',
    libraryTarget: 'var',
    library: 'Origin'
  },
  mode: 'production',
  devtool: false,
  target: 'web',
  module: {
    rules: [
      {
        test: /\.js$/,
        exclude: /(node_modules|bower_components)/,
        loader: 'babel-loader',
        query: {
          presets: ['babel-preset-es2015'],
          plugins: ['transform-class-properties']
        }
      }
    ]
  }
}

module.exports = [ serverConfig, clientConfig ];
--------------------------------------------------------------------------------
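
Because `module.exports` is an array, a single `webpack` run compiles both `serverConfig` and `clientConfig`, producing `index.js` (commonjs2, for the npm package) and the standalone browser bundle `origin.js` (exposed as the global `Origin`). The sketch below shows the same dual build driven through webpack's Node API; the script itself is illustrative, not part of the repo.

// Illustrative build script: compile both configs exported by webpack.config.js.
const webpack = require('webpack')
const configs = require('./webpack.config.js')

webpack(configs, (err, stats) => {
  if (err) throw err
  // With an array of configs, webpack reports MultiStats: one entry per config.
  console.log(stats.toString({ colors: true }))
})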