diff --git a/.github/workflows/TEST-bdd-5.yml b/.github/workflows/TEST-bdd-5.yml index ba6e2d7f2f..5b91f8cf36 100644 --- a/.github/workflows/TEST-bdd-5.yml +++ b/.github/workflows/TEST-bdd-5.yml @@ -26,3 +26,8 @@ jobs: - run: cp .origintrail_noderc.travis .origintrail_noderc - run: npm run bootstrap - run: npm run test:bdd -- --tags=@fifth --world-parameters '{"appDataBaseDir":"$CUCUMBER_ARTIFACTS_DIR","keepFailedArtifacts":true}'; + - uses: actions/upload-artifact@v2 + if: ${{ always() }} + with: + name: my-artifact + path: /home/runner/work/ot-node/ot-node/artifacts diff --git a/.github/workflows/TEST-docker.yml b/.github/workflows/TEST-docker.yml index ba08765f31..09dcff72fe 100644 --- a/.github/workflows/TEST-docker.yml +++ b/.github/workflows/TEST-docker.yml @@ -17,17 +17,16 @@ jobs: uses: actions/setup-node@v2 with: node-version: ${{ matrix.node-version }} - - name: Cache dependencies - uses: actions/cache@v2 - with: - path: ~/.npm - key: npm-${{ hashFiles('package-lock.json') }} - restore-keys: npm- - - run: npm install - run: mkdir -p $ARTIFACTS_DIR - run: sudo chmod -R 777 $ARTIFACTS_DIR - run: mkdir -p $CUCUMBER_ARTIFACTS_DIR - run: sudo chmod -R 777 $CUCUMBER_ARTIFACTS_DIR - - run: sudo ./setup_arangodb.sh - run: cp .origintrail_noderc.travis .origintrail_noderc + - run: npm install + - run: npm run ganache > $ARTIFACTS_DIR/ganache.log & npm run truffle:deploy:ganache > $ARTIFACTS_DIR/truffle-migrate.log; - run: ./test/docker/check_image.sh; + - uses: actions/upload-artifact@v2 + if: ${{ always() }} + with: + name: my-artifact + path: /home/runner/work/ot-node/ot-node/artifacts diff --git a/.github/workflows/TEST-truffle.yml b/.github/workflows/TEST-truffle.yml index 0b598fd203..203bfede5f 100644 --- a/.github/workflows/TEST-truffle.yml +++ b/.github/workflows/TEST-truffle.yml @@ -19,12 +19,6 @@ jobs: uses: actions/setup-node@v2 with: node-version: ${{ matrix.node-version }} - - name: Cache dependencies - uses: actions/cache@v2 - with: - path: ~/.npm - 
key: npm-${{ hashFiles('package-lock.json') }} - restore-keys: npm- - run: npm install - run: mkdir -p $ARTIFACTS_DIR - run: sudo chmod -R 777 $ARTIFACTS_DIR @@ -32,5 +26,10 @@ jobs: - run: sudo chmod -R 777 $CUCUMBER_ARTIFACTS_DIR - run: sudo ./setup_arangodb.sh - run: cp .origintrail_noderc.travis .origintrail_noderc - - run: npm run truffle:test > $ARTIFACTS_DIR/truffle-test.log; - - run: npm run truffle:deploy:ganache > $ARTIFACTS_DIR/truffle-migrate.log; + - run: npm run ganache > $ARTIFACTS_DIR/run-ganache1.log & npm run truffle:test > $ARTIFACTS_DIR/truffle-test.log; + - run: npm run ganache > $ARTIFACTS_DIR/run-ganache2.log & npm run truffle:deploy:ganache > $ARTIFACTS_DIR/truffle-migrate.log; + - uses: actions/upload-artifact@v2 + if: ${{ always() }} + with: + name: my-artifact + path: /home/runner/work/ot-node/ot-node/artifacts diff --git a/.github/workflows/TEST-unit.yml b/.github/workflows/TEST-unit.yml index 6e25f8aa6c..0c22cae80a 100644 --- a/.github/workflows/TEST-unit.yml +++ b/.github/workflows/TEST-unit.yml @@ -5,6 +5,7 @@ env: NODE_ENV: development ARTIFACTS_DIR: artifacts CUCUMBER_ARTIFACTS_DIR: artifacts/cucumber + SEQUELIZEDB: artifacts/system.db jobs: test: runs-on: ubuntu-latest @@ -12,23 +13,20 @@ jobs: matrix: node-version: [9.x] steps: - - run: mkdir -p $ARTIFACTS_DIR - - run: ls -l /home/runner/work/ot-node/ot-node/ - - run: mkdir -p $CUCUMBER_ARTIFACTS_DIR - - run: ls -l /home/runner/work/ot-node/ot-node/artifacts - - run: echo hello > $ARTIFACTS_DIR/world.txt - - run: echo hello > $CUCUMBER_ARTIFACTS_DIR/world.txt - - run: cd artifacts && pwd - uses: actions/checkout@v2 - name: Use Node.js ${{ matrix.node-version }} uses: actions/setup-node@v2 with: node-version: ${{ matrix.node-version }} - run: npm install + - run: mkdir -p $ARTIFACTS_DIR + - run: sudo chmod -R 777 $ARTIFACTS_DIR + - run: mkdir -p $CUCUMBER_ARTIFACTS_DIR + - run: sudo chmod -R 777 $CUCUMBER_ARTIFACTS_DIR - run: sudo ./setup_arangodb.sh - run: cp 
.origintrail_noderc.travis .origintrail_noderc - - run: npm run lint - - run: npm run test:unit > $ARTIFACTS_DIR/mocha-logs.log + - run: npm run lint > $ARTIFACTS_DIR/lint.log + - run: npm run test > $ARTIFACTS_DIR/unit.log; - uses: actions/upload-artifact@v2 if: ${{ always() }} with: diff --git a/.gitignore b/.gitignore index f68a7cf929..04c7bca49c 100644 --- a/.gitignore +++ b/.gitignore @@ -15,7 +15,7 @@ manual-testing-scripts node.log keys data -modules/blockchain_interface/ethereum/build/* +tools/local-network-setup/temporary-config-files modules/Blockchain/Ethereum/test/development.test.js modules/Blockchain/Ethereum/build/ modules/Blockchain/XDai/build/ diff --git a/README.md b/README.md index 0c4520efc7..b944e7b617 100644 --- a/README.md +++ b/README.md @@ -1,13 +1,15 @@ OriginTrail Node ================ -[![Build Status](https://travis-ci.org/OriginTrail/ot-node.svg?branch=master)](https://travis-ci.org/OriginTrail/ot-node) +[comment]: # (TODO: Insert badges for github builds) +![badge](https://img.shields.io/docker/automated/origintrail/ot-node) __OriginTrail is an ecosystem dedicated to making global supply chains work together by enabling a universal, collaborative and trusted data exchange.__ This repository contains a work-in-progress code for a network node. -__OriginTrail Decentralized Network (ODN)__ is currently running in the __OriginTrail v4 Freedom-Gemini Mainnet__ stage. For further information about the roadmap please see our [website](https://tech.origintrail.io/roadmap). +__OriginTrail Decentralized Network (ODN)__ is currently running in the __OriginTrail v5 Mainnet__ stage. + For further information about the roadmap please see our [website](https://tech.origintrail.io/roadmap). 
[Please see our main documentation page for more information](http://docs.origintrail.io) @@ -34,15 +36,15 @@ The key issues OriginTrail tackles are: The OriginTrail Ecosystem is built on 3 main pillars: - ***Neutrality + ### Neutrality Being an open-source, decentralized system, based on open global standards, neutrality is crucial for the OriginTrail ecosystem as it prevents vendor lock-ins, ensures integrity, and effectively breaks data silos. Neutrality means adopting co-creation principles, working with other blockchain ecosystems and solutions builders even as they may be competing in the same market on the application level. - ***Usability + ### Usability Both blockchain environments, as well as OriginTrail, are fundamental technologies. In order to ensure the onboarding of enterprises, there needs to be a great focus on enhancing the user experience, as solutions need to meet the expectations of rapid value generation. - ***Inclusiveness + ### Inclusiveness Continuing to form partnerships with technological and business global leaders that can employ the OriginTrail ecosystem for their communities. Catering to the needs of leading global communities requires us to make strides in designing technical infrastructure and business models that support the adoption of the OriginTrail in diverse business communities. 
diff --git a/config/config.json b/config/config.json index 2bb85f4bb4..3cc6e04c4b 100644 --- a/config/config.json +++ b/config/config.json @@ -56,9 +56,9 @@ "implementations": [ { "blockchain_title": "Ethereum", - "network_id": "development", - "hub_contract_address": "0x2B7ca432a13e0D035BC46F0d6bf3cde1E72A10E5", - "identity_filepath": "erc725_identity.json", + "network_id": "stargazer", + "hub_contract_address": "0x0987197628Bb06133B6FA2409eb4cF9FCaFe8d3a", + "identity_filepath": "stargazer_identity.json", "gas_limit": "2000000", "gas_price": "20000000000", "max_allowed_gas_price" : 100000000000, @@ -68,38 +68,13 @@ "node_wallet": "", "node_private_key": "", "management_wallet": "", - "plugins": [ - { - "enabled": false, - "provider": "Hyperledger", - "name": "fingerprint-plugin", - "config": { - "url": "URL", - "auth": { - "user": "USER", - "pass": "PASS" - } - } - } - ] - }, - { - "blockchain_title": "xDai", - "network_id": "xdai:testnet", - "rpc_server_url": "https://xdai.poanetwork.dev", - "hub_contract_address": "0x17324BdB37c607c0742e95eFc98973221E1eB9b4", - "identity_filepath": "xdai_erc725_identity.json", - "gas_limit": "2000000", - "gas_price": "1000000000", - "dc_price_factor" : "3", - "dh_price_factor" : "2", - "trac_price_in_base_currency" : "0.4" + "plugins": [] }, { "blockchain_title": "Ethereum", - "network_id": "ganache_7545", - "hub_contract_address": "0x2B7ca432a13e0D035BC46F0d6bf3cde1E72A10E5", - "identity_filepath": "ganache_7545_erc725_identity.json", + "network_id": "copernicus", + "hub_contract_address": "0x791ee543738B997B7A125bc849005B62aFD35578", + "identity_filepath": "copernicus_identity.json", "gas_limit": "2000000", "gas_price": "20000000000", "max_allowed_gas_price" : 100000000000, @@ -109,49 +84,20 @@ "node_wallet": "", "node_private_key": "", "management_wallet": "", - "plugins": [ - { - "enabled": false, - "provider": "Hyperledger", - "name": "fingerprint-plugin", - "config": { - "url": "URL", - "auth": { - "user": "USER", - 
"pass": "PASS" - } - } - } - ] + "plugins": [] }, { - "blockchain_title": "Ethereum", - "network_id": "ganache_8545", - "hub_contract_address": "0x2B7ca432a13e0D035BC46F0d6bf3cde1E72A10E5", - "identity_filepath": "ganache_8545_erc725_identity.json", + "blockchain_title": "xDai", + "network_id": "xdai:testnet", + "rpc_server_url": "https://xdai.poanetwork.dev", + "hub_contract_address": "0x17324BdB37c607c0742e95eFc98973221E1eB9b4", + "identity_filepath": "xdai_erc725_identity.json", "gas_limit": "2000000", - "gas_price": "20000000000", - "max_allowed_gas_price" : 100000000000, + "gas_price": "1000000000", "dc_price_factor" : "3", "dh_price_factor" : "2", - "trac_price_in_base_currency" : "0.00005", - "node_wallet": "", - "node_private_key": "", - "management_wallet": "", - "plugins": [ - { - "enabled": false, - "provider": "Hyperledger", - "name": "fingerprint-plugin", - "config": { - "url": "URL", - "auth": { - "user": "USER", - "pass": "PASS" - } - } - } - ] + "trac_price_in_base_currency" : "0.4", + "plugins": [] } ] }, diff --git a/migrations/202105241126105-remove-obsolete-permanent-commands.js b/migrations/202105241126105-remove-obsolete-permanent-commands.js new file mode 100644 index 0000000000..97b97c206c --- /dev/null +++ b/migrations/202105241126105-remove-obsolete-permanent-commands.js @@ -0,0 +1,6 @@ +module.exports = { + up: async (queryInterface) => { + await queryInterface.sequelize.query('DELETE FROM commands WHERE name = \'dhLitigationInitiatedCommand\''); + }, + down: async () => { }, +}; diff --git a/migrations/202106011054103-update-column-types.js b/migrations/202106011054103-update-column-types.js new file mode 100644 index 0000000000..a6e891730e --- /dev/null +++ b/migrations/202106011054103-update-column-types.js @@ -0,0 +1,42 @@ +const constats = require('../modules/constants'); + +module.exports = { + up: async (queryInterface, Sequelize) => { + await queryInterface + .changeColumn('public_keys', 'public_key', { + type: Sequelize.TEXT, + 
}); + await queryInterface + .changeColumn('public_keys', 'timestamp', { + type: Sequelize.BIGINT, + }); + await queryInterface + .changeColumn('offers', 'data_set_id', { + type: Sequelize.STRING, + unique: false, + }); + await queryInterface + .changeColumn('offers', 'price_factor_used_for_price_calculation', { + type: Sequelize.STRING, + }); + }, + down: async (queryInterface, Sequelize) => { + await queryInterface + .changeColumn('public_keys', 'public_key', { + type: Sequelize.STRING, + }); + await queryInterface + .changeColumn('public_keys', 'timestamp', { + type: Sequelize.INTEGER, + }); + await queryInterface + .changeColumn('offers', 'data_set_id', { + type: Sequelize.STRING, + unique: true, + }); + await queryInterface + .changeColumn('offers', 'price_factor_used_for_price_calculation', { + type: Sequelize.INTEGER, + }); + }, +}; diff --git a/models/offers.js b/models/offers.js index dcbd554a06..aa282876ee 100644 --- a/models/offers.js +++ b/models/offers.js @@ -25,7 +25,7 @@ module.exports = (sequelize, DataTypes) => { number_of_verified_replications: DataTypes.INTEGER, trac_in_base_currency_used_for_price_calculation: DataTypes.STRING, gas_price_used_for_price_calculation: DataTypes.STRING, - price_factor_used_for_price_calculation: DataTypes.INTEGER, + price_factor_used_for_price_calculation: DataTypes.STRING, offer_finalize_transaction_hash: DataTypes.STRING(128), blockchain_id: DataTypes.STRING, replication_start_timestamp: DataTypes.STRING, diff --git a/models/public_keys.js b/models/public_keys.js index 9cb52ac7fc..67c45758e6 100644 --- a/models/public_keys.js +++ b/models/public_keys.js @@ -7,11 +7,11 @@ module.exports = (sequelize, DataTypes) => { primaryKey: true, defaultValue: () => uuidv4(), }, - public_key: DataTypes.STRING, + public_key: DataTypes.TEXT, node_erc: DataTypes.STRING, node_id: DataTypes.STRING, timestamp: { - type: DataTypes.INTEGER, + type: DataTypes.BIGINT, defaultValue: () => Date.now(), }, }, {}); diff --git 
a/modules/Blockchain.js b/modules/Blockchain.js index 4680bd0f6a..c9a6356be5 100644 --- a/modules/Blockchain.js +++ b/modules/Blockchain.js @@ -196,6 +196,21 @@ class Blockchain { return this.pluginService.execute(name, data); } + /** + * Gets the version variable from a contract, might throw an error if the variable doesn't exist + * @param contractName + * @param blockchain_id + * @param {Boolean} showUninitialized - Return all implementations, not only initialized ones + * @returns {Object} - An object containing the blockchain_id string and the response promise + */ + getContractVersion(contractName, blockchain_id, showUninitialized = false) { + const implementation = this._getImplementationFromId(blockchain_id, showUninitialized); + return { + blockchain_id: implementation.getBlockchainId(), + response: implementation.getContractVersion(contractName), + }; + } + /** * Gets profile by wallet * @param identity diff --git a/modules/Blockchain/Ethereum/abi/holding.json b/modules/Blockchain/Ethereum/abi/holding.json index 08f0579f31..c3635c9ffb 100644 --- a/modules/Blockchain/Ethereum/abi/holding.json +++ b/modules/Blockchain/Ethereum/abi/holding.json @@ -13,6 +13,20 @@ "stateMutability": "view", "type": "function" }, + { + "constant": true, + "inputs": [], + "name": "version", + "outputs": [ + { + "name": "", + "type": "uint256" + } + ], + "payable": false, + "stateMutability": "view", + "type": "function" + }, { "constant": true, "inputs": [], diff --git a/modules/Blockchain/Ethereum/contracts/Holding.sol b/modules/Blockchain/Ethereum/contracts/Holding.sol index b01e2e548e..8b0e5050e0 100644 --- a/modules/Blockchain/Ethereum/contracts/Holding.sol +++ b/modules/Blockchain/Ethereum/contracts/Holding.sol @@ -16,6 +16,8 @@ contract Holding is Ownable { Hub public hub; uint256 public difficultyOverride; + uint256 public constant version = 101; + constructor(address hubAddress) public{ require(hubAddress!=address(0)); hub = Hub(hubAddress); @@ -109,13 +111,13 @@ 
contract Holding is Ownable { if(parentIdentity != address(0)){ CreditorHandler(hub.getContractAddress("CreditorHandler")).finalizeOffer(offerId, identity, parentIdentity); } - require(identity == holdingStorage.getOfferCreator(bytes32(offerId)), "Offer can only be finalized by its creator!"); + require(identity == holdingStorage.getOfferCreator(bytes32(offerId))); require(holdingStorage.getOfferStartTime(bytes32(offerId)) == 0); // Check if signatures match identities - require(ERC725(holderIdentity[0]).keyHasPurpose(keccak256(abi.encodePacked(ecrecovery(keccak256(abi.encodePacked(offerId,uint256(holderIdentity[0]))), confirmation1))), 4), "Wallet from holder 1 does not have encryption approval!"); - require(ERC725(holderIdentity[1]).keyHasPurpose(keccak256(abi.encodePacked(ecrecovery(keccak256(abi.encodePacked(offerId,uint256(holderIdentity[1]))), confirmation2))), 4), "Wallet from holder 2 does not have encryption approval!"); - require(ERC725(holderIdentity[2]).keyHasPurpose(keccak256(abi.encodePacked(ecrecovery(keccak256(abi.encodePacked(offerId,uint256(holderIdentity[2]))), confirmation3))), 4), "Wallet from holder 3 does not have encryption approval!"); + require(ERC725(holderIdentity[0]).keyHasPurpose(keccak256(abi.encodePacked(ecrecovery(keccak256(abi.encodePacked(offerId,uint256(holderIdentity[0]), uint256(encryptionType[0]))), confirmation1))), 4), "Wallet from holder 1 does not have encryption approval!"); + require(ERC725(holderIdentity[1]).keyHasPurpose(keccak256(abi.encodePacked(ecrecovery(keccak256(abi.encodePacked(offerId,uint256(holderIdentity[1]), uint256(encryptionType[1]))), confirmation2))), 4), "Wallet from holder 2 does not have encryption approval!"); + require(ERC725(holderIdentity[2]).keyHasPurpose(keccak256(abi.encodePacked(ecrecovery(keccak256(abi.encodePacked(offerId,uint256(holderIdentity[2]), uint256(encryptionType[2]))), confirmation3))), 4), "Wallet from holder 3 does not have encryption approval!"); // Verify task answer 
bytes32[3] memory hashes; diff --git a/modules/Blockchain/Ethereum/contracts/TestingUtilities.sol b/modules/Blockchain/Ethereum/contracts/TestingUtilities.sol index 0d0f9bf757..55d1613d27 100644 --- a/modules/Blockchain/Ethereum/contracts/TestingUtilities.sol +++ b/modules/Blockchain/Ethereum/contracts/TestingUtilities.sol @@ -48,6 +48,11 @@ contract TestingUtilities{ return keccak256(abi.encodePacked(uint256(a),uint256(b))); } + function keccakBytesAddressNumber(bytes32 a, address b, uint256 c) + public pure returns (bytes32) { + return keccak256(abi.encodePacked(uint256(a),uint256(b),uint256(c))); + } + function keccakAddressAddressAddress(address a, address b, address c) public pure returns (bytes32) { return keccak256(abi.encodePacked(a,b,c)); diff --git a/modules/Blockchain/Ethereum/migrations/2_total_migration.js b/modules/Blockchain/Ethereum/migrations/2_total_migration.js index ab52c968e5..12468eed27 100644 --- a/modules/Blockchain/Ethereum/migrations/2_total_migration.js +++ b/modules/Blockchain/Ethereum/migrations/2_total_migration.js @@ -126,7 +126,7 @@ module.exports = async (deployer, network, accounts) => { ); await hub.setContractAddress('Replacement', replacement.address); - for (let i = 0; i < 10; i += 1) { + for (let i = 0; i < 20; i += 1) { amounts.push(amountToMint); recepients.push(accounts[i]); } @@ -209,7 +209,7 @@ module.exports = async (deployer, network, accounts) => { ); await hub.setContractAddress('Replacement', replacement.address); - for (let i = 0; i < 10; i += 1) { + for (let i = 0; i < 20; i += 1) { amounts.push(amountToMint); recepients.push(accounts[i]); } @@ -286,7 +286,7 @@ module.exports = async (deployer, network, accounts) => { .then(result => token = result); holding = await deployer.deploy(MockHolding); - for (var i = 0; i < 10; i += 1) { + for (var i = 0; i < 20; i += 1) { amounts.push(amountToMint); recepients.push(accounts[i]); } @@ -496,6 +496,15 @@ module.exports = async (deployer, network, accounts) => { // { gas: 
6000000, gasPrice: 8000000000 }, // ); break; + case 'updateContract': + hub = await Hub.at('insert hub contract address here'); + console.log(JSON.stringify(hub.address)); + if (hub.address) { + holding = await deployer + .deploy(Holding, hub.address, { gas: 7000000, from: accounts[0] }); + await hub.setContractAddress('Holding', holding.address); + } + break; default: console.warn('Please use one of the following network identifiers: ganache, mock, test, or rinkeby'); break; diff --git a/modules/Blockchain/Ethereum/test/creditor.handler.test.js b/modules/Blockchain/Ethereum/test/creditor.handler.test.js index 0371e66c42..f24bfa7678 100644 --- a/modules/Blockchain/Ethereum/test/creditor.handler.test.js +++ b/modules/Blockchain/Ethereum/test/creditor.handler.test.js @@ -96,16 +96,19 @@ async function createOffer(accounts) { identity: identities[0], privateKey: privateKeys[0], hash: hash1, + color: new BN(1), }, { identity: identities[1], privateKey: privateKeys[1], hash: hash2, + color: new BN(2), }, { identity: identities[2], privateKey: privateKeys[2], hash: hash3, + color: new BN(3), }, ].sort((x, y) => x.hash.localeCompare(y.hash)); @@ -126,7 +129,8 @@ async function createOffer(accounts) { var hashes = []; let promises = []; for (i = 0; i < 3; i += 1) { - promises[i] = util.keccakBytesAddress.call(offerId, sortedIdentities[i].identity); + promises[i] = util.keccakBytesAddressNumber + .call(offerId, sortedIdentities[i].identity, sortedIdentities[i].color); } // eslint-disable-next-line no-await-in-loop hashes = await Promise.all(promises); @@ -152,7 +156,11 @@ async function createOffer(accounts) { confimations[0].signature, confimations[1].signature, confimations[2].signature, - [new BN(0), new BN(1), new BN(2)], + [ + sortedIdentities[0].color, + sortedIdentities[1].color, + sortedIdentities[2].color, + ], [ sortedIdentities[0].identity, sortedIdentities[1].identity, @@ -319,16 +327,19 @@ contract('Creditor handler testing', async (accounts) => { identity: 
identities[0], privateKey: privateKeys[0], hash: hash1, + color: new BN(0), }, { identity: identities[1], privateKey: privateKeys[1], hash: hash2, + color: new BN(1), }, { identity: identities[2], privateKey: privateKeys[2], hash: hash3, + color: new BN(2), }, ].sort((x, y) => x.hash.localeCompare(y.hash)); @@ -350,7 +361,8 @@ contract('Creditor handler testing', async (accounts) => { var hashes = []; for (i = 0; i < 3; i += 1) { // eslint-disable-next-line no-await-in-loop - hashes[i] = await util.keccakBytesAddress.call(offerId, sortedIdentities[i].identity); + hashes[i] = await util.keccakBytesAddressNumber + .call(offerId, sortedIdentities[i].identity, sortedIdentities[i].color); } // Getting confirmations @@ -372,7 +384,11 @@ contract('Creditor handler testing', async (accounts) => { confimations[0].signature, confimations[1].signature, confimations[2].signature, - [new BN(0), new BN(1), new BN(2)], + [ + sortedIdentities[0].color, + sortedIdentities[1].color, + sortedIdentities[2].color, + ], [ sortedIdentities[0].identity, sortedIdentities[1].identity, @@ -456,16 +472,19 @@ contract('Creditor handler testing', async (accounts) => { identity: identities[0], privateKey: privateKeys[0], hash: hash1, + color: new BN(0), }, { identity: identities[1], privateKey: privateKeys[1], hash: hash2, + color: new BN(1), }, { identity: identities[2], privateKey: privateKeys[2], hash: hash3, + color: new BN(2), }, ].sort((x, y) => x.hash.localeCompare(y.hash)); @@ -487,7 +506,8 @@ contract('Creditor handler testing', async (accounts) => { var hashes = []; for (i = 0; i < 3; i += 1) { // eslint-disable-next-line no-await-in-loop - hashes[i] = await util.keccakBytesAddress.call(offerId, sortedIdentities[i].identity); + hashes[i] = await util.keccakBytesAddressNumber + .call(offerId, sortedIdentities[i].identity, sortedIdentities[i].color); } // Getting confirmations @@ -507,7 +527,11 @@ contract('Creditor handler testing', async (accounts) => { confimations[0].signature, 
confimations[1].signature, confimations[2].signature, - [new BN(0), new BN(1), new BN(2)], + [ + sortedIdentities[0].color, + sortedIdentities[1].color, + sortedIdentities[2].color, + ], [ sortedIdentities[0].identity, sortedIdentities[1].identity, @@ -527,7 +551,8 @@ contract('Creditor handler testing', async (accounts) => { for (let i = 0; i < res.length; i += 1) { assert(tokenAmountPerHolder.eq(res[i].stakedAmount), `Token amount not matching for identity ${sortedIdentities[i].identity}!` + `Expected ${tokenAmountPerHolder.toString()}, but got ${res[i].stakedAmount}!`); - assert.equal(res[i].litigationEncryptionType, i, 'Litigation hash not matching!'); + const chosenEnctyptionType = sortedIdentities[i].color.toString(10); + assert.equal(res[i].litigationEncryptionType, chosenEnctyptionType, 'Litigation hash not matching!'); } const finalStake = []; diff --git a/modules/Blockchain/Ethereum/test/litigation.test.js b/modules/Blockchain/Ethereum/test/litigation.test.js index 332f2e1db3..74b57c6fe8 100644 --- a/modules/Blockchain/Ethereum/test/litigation.test.js +++ b/modules/Blockchain/Ethereum/test/litigation.test.js @@ -223,16 +223,19 @@ contract('Litigation testing', async (accounts) => { identity: identities[0], privateKey: privateKeys[0], hash: hash1, + color: new BN(0), }, { identity: identities[1], privateKey: privateKeys[1], hash: hash2, + color: new BN(1), }, { identity: identities[2], privateKey: privateKeys[2], hash: hash3, + color: new BN(2), }, ].sort((x, y) => x.hash.localeCompare(y.hash)); @@ -255,8 +258,8 @@ contract('Litigation testing', async (accounts) => { let promises = []; for (let i = 0; i < 3; i += 1) { // eslint-disable-next-line no-await-in-loop - promises[i] = await util.keccakBytesAddress - .call(offerId, sortedIdentities[i].identity); + promises[i] = await util.keccakBytesAddressNumber + .call(offerId, sortedIdentities[i].identity, sortedIdentities[i].color); } // eslint-disable-next-line no-await-in-loop confirmations = await 
Promise.all(promises); @@ -282,7 +285,11 @@ contract('Litigation testing', async (accounts) => { signedConfirmations[0].signature, signedConfirmations[1].signature, signedConfirmations[2].signature, - [new BN(2), new BN(2), new BN(2)], + [ + sortedIdentities[0].color, + sortedIdentities[1].color, + sortedIdentities[2].color, + ], [ sortedIdentities[0].identity, sortedIdentities[1].identity, diff --git a/modules/Blockchain/Ethereum/test/metrics.test.js b/modules/Blockchain/Ethereum/test/metrics.test.js index f87229716f..9affca643b 100644 --- a/modules/Blockchain/Ethereum/test/metrics.test.js +++ b/modules/Blockchain/Ethereum/test/metrics.test.js @@ -99,16 +99,19 @@ async function createOffer(accounts) { identity: identities[0], privateKey: privateKeys[0], hash: hash1, + color: new BN(1), }, { identity: identities[1], privateKey: privateKeys[1], hash: hash2, + color: new BN(2), }, { identity: identities[2], privateKey: privateKeys[2], hash: hash3, + color: new BN(3), }, ].sort((x, y) => x.hash.localeCompare(y.hash)); @@ -130,7 +133,8 @@ async function createOffer(accounts) { var hashes = []; let promises = []; for (i = 0; i < 3; i += 1) { - promises[i] = util.keccakBytesAddress.call(offerId, sortedIdentities[i].identity); + promises[i] = util.keccakBytesAddressNumber + .call(offerId, sortedIdentities[i].identity, sortedIdentities[i].color); } hashes = await Promise.all(promises); @@ -152,7 +156,11 @@ async function createOffer(accounts) { confimations[0].signature, confimations[1].signature, confimations[2].signature, - [new BN(0), new BN(1), new BN(2)], + [ + sortedIdentities[0].color, + sortedIdentities[1].color, + sortedIdentities[2].color, + ], [ sortedIdentities[0].identity, sortedIdentities[1].identity, diff --git a/modules/Blockchain/Ethereum/test/offer.test.js b/modules/Blockchain/Ethereum/test/offer.test.js index e139b993e5..43a09e6416 100644 --- a/modules/Blockchain/Ethereum/test/offer.test.js +++ b/modules/Blockchain/Ethereum/test/offer.test.js @@ -96,16 
+96,19 @@ async function createOffer(accounts) { identity: identities[0], privateKey: privateKeys[0], hash: hash1, + color: new BN(1), }, { identity: identities[1], privateKey: privateKeys[1], hash: hash2, + color: new BN(2), }, { identity: identities[2], privateKey: privateKeys[2], hash: hash3, + color: new BN(3), }, ].sort((x, y) => x.hash.localeCompare(y.hash)); @@ -126,7 +129,8 @@ async function createOffer(accounts) { var hashes = []; let promises = []; for (i = 0; i < 3; i += 1) { - promises[i] = util.keccakBytesAddress.call(offerId, sortedIdentities[i].identity); + promises[i] = util.keccakBytesAddressNumber + .call(offerId, sortedIdentities[i].identity, sortedIdentities[i].color); } // eslint-disable-next-line no-await-in-loop hashes = await Promise.all(promises); @@ -152,7 +156,11 @@ async function createOffer(accounts) { confimations[0].signature, confimations[1].signature, confimations[2].signature, - [new BN(0), new BN(1), new BN(2)], + [ + sortedIdentities[0].color, + sortedIdentities[1].color, + sortedIdentities[2].color, + ], [ sortedIdentities[0].identity, sortedIdentities[1].identity, @@ -308,16 +316,19 @@ contract('Offer testing', async (accounts) => { identity: identities[0], privateKey: privateKeys[0], hash: hash1, + color: new BN(0), }, { identity: identities[1], privateKey: privateKeys[1], hash: hash2, + color: new BN(1), }, { identity: identities[2], privateKey: privateKeys[2], hash: hash3, + color: new BN(2), }, ].sort((x, y) => x.hash.localeCompare(y.hash)); @@ -339,7 +350,8 @@ contract('Offer testing', async (accounts) => { var hashes = []; for (i = 0; i < 3; i += 1) { // eslint-disable-next-line no-await-in-loop - hashes[i] = await util.keccakBytesAddress.call(offerId, sortedIdentities[i].identity); + hashes[i] = await util.keccakBytesAddressNumber + .call(offerId, sortedIdentities[i].identity, sortedIdentities[i].color); } // Getting confirmations @@ -359,7 +371,11 @@ contract('Offer testing', async (accounts) => { 
confimations[0].signature, confimations[1].signature, confimations[2].signature, - [new BN(0), new BN(1), new BN(2)], + [ + sortedIdentities[0].color, + sortedIdentities[1].color, + sortedIdentities[2].color, + ], [ sortedIdentities[0].identity, sortedIdentities[1].identity, @@ -379,7 +395,11 @@ contract('Offer testing', async (accounts) => { confimations[0].signature, confimations[1].signature, confimations[2].signature, - [new BN(0), new BN(1), new BN(2)], + [ + sortedIdentities[0].color, + sortedIdentities[1].color, + sortedIdentities[2].color, + ], [ sortedIdentities[0].identity, sortedIdentities[1].identity, @@ -409,7 +429,8 @@ contract('Offer testing', async (accounts) => { res = await holdingStorage.holder.call(offerId, sortedIdentities[i].identity); assert(tokenAmountPerHolder.eq(res.stakedAmount), 'Token amount not matching!'); - assert.equal(res.litigationEncryptionType, i, 'Red litigation hash not matching!'); + const expectedEncryptionType = sortedIdentities[i].color.toString(10); + assert.equal(res.litigationEncryptionType, expectedEncryptionType, 'Litigation hash not matching!'); } for (i = 0; i < confimations.length; i += 1) { diff --git a/modules/Blockchain/Ethereum/truffle.js b/modules/Blockchain/Ethereum/truffle.js index 6324339bfd..f9868fdeb4 100644 --- a/modules/Blockchain/Ethereum/truffle.js +++ b/modules/Blockchain/Ethereum/truffle.js @@ -94,5 +94,12 @@ module.exports = { websockets: true, skipDryRun: true, }, + + updateContract: { + host: 'localhost', + port: 7545, + gas: 6000000, + network_id: '5777', + }, }, }; diff --git a/modules/Blockchain/Web3Implementation/Transactions.js b/modules/Blockchain/Web3Implementation/Transactions.js index baa6060c84..c49d1936e6 100644 --- a/modules/Blockchain/Web3Implementation/Transactions.js +++ b/modules/Blockchain/Web3Implementation/Transactions.js @@ -35,6 +35,10 @@ class Transactions { // eslint-disable-next-line no-await-in-loop const result = await this._sendTransaction(transaction, serializedTx); + 
+ if (!result) { + future.reject(new TransactionFailedError('Received empty response from blockchain', transaction)); + } if (result.status === '0x0') { future.reject(result); } else { @@ -49,11 +53,10 @@ class Transactions { const transactionReceipt = // eslint-disable-next-line no-await-in-loop await this._fetchTransactionReceipt(transactionHash); - - if (transactionReceipt.status) { + if (transactionReceipt && transactionReceipt.status) { future.resolve(transactionReceipt); } else { - future.reject(transactionReceipt); + future.reject(new TransactionFailedError(`Failed to fetch transaction receipt. Received receipt: ${transactionReceipt}`)); } transactionHandled = true; @@ -160,7 +163,7 @@ class Transactions { try { // eslint-disable-next-line no-await-in-loop receipt = await this.web3.eth.getTransactionReceipt(transactionHash); - if (Object.keys(receipt).length > 0) { + if (receipt && typeof receipt === 'object' && Object.keys(receipt).length > 0) { break; } this.logger.warn(`Failed to fetch transaction receipt from empty response on attempt ${i + 1}.`); diff --git a/modules/Blockchain/Web3Implementation/index.js b/modules/Blockchain/Web3Implementation/index.js index 59837862af..7d1959eb15 100644 --- a/modules/Blockchain/Web3Implementation/index.js +++ b/modules/Blockchain/Web3Implementation/index.js @@ -778,6 +778,30 @@ class Web3Implementation { return this.lastBlock; } + /** + * Gets the version variable from a contract, might throw an error if the variable doesn't exist + * @param contractName + * @returns {Promise} + */ + async getContractVersion(contractName) { + this.logger.trace(`[${this.getBlockchainId()}] Reading ${contractName} contract version.`); + const contract = this.contractsByName[contractName]; + + if (!contract || Utilities.isZeroHash(contract._address)) { + return; + } + + const code = await this.web3.eth.getCode(contract._address); + + const signature = 'version()'; + const hash = this.web3.eth.abi.encodeFunctionSignature(signature); + + 
if (code.indexOf(hash.slice(2, hash.length)) > 0) { + return contract.methods.version().call(); + } + throw Error('Contract does not have version variable'); + } + /** * Gets all past events for the contract * @param contractName @@ -1090,7 +1114,7 @@ class Web3Implementation { * @param wallet {string} * @return {Promise<[]>} */ - getWalletPurposes(erc725Identity, wallet) { + async getWalletPurposes(erc725Identity, wallet) { const erc725IdentityContract = new this.web3.eth.Contract( this.erc725IdentityContractAbi, erc725Identity, diff --git a/modules/Blockchain/XDai/abi/holding.json b/modules/Blockchain/XDai/abi/holding.json index 08f0579f31..c3635c9ffb 100644 --- a/modules/Blockchain/XDai/abi/holding.json +++ b/modules/Blockchain/XDai/abi/holding.json @@ -13,6 +13,20 @@ "stateMutability": "view", "type": "function" }, + { + "constant": true, + "inputs": [], + "name": "version", + "outputs": [ + { + "name": "", + "type": "uint256" + } + ], + "payable": false, + "stateMutability": "view", + "type": "function" + }, { "constant": true, "inputs": [], diff --git a/modules/Blockchain/XDai/contracts/TestingUtilities.sol b/modules/Blockchain/XDai/contracts/TestingUtilities.sol index 0d0f9bf757..55d1613d27 100644 --- a/modules/Blockchain/XDai/contracts/TestingUtilities.sol +++ b/modules/Blockchain/XDai/contracts/TestingUtilities.sol @@ -48,6 +48,11 @@ contract TestingUtilities{ return keccak256(abi.encodePacked(uint256(a),uint256(b))); } + function keccakBytesAddressNumber(bytes32 a, address b, uint256 c) + public pure returns (bytes32) { + return keccak256(abi.encodePacked(uint256(a),uint256(b),uint256(c))); + } + function keccakAddressAddressAddress(address a, address b, address c) public pure returns (bytes32) { return keccak256(abi.encodePacked(a,b,c)); diff --git a/modules/Blockchain/XDai/migrations/2_total_migration.js b/modules/Blockchain/XDai/migrations/2_total_migration.js index cc60e03671..12468eed27 100644 --- 
a/modules/Blockchain/XDai/migrations/2_total_migration.js +++ b/modules/Blockchain/XDai/migrations/2_total_migration.js @@ -11,7 +11,6 @@ var Litigation = artifacts.require('Litigation'); // eslint-disable-line no-unde var Marketplace = artifacts.require('Marketplace'); // eslint-disable-line no-undef var Replacement = artifacts.require('Replacement'); // eslint-disable-line no-undef var Approval = artifacts.require('Approval'); // eslint-disable-line no-undef -var XDaiTrac = artifacts.require('XDAITestTrac'); // eslint-disable-line no-undef var ProfileStorage = artifacts.require('ProfileStorage'); // eslint-disable-line no-undef var HoldingStorage = artifacts.require('HoldingStorage'); // eslint-disable-line no-undef @@ -25,7 +24,6 @@ var TestingUtilities = artifacts.require('TestingUtilities'); // eslint-disable- var Identity = artifacts.require('Identity'); // eslint-disable-line no-undef const amountToMint = (new BN(5)).mul((new BN(10)).pow(new BN(30))); -const tokenSupply = (new BN(5)).mul((new BN(10)).pow(new BN(25))); module.exports = async (deployer, network, accounts) => { let hub; @@ -52,18 +50,97 @@ module.exports = async (deployer, network, accounts) => { var temp2; switch (network) { - case 'development': - case 'ganache': case 'test': + await deployer.deploy(TestingUtilities); + await deployer.deploy(Hub, { gas: 6000000, from: accounts[0] }) .then((result) => { hub = result; }); await hub.setContractAddress('Owner', accounts[0]); - if (network === 'test') { - await deployer.deploy(TestingUtilities); + profileStorage = await deployer.deploy( + ProfileStorage, + hub.address, { gas: 6000000, from: accounts[0] }, + ); + await hub.setContractAddress('ProfileStorage', profileStorage.address); + + holdingStorage = await deployer.deploy( + HoldingStorage, + hub.address, + { gas: 6000000, from: accounts[0] }, + ); + await hub.setContractAddress('HoldingStorage', holdingStorage.address); + + marketplaceStorage = await deployer.deploy( + MarketplaceStorage, + 
hub.address, + { gas: 6000000, from: accounts[0] }, + ); + await hub.setContractAddress('MarketplaceStorage', marketplaceStorage.address); + + litigationStorage = await deployer.deploy( + LitigationStorage, + hub.address, + { gas: 6000000, from: accounts[0] }, + ); + await hub.setContractAddress('LitigationStorage', litigationStorage.address); + + approval = await deployer.deploy(MockApproval); + await hub.setContractAddress('Approval', approval.address); + + token = await deployer.deploy(TracToken, accounts[0], accounts[1], accounts[2]); + await hub.setContractAddress('Token', token.address); + + profile = await deployer.deploy(Profile, hub.address, { gas: 7000000, from: accounts[0] }); + await hub.setContractAddress('Profile', profile.address); + + holding = await deployer.deploy(Holding, hub.address, { gas: 7000000, from: accounts[0] }); + await hub.setContractAddress('Holding', holding.address); + + creditorHandler = await deployer.deploy( + CreditorHandler, + hub.address, + { gas: 7000000, from: accounts[0] }, + ); + await hub.setContractAddress('CreditorHandler', creditorHandler.address); + + litigation = await deployer.deploy( + Litigation, + hub.address, + { gas: 7000000, from: accounts[0] }, + ); + await hub.setContractAddress('Litigation', litigation.address); + + marketplace = await deployer.deploy( + Marketplace, + hub.address, + { gas: 7000000, from: accounts[0] }, + ); + await hub.setContractAddress('Marketplace', marketplace.address); + + replacement = await deployer.deploy( + Replacement, + hub.address, + { gas: 7000000, from: accounts[0] }, + ); + await hub.setContractAddress('Replacement', replacement.address); + + for (let i = 0; i < 20; i += 1) { + amounts.push(amountToMint); + recepients.push(accounts[i]); } + await token.mintMany(recepients, amounts, { from: accounts[0] }); + await token.finishMinting({ from: accounts[0] }); + + break; + case 'development': + case 'ganache': + await deployer.deploy(Hub, { gas: 6000000, from: accounts[0] }) + 
.then((result) => { + hub = result; + }); + await hub.setContractAddress('Owner', accounts[0]); profileStorage = await deployer.deploy( ProfileStorage, @@ -95,7 +172,7 @@ module.exports = async (deployer, network, accounts) => { approval = await deployer.deploy(Approval); await hub.setContractAddress('Approval', approval.address); - token = await deployer.deploy(XDaiTrac, accounts[0], amountToMint); + token = await deployer.deploy(TracToken, accounts[0], accounts[1], accounts[2]); await hub.setContractAddress('Token', token.address); profile = await deployer.deploy(Profile, hub.address, { gas: 9000000, from: accounts[0] }); @@ -132,7 +209,7 @@ module.exports = async (deployer, network, accounts) => { ); await hub.setContractAddress('Replacement', replacement.address); - for (let i = 0; i < 10; i += 1) { + for (let i = 0; i < 20; i += 1) { amounts.push(amountToMint); recepients.push(accounts[i]); } @@ -155,8 +232,142 @@ module.exports = async (deployer, network, accounts) => { console.log(`\t MarketplaceStorage contract address: \t${marketplaceStorage.address}`); break; - case 'contracts': - console.log(`Deploying from wallet: ${accounts[0]}`); + case 'supplyTokens': + await Hub.at('0x0987197628Bb06133B6FA2409eb4cF9FCaFe8d3a') + .then((result) => { + hub = result; + }); + console.log(hub); + temp = await hub.getContractAddress.call('Token'); + console.log(temp); + console.log(temp); + console.log(temp); + console.log(temp); + token = await TracToken.at(temp); + await token.transfer(accounts[0], amountToMint.divn(2), { from: accounts[1] }); + break; + case 'setIdentity': + temp = await deployer.deploy(TestingUtilities); + temp = await TestingUtilities.deployed(); + temp = await temp.keccakAddress.call('0xc37c75271deed11c095a96ea0eedcc87e9d35152'); + temp2 = await Identity.at('0x611d771aafaa3d6fb66c4a81d97768300a6882d5'); + try { + await temp2.addKey( + temp, + [new BN(237)], + new BN(1), + { from: accounts[6] }, + ); + } catch (e) { + temp = await 
temp2.getKey.call(temp); + console.log(temp.purposes[0].toString()); + } + + break; + case 'removeIdentity': + temp = await deployer.deploy(TestingUtilities); + temp = await TestingUtilities.deployed(); + temp = await temp.keccakAddress.call('0xc37c75271deed11c095a96ea0eedcc87e9d35152'); + temp2 = await Identity.at('0x611d771aafaa3d6fb66c4a81d97768300a6882d5'); + try { + await temp2.removeKey( + temp, + { from: accounts[6] }, + ); + } catch (e) { + temp = await temp2.getKey.call(temp); + console.log(temp.purposes[0].toString()); + } + + break; + case 'mock': + + await deployer.deploy(TracToken, accounts[0], accounts[1], accounts[2]) + .then(result => token = result); + holding = await deployer.deploy(MockHolding); + + for (var i = 0; i < 20; i += 1) { + amounts.push(amountToMint); + recepients.push(accounts[i]); + } + await token.mintMany(recepients, amounts, { from: accounts[0] }); + + console.log('\n\n \t Contract adressess on ganache (mock versions):'); + console.log(`\t Token contract address: \t${token.address}`); + console.log(`\t Escrow contract address: \t${holding.address}`); + break; + case 'updateRinkeby': + await deployer.deploy(Hub) + .then((result) => { + hub = result; + }); + await hub.setContractAddress('Owner', accounts[0]); + + oldHub = await OldHub.at(''); + + temp = await oldHub.profileStorageAddress.call(); + profileStorage = await ProfileStorage.at(temp); + await hub.setContractAddress('ProfileStorage', profileStorage.address); + + holdingStorage = await deployer.deploy( + HoldingStorage, + hub.address, + ); + await hub.setContractAddress('HoldingStorage', holdingStorage.address); + + litigationStorage = await deployer.deploy( + LitigationStorage, + hub.address, + ); + await hub.setContractAddress('LitigationStorage', litigationStorage.address); + + approval = await deployer.deploy(Approval); + await hub.setContractAddress('Approval', approval.address); + + await hub.setContractAddress('Token', '0x98d9a611ad1b5761bdc1daac42c48e4d54cf5882'); + 
+ profile = await deployer.deploy(Profile, hub.address); + await hub.setContractAddress('Profile', profile.address); + + holding = await deployer.deploy(Holding, hub.address); + await hub.setContractAddress('Holding', holding.address); + + litigation = await deployer.deploy( + Litigation, + hub.address, + ); + await hub.setContractAddress('Litigation', litigation.address); + + replacement = await deployer.deploy( + Replacement, + hub.address, + ); + await hub.setContractAddress('Replacement', replacement.address); + + temp = await oldHub.profileAddress.call(); + await hub.setContractAddress('OldProfile', temp); + temp2 = await oldHub.holdingAddress.call(); + await hub.setContractAddress('OldHolding', temp2); + + await profileStorage.setHubAddress(hub.address); + + console.log('\n\n \t Contract adressess on rinkeby:'); + console.log(`\t Hub contract address: \t\t\t${hub.address}`); + console.log(`\t Approval contract address: \t\t${approval.address}`); + console.log(`\t Profile contract address: \t\t${profile.address}`); + console.log(`\t Holding contract address: \t\t${holding.address}`); + console.log(`\t Litigation contract address: \t\t${litigation.address}`); + console.log(`\t Replacement contract address: \t\t${replacement.address}`); + + console.log(`\t ProfileStorage contract address: \t${profileStorage.address}`); + console.log(`\t HoldingStorage contract address: \t${holdingStorage.address}`); + console.log(`\t LitigationStorage contract address: \t${litigationStorage.address}`); + + + console.log(`\t OldProfile contract address: \t${temp}`); + console.log(`\t OldHolding contract address: \t${temp2}`); + break; + case 'rinkeby': await deployer.deploy(Hub) .then((result) => { hub = result; @@ -191,7 +402,7 @@ module.exports = async (deployer, network, accounts) => { approval = await deployer.deploy(Approval); await hub.setContractAddress('Approval', approval.address); - await hub.setContractAddress('Token', '0x18F75411914f45665f352908F1D3D11f0Eb01f2A'); + 
await hub.setContractAddress('Token', '0x98d9a611ad1b5761bdc1daac42c48e4d54cf5882'); profile = await deployer.deploy(Profile, hub.address); await hub.setContractAddress('Profile', profile.address); @@ -233,15 +444,66 @@ module.exports = async (deployer, network, accounts) => { console.log(`\t MarketplaceStorage contract address: \t${marketplaceStorage.address}`); break; - case 'token': - recepients = []; - amounts = []; - await deployer.deploy(XDaiTrac, accounts[0], tokenSupply) - .then(async (token) => { - console.log(`Token deployed at: ${token.address}`); - await token.finishMinting({ from: accounts[0] }); - console.log('Finished minting'); + case 'live': + /* + await deployer.deploy(Hub, { gas: 6000000, from: accounts[0] }) + .then((result) => { + hub = result; }); + await hub.setContractAddress('Owner', accounts[0]); + + await hub.setContractAddress('Token', '0xaA7a9CA87d3694B5755f213B5D04094b8d0F0A6F'); + + profileStorage = await deployer.deploy( + ProfileStorage, + hub.address, + { gas: 6000000, from: accounts[0] }, + ); + await hub.setContractAddress('ProfileStorage', profileStorage.address); + + holdingStorage = await deployer.deploy( + HoldingStorage, + hub.address, + { gas: 6000000, from: accounts[0] }, + ); + await hub.setContractAddress('HoldingStorage', holdingStorage.address); + + profile = await deployer.deploy(Profile, hub.address, { gas: 6000000, from: accounts[0] }); + await hub.setContractAddress('Profile', profile.address); + + holding = await deployer.deploy(Holding, hub.address, { gas: 6000000, from: accounts[0] }); + await hub.setContractAddress('Holding', holding.address); + + approval = await deployer.deploy(Approval, { gas: 6000000, from: accounts[0] }); + await hub.setContractAddress('Approval', approval.address); + + console.log('\n\n \t Contract adressess on mainnet:'); + console.log(`\t Hub contract address: \t\t\t${hub.address}`); + console.log(`\t Profile contract address: \t\t${profile.address}`); + console.log(`\t Holding contract 
address: \t\t${holding.address}`); + console.log(`\t Approval contract address: \t\t${approval.address}`); + + console.log(`\t ProfileStorage contract address: \t${profileStorage.address}`); + console.log(`\t HoldingStorage contract address: \t${holdingStorage.address}`); + + */ + + hub = await Hub.at('0xa287d7134fb40bef071c932286bd2cd01efccf30'); + console.log(JSON.stringify(hub)); + // profile = await deployer.deploy( + // Profile, + // hub.address, + // { gas: 6000000, gasPrice: 8000000000 }, + // ); + break; + case 'updateContract': + hub = await Hub.at('insert hub contract address here'); + console.log(JSON.stringify(hub.address)); + if (hub.address) { + holding = await deployer + .deploy(Holding, hub.address, { gas: 7000000, from: accounts[0] }); + await hub.setContractAddress('Holding', holding.address); + } break; default: console.warn('Please use one of the following network identifiers: ganache, mock, test, or rinkeby'); diff --git a/modules/Blockchain/XDai/test/creditor.handler.test.js b/modules/Blockchain/XDai/test/creditor.handler.test.js index 0371e66c42..f24bfa7678 100644 --- a/modules/Blockchain/XDai/test/creditor.handler.test.js +++ b/modules/Blockchain/XDai/test/creditor.handler.test.js @@ -96,16 +96,19 @@ async function createOffer(accounts) { identity: identities[0], privateKey: privateKeys[0], hash: hash1, + color: new BN(1), }, { identity: identities[1], privateKey: privateKeys[1], hash: hash2, + color: new BN(2), }, { identity: identities[2], privateKey: privateKeys[2], hash: hash3, + color: new BN(3), }, ].sort((x, y) => x.hash.localeCompare(y.hash)); @@ -126,7 +129,8 @@ async function createOffer(accounts) { var hashes = []; let promises = []; for (i = 0; i < 3; i += 1) { - promises[i] = util.keccakBytesAddress.call(offerId, sortedIdentities[i].identity); + promises[i] = util.keccakBytesAddressNumber + .call(offerId, sortedIdentities[i].identity, sortedIdentities[i].color); } // eslint-disable-next-line no-await-in-loop hashes = await 
Promise.all(promises); @@ -152,7 +156,11 @@ async function createOffer(accounts) { confimations[0].signature, confimations[1].signature, confimations[2].signature, - [new BN(0), new BN(1), new BN(2)], + [ + sortedIdentities[0].color, + sortedIdentities[1].color, + sortedIdentities[2].color, + ], [ sortedIdentities[0].identity, sortedIdentities[1].identity, @@ -319,16 +327,19 @@ contract('Creditor handler testing', async (accounts) => { identity: identities[0], privateKey: privateKeys[0], hash: hash1, + color: new BN(0), }, { identity: identities[1], privateKey: privateKeys[1], hash: hash2, + color: new BN(1), }, { identity: identities[2], privateKey: privateKeys[2], hash: hash3, + color: new BN(2), }, ].sort((x, y) => x.hash.localeCompare(y.hash)); @@ -350,7 +361,8 @@ contract('Creditor handler testing', async (accounts) => { var hashes = []; for (i = 0; i < 3; i += 1) { // eslint-disable-next-line no-await-in-loop - hashes[i] = await util.keccakBytesAddress.call(offerId, sortedIdentities[i].identity); + hashes[i] = await util.keccakBytesAddressNumber + .call(offerId, sortedIdentities[i].identity, sortedIdentities[i].color); } // Getting confirmations @@ -372,7 +384,11 @@ contract('Creditor handler testing', async (accounts) => { confimations[0].signature, confimations[1].signature, confimations[2].signature, - [new BN(0), new BN(1), new BN(2)], + [ + sortedIdentities[0].color, + sortedIdentities[1].color, + sortedIdentities[2].color, + ], [ sortedIdentities[0].identity, sortedIdentities[1].identity, @@ -456,16 +472,19 @@ contract('Creditor handler testing', async (accounts) => { identity: identities[0], privateKey: privateKeys[0], hash: hash1, + color: new BN(0), }, { identity: identities[1], privateKey: privateKeys[1], hash: hash2, + color: new BN(1), }, { identity: identities[2], privateKey: privateKeys[2], hash: hash3, + color: new BN(2), }, ].sort((x, y) => x.hash.localeCompare(y.hash)); @@ -487,7 +506,8 @@ contract('Creditor handler testing', async (accounts) 
=> { var hashes = []; for (i = 0; i < 3; i += 1) { // eslint-disable-next-line no-await-in-loop - hashes[i] = await util.keccakBytesAddress.call(offerId, sortedIdentities[i].identity); + hashes[i] = await util.keccakBytesAddressNumber + .call(offerId, sortedIdentities[i].identity, sortedIdentities[i].color); } // Getting confirmations @@ -507,7 +527,11 @@ contract('Creditor handler testing', async (accounts) => { confimations[0].signature, confimations[1].signature, confimations[2].signature, - [new BN(0), new BN(1), new BN(2)], + [ + sortedIdentities[0].color, + sortedIdentities[1].color, + sortedIdentities[2].color, + ], [ sortedIdentities[0].identity, sortedIdentities[1].identity, @@ -527,7 +551,8 @@ contract('Creditor handler testing', async (accounts) => { for (let i = 0; i < res.length; i += 1) { assert(tokenAmountPerHolder.eq(res[i].stakedAmount), `Token amount not matching for identity ${sortedIdentities[i].identity}!` + `Expected ${tokenAmountPerHolder.toString()}, but got ${res[i].stakedAmount}!`); - assert.equal(res[i].litigationEncryptionType, i, 'Litigation hash not matching!'); + const chosenEnctyptionType = sortedIdentities[i].color.toString(10); + assert.equal(res[i].litigationEncryptionType, chosenEnctyptionType, 'Litigation hash not matching!'); } const finalStake = []; diff --git a/modules/Blockchain/XDai/test/litigation.test.js b/modules/Blockchain/XDai/test/litigation.test.js index 332f2e1db3..74b57c6fe8 100644 --- a/modules/Blockchain/XDai/test/litigation.test.js +++ b/modules/Blockchain/XDai/test/litigation.test.js @@ -223,16 +223,19 @@ contract('Litigation testing', async (accounts) => { identity: identities[0], privateKey: privateKeys[0], hash: hash1, + color: new BN(0), }, { identity: identities[1], privateKey: privateKeys[1], hash: hash2, + color: new BN(1), }, { identity: identities[2], privateKey: privateKeys[2], hash: hash3, + color: new BN(2), }, ].sort((x, y) => x.hash.localeCompare(y.hash)); @@ -255,8 +258,8 @@ contract('Litigation 
testing', async (accounts) => { let promises = []; for (let i = 0; i < 3; i += 1) { // eslint-disable-next-line no-await-in-loop - promises[i] = await util.keccakBytesAddress - .call(offerId, sortedIdentities[i].identity); + promises[i] = await util.keccakBytesAddressNumber + .call(offerId, sortedIdentities[i].identity, sortedIdentities[i].color); } // eslint-disable-next-line no-await-in-loop confirmations = await Promise.all(promises); @@ -282,7 +285,11 @@ contract('Litigation testing', async (accounts) => { signedConfirmations[0].signature, signedConfirmations[1].signature, signedConfirmations[2].signature, - [new BN(2), new BN(2), new BN(2)], + [ + sortedIdentities[0].color, + sortedIdentities[1].color, + sortedIdentities[2].color, + ], [ sortedIdentities[0].identity, sortedIdentities[1].identity, diff --git a/modules/Blockchain/XDai/test/metrics.test.js b/modules/Blockchain/XDai/test/metrics.test.js index f87229716f..9affca643b 100644 --- a/modules/Blockchain/XDai/test/metrics.test.js +++ b/modules/Blockchain/XDai/test/metrics.test.js @@ -99,16 +99,19 @@ async function createOffer(accounts) { identity: identities[0], privateKey: privateKeys[0], hash: hash1, + color: new BN(1), }, { identity: identities[1], privateKey: privateKeys[1], hash: hash2, + color: new BN(2), }, { identity: identities[2], privateKey: privateKeys[2], hash: hash3, + color: new BN(3), }, ].sort((x, y) => x.hash.localeCompare(y.hash)); @@ -130,7 +133,8 @@ async function createOffer(accounts) { var hashes = []; let promises = []; for (i = 0; i < 3; i += 1) { - promises[i] = util.keccakBytesAddress.call(offerId, sortedIdentities[i].identity); + promises[i] = util.keccakBytesAddressNumber + .call(offerId, sortedIdentities[i].identity, sortedIdentities[i].color); } hashes = await Promise.all(promises); @@ -152,7 +156,11 @@ async function createOffer(accounts) { confimations[0].signature, confimations[1].signature, confimations[2].signature, - [new BN(0), new BN(1), new BN(2)], + [ + 
sortedIdentities[0].color, + sortedIdentities[1].color, + sortedIdentities[2].color, + ], [ sortedIdentities[0].identity, sortedIdentities[1].identity, diff --git a/modules/Blockchain/XDai/test/offer.test.js b/modules/Blockchain/XDai/test/offer.test.js index b63b24a40f..43a09e6416 100644 --- a/modules/Blockchain/XDai/test/offer.test.js +++ b/modules/Blockchain/XDai/test/offer.test.js @@ -96,16 +96,19 @@ async function createOffer(accounts) { identity: identities[0], privateKey: privateKeys[0], hash: hash1, + color: new BN(1), }, { identity: identities[1], privateKey: privateKeys[1], hash: hash2, + color: new BN(2), }, { identity: identities[2], privateKey: privateKeys[2], hash: hash3, + color: new BN(3), }, ].sort((x, y) => x.hash.localeCompare(y.hash)); @@ -126,7 +129,8 @@ async function createOffer(accounts) { var hashes = []; let promises = []; for (i = 0; i < 3; i += 1) { - promises[i] = util.keccakBytesAddress.call(offerId, sortedIdentities[i].identity); + promises[i] = util.keccakBytesAddressNumber + .call(offerId, sortedIdentities[i].identity, sortedIdentities[i].color); } // eslint-disable-next-line no-await-in-loop hashes = await Promise.all(promises); @@ -152,7 +156,11 @@ async function createOffer(accounts) { confimations[0].signature, confimations[1].signature, confimations[2].signature, - [new BN(0), new BN(1), new BN(2)], + [ + sortedIdentities[0].color, + sortedIdentities[1].color, + sortedIdentities[2].color, + ], [ sortedIdentities[0].identity, sortedIdentities[1].identity, @@ -308,16 +316,19 @@ contract('Offer testing', async (accounts) => { identity: identities[0], privateKey: privateKeys[0], hash: hash1, + color: new BN(0), }, { identity: identities[1], privateKey: privateKeys[1], hash: hash2, + color: new BN(1), }, { identity: identities[2], privateKey: privateKeys[2], hash: hash3, + color: new BN(2), }, ].sort((x, y) => x.hash.localeCompare(y.hash)); @@ -339,7 +350,8 @@ contract('Offer testing', async (accounts) => { var hashes = []; for (i = 
0; i < 3; i += 1) { // eslint-disable-next-line no-await-in-loop - hashes[i] = await util.keccakBytesAddress.call(offerId, sortedIdentities[i].identity); + hashes[i] = await util.keccakBytesAddressNumber + .call(offerId, sortedIdentities[i].identity, sortedIdentities[i].color); } // Getting confirmations @@ -359,7 +371,11 @@ contract('Offer testing', async (accounts) => { confimations[0].signature, confimations[1].signature, confimations[2].signature, - [new BN(0), new BN(1), new BN(2)], + [ + sortedIdentities[0].color, + sortedIdentities[1].color, + sortedIdentities[2].color, + ], [ sortedIdentities[0].identity, sortedIdentities[1].identity, @@ -379,7 +395,11 @@ contract('Offer testing', async (accounts) => { confimations[0].signature, confimations[1].signature, confimations[2].signature, - [new BN(0), new BN(1), new BN(2)], + [ + sortedIdentities[0].color, + sortedIdentities[1].color, + sortedIdentities[2].color, + ], [ sortedIdentities[0].identity, sortedIdentities[1].identity, @@ -398,7 +418,7 @@ contract('Offer testing', async (accounts) => { // eslint-disable-next-line no-await-in-loop res = await profileStorage.profile.call(sortedIdentities[i].identity); assert(tokenAmountPerHolder.eq(res.stakeReserved), `Reserved stake amount incorrect for holder ${i + 1}!` - + `\n\tExpected: ${tokenAmountPerHolder.toString(10)}\n\tActual: ${res.stakeReserved.toString(10)}`); + + `\n\tExpected: ${tokenAmountPerHolder.toString(10)}\n\tActual: ${res.stakeReserved.toString(10)}`); } res = await profileStorage.profile.call(DC_identity); assert(tokenAmountPerHolder.mul(new BN(3)).eq(res.stakeReserved), 'Reserved stake amount incorrect for DC!' 
@@ -409,7 +429,8 @@ contract('Offer testing', async (accounts) => { res = await holdingStorage.holder.call(offerId, sortedIdentities[i].identity); assert(tokenAmountPerHolder.eq(res.stakedAmount), 'Token amount not matching!'); - assert.equal(res.litigationEncryptionType, i, 'Red litigation hash not matching!'); + const expectedEncryptionType = sortedIdentities[i].color.toString(10); + assert.equal(res.litigationEncryptionType, expectedEncryptionType, 'Litigation hash not matching!'); } for (i = 0; i < confimations.length; i += 1) { diff --git a/modules/Blockchain/XDai/truffle.js b/modules/Blockchain/XDai/truffle.js index f4c1d7cb9d..f9868fdeb4 100644 --- a/modules/Blockchain/XDai/truffle.js +++ b/modules/Blockchain/XDai/truffle.js @@ -2,8 +2,6 @@ const result = require('dotenv').config({ path: `${__dirname}/../../../.env` }); var HDWalletProvider = require('truffle-hdwallet-provider'); // eslint-disable-line import/no-unresolved var mnemonic = process.env.TRUFFLE_MNEMONIC; -const privateKey = process.env.XDAI_PRIVATE_KEY; -const rpc_endpoint = process.env.XDAI_ACCESS_KEY; module.exports = { compilers: { @@ -18,6 +16,13 @@ module.exports = { }, networks: { + development: { + host: 'localhost', + port: 8545, + gas: 4000000, + network_id: '*', // Match any network id + }, + ganache: { host: 'localhost', port: 7545, @@ -25,35 +30,76 @@ module.exports = { network_id: '5777', }, - test: { + supplyTokens: { + host: 'localhost', + port: 7545, + gas: 6000000, + network_id: '5777', + }, + + setIdentity: { host: 'localhost', port: 7545, gas: 6000000, network_id: '5777', }, - contracts: { - provider: () => new HDWalletProvider(privateKey, rpc_endpoint), - network_id: 100, - gasPrice: 1000000000, - websockets: false, + removeIdentity: { + host: 'localhost', + port: 7545, + gas: 6000000, + network_id: '5777', + }, + + updateRinkeby: { + host: 'localhost', // Connect to geth on the specified + port: 8545, + provider: () => new HDWalletProvider(mnemonic, 
`${process.env.RINKEBY_ACCESS_KEY}`), + network_id: 4, + gas: 6000000, // Gas limit used for deploys + websockets: true, skipDryRun: true, }, - token: { - provider: () => new HDWalletProvider(privateKey, `${process.env.XDAI_ACCESS_KEY}`), - network_id: 100, - gas: 1700000, // Gas limit used for deploys - gasPrice: 1000000000, - websockets: false, + test: { + host: 'localhost', + port: 7545, + gas: 6000000, + network_id: '5777', + }, + + mock: { + host: 'localhost', + port: 7545, + gas: 6000000, + network_id: '5777', + }, + + rinkeby: { + host: 'localhost', // Connect to geth on the specified + port: 8545, + provider: () => new HDWalletProvider(mnemonic, `${process.env.RINKEBY_ACCESS_KEY}`), + network_id: 4, + gas: 6500000, // Gas limit used for deploys + websockets: true, skipDryRun: true, }, - xdai: { + live: { + host: 'localhost', + port: 8545, + provider: () => new HDWalletProvider(mnemonic, `${process.env.MAINNET_ACCESS_KEY}`), network_id: 1, - gas: 1700000, // Gas limit used for deploys + gas: 6000000, // Gas limit used for deploys websockets: true, skipDryRun: true, }, + + updateContract: { + host: 'localhost', + port: 7545, + gas: 6000000, + network_id: '5777', + }, }, }; diff --git a/modules/Database/Arangojs.js b/modules/Database/Arangojs.js index a60691f5fd..b5b9cb1979 100644 --- a/modules/Database/Arangojs.js +++ b/modules/Database/Arangojs.js @@ -863,6 +863,112 @@ class ArangoJS { return count; } + /** + * This method will leave only encryption for selected color, datasetId and offerId + * Rest of the encryption data will be removed + * @param datasetId + * @param offerId + * @param leaveColor - if null, random color will be left in database + * @returns {Promise} + */ + async removeUnnecessaryEncryptionData(datasetId, offerId, leaveColor) { + const queryString = `LET datasetMetadata = DOCUMENT('ot_datasets', @datasetId) + +let choosenColor = @leaveColor ? 
@leaveColor: ( + for v in DOCUMENT('ot_vertices', datasetMetadata.vertices) +filter v.encrypted != null +filter v.encrypted[@offerId] != null +filter ATTRIBUTES(v.encrypted[@offerId]) > 1 +limit 1 +return ATTRIBUTES(v.encrypted[@offerId])[0] +)[0] + +let finalColor = choosenColor ? choosenColor: ( + for v in DOCUMENT('ot_edges', datasetMetadata.edges) +filter v.encrypted != null +filter v.encrypted[@offerId] != null +filter ATTRIBUTES(v.encrypted[@offerId]) > 1 +limit 1 +return ATTRIBUTES(v.encrypted[@offerId])[0] +)[0] + +let verticesAction = (for v in DOCUMENT('ot_vertices', datasetMetadata.vertices) +filter v.encrypted != null +filter v.encrypted[@offerId] != null +filter ATTRIBUTES(v.encrypted[@offerId]) > 1 +let encrypted = merge(v.encrypted, {@offerId: { [finalColor]: v.encrypted[@offerId][finalColor]}}) +return {key: v._key, encrypted} +) +let edgesAction = (for e in DOCUMENT('ot_edges', datasetMetadata.edges) +filter e.encrypted != null +filter e.encrypted[@offerId] != null +filter ATTRIBUTES(e.encrypted[@offerId]) > 1 +let encrypted = merge(e.encrypted, {@offerId: { [finalColor]: e.encrypted[@offerId][finalColor]}}) +return {key: e._key, encrypted} +) + +return {verticesAction, edgesAction}`; + + const actions = await this.runQuery(queryString, { + datasetId, offerId, leaveColor, + }); + actions[0].edgesAction = actions[0].edgesAction ? actions[0].edgesAction : []; + actions[0].verticesAction = actions[0].verticesAction ? 
actions[0].verticesAction : []; + /* eslint-disable no-unused-expressions,import/no-unresolved,global-require */ + const action = String((params) => { + const { query } = require('@arangodb'); + query`for action in ${params.params.verticesAction} UPDATE {_key: action.key, encrypted: action.encrypted } in 'ot_vertices' OPTIONS { mergeObjects: false }`; + query`for action in ${params.params.edgesAction} UPDATE {_key: action.key, encrypted: action.encrypted } in 'ot_edges' OPTIONS { mergeObjects: false }`; + }); + + await this.db.transaction(['ot_vertices', 'ot_edges'], action, { + params: { + edgesAction: actions[0].edgesAction, + verticesAction: actions[0].verticesAction, + }, + }); + } + + /** + * This method will remove all encryption data for dataset and offer + * @param datasetId + * @param offerId + * @returns {Promise} + */ + async removeEncryptionData(datasetId, offerId) { + const queryString = `LET datasetMetadata = DOCUMENT('ot_datasets', @datasetId) +let verticesAction = (for v in DOCUMENT('ot_vertices', datasetMetadata.vertices) +filter v.encrypted != null +filter v.encrypted[@offerId] != null +let encrypted = unset(v.encrypted, @offerId) +return {key: v._key, encrypted} +) +let edgesAction = (for e in DOCUMENT('ot_edges', datasetMetadata.edges) +filter e.encrypted != null +filter e.encrypted[@offerId] != null +let encrypted = unset(e.encrypted, @offerId) +return {key: e._key, encrypted} +) +return {verticesAction, edgesAction}`; + const actions = await this.runQuery(queryString, { + datasetId, offerId, + }); + + /* eslint-disable no-unused-expressions,import/no-unresolved,global-require */ + const action = String((params) => { + const { query } = require('@arangodb'); + query`for action in ${params.params.verticesAction} UPDATE {_key: action.key, encrypted: action.encrypted } in 'ot_vertices' OPTIONS { mergeObjects: false }`; + query`for action in ${params.params.edgesAction} UPDATE {_key: action.key, encrypted: action.encrypted } in 'ot_edges' OPTIONS { 
mergeObjects: false }`; + }); + + await this.db.transaction(['ot_vertices', 'ot_edges'], action, { + params: { + edgesAction: actions[0].edgesAction, + verticesAction: actions[0].verticesAction, + }, + }); + } + /** * Updates document imports by ID * @param collectionName @@ -1028,6 +1134,10 @@ class ArangoJS { existing.encrypted[key] = document.encrypted[key]; } } + if (document.objectIds && !existing.objectIds) { + existing.objectIds = document.objectIds; + } + return this.updateDocument(collectionName, existing); } } diff --git a/modules/Database/GraphStorage.js b/modules/Database/GraphStorage.js index befa3a9d8b..dfe689c443 100644 --- a/modules/Database/GraphStorage.js +++ b/modules/Database/GraphStorage.js @@ -167,6 +167,50 @@ class GraphStorage { }); } + /** + * This method will leave only encryption for selected color, datasetId and offerId + * Rest of the encryption data will be removed + * @param datasetId + * @param offerId + * @param leaveColor + * @returns {Promise} + */ + async removeUnnecessaryEncryptionData(datasetId, offerId, leaveColor) { + return new Promise((resolve, reject) => { + if (!this.db) { + reject(Error('Not connected to graph database')); + } else { + this.db.removeUnnecessaryEncryptionData(datasetId, offerId, leaveColor) + .then((result) => { + resolve(result); + }).catch((err) => { + reject(err); + }); + } + }); + } + + /** + * This method will remove all encryption data for dataset and offer + * @param datasetId + * @param offerId + * @returns {Promise} + */ + async removeEncryptionData(datasetId, offerId) { + return new Promise((resolve, reject) => { + if (!this.db) { + reject(Error('Not connected to graph database')); + } else { + this.db.removeEncryptionData(datasetId, offerId) + .then((result) => { + resolve(result); + }).catch((err) => { + reject(err); + }); + } + }); + } + /** * Finds imports IDs based on data location query * diff --git a/modules/EventEmitter.js b/modules/EventEmitter.js index a13696f740..ead52b3499 100644 --- 
a/modules/EventEmitter.js +++ b/modules/EventEmitter.js @@ -603,7 +603,7 @@ class EventEmitter { }); // async - this._on('kad-replication-finished', async (request) => { + this._on('kad-replication-finished', async (request, response) => { try { const dhNodeId = transport.extractSenderID(request); const replicationFinishedMessage = transport.extractMessage(request); @@ -613,27 +613,12 @@ class EventEmitter { dhWallet = transport.extractSenderInfo(request).wallet; } - if (replicationFinishedMessage.message) { // todo remove if for next update - const { - message, messageSignature, - } = replicationFinishedMessage; - if (!Utilities.isMessageSigned(message, messageSignature)) { - logger.warn(`We have a forger here. Signature doesn't match for message: ${JSON.stringify(message)}`); - return; - } - - await dcService.verifyDHReplication( - message.offerId, messageSignature, - dhNodeId, message.dhIdentity, dhWallet, false, - ); - } - const { - offerId, messageSignature, dhIdentity, + offerId, messageSignature, dhIdentity, alternativeSignature, } = replicationFinishedMessage; await dcService.verifyDHReplication( - offerId, messageSignature, + offerId, response, messageSignature, alternativeSignature, dhNodeId, dhIdentity, dhWallet, false, ); } catch (e) { diff --git a/modules/command/common/autoupdater-command.js b/modules/command/common/autoupdater-command.js index e96ec7a893..b564629974 100644 --- a/modules/command/common/autoupdater-command.js +++ b/modules/command/common/autoupdater-command.js @@ -71,7 +71,7 @@ class AutoupdaterCommand extends Command { return Command.repeat(); } - if (this.config.high_availability_setup) { + if (this.config.high_availability.enabled) { const activeNode = await Models.node_status.findOne({ where: { hostname: this.config.high_availability.private_ip_address }, }); diff --git a/modules/command/dc/dc-convert-to-graph-command.js b/modules/command/dc/dc-convert-to-graph-command.js index 9291d02fac..63ec599751 100644 --- 
a/modules/command/dc/dc-convert-to-graph-command.js +++ b/modules/command/dc/dc-convert-to-graph-command.js @@ -1,10 +1,13 @@ +const Models = require('../../../models'); const Command = require('../command'); +const Utilities = require('../../Utilities'); class DcConvertToGraphCommand extends Command { constructor(ctx) { super(ctx); this.importWorkerController = ctx.importWorkerController; this.commandExecutor = ctx.commandExecutor; + this.logger = ctx.logger; } /** @@ -13,6 +16,45 @@ class DcConvertToGraphCommand extends Command { */ async execute(command) { try { + const { handler_id, documentPath, data_set_id } = command.data; + const dataInfo = await Models.data_info.findOne({ + where: { data_set_id }, + include: [ + { + model: Models.data_provider_wallets, + attributes: ['wallet', 'blockchain_id'], + }, + ], + }); + + if (dataInfo) { + this.logger.info(`Import already executed for dataset ${data_set_id}`); + + await Utilities.deleteDirectory(documentPath); + + // Set import process as completed + await Models.handler_ids.update( + { + status: 'COMPLETED', + data: JSON.stringify({ + dataset_id: data_set_id, + import_time: dataInfo.import_timestamp, + otjson_size_in_bytes: dataInfo.otjson_size_in_bytes, + root_hash: dataInfo.root_hash, + data_hash: dataInfo.data_hash, + message: 'Dataset already imported on the node, importing skipped', + }), + }, + { + where: { + handler_id, + }, + }, + ); + + return Command.empty(); + } + await this.importWorkerController.startGraphConverterWorker(command); } catch (error) { await this.commandExecutor.add({ diff --git a/modules/command/dc/dc-convert-to-ot-json-command.js b/modules/command/dc/dc-convert-to-ot-json-command.js index 92de6d1edd..9b845a673f 100644 --- a/modules/command/dc/dc-convert-to-ot-json-command.js +++ b/modules/command/dc/dc-convert-to-ot-json-command.js @@ -32,6 +32,7 @@ class DcConvertToOtJsonCommand extends Command { fs.writeFileSync(documentPath, JSON.stringify(document)); + Object.assign(command.data, { 
data_set_id: document['@id'] }); return this.continueSequence(command.data, command.sequence); } await this.importWorkerController.startOtjsonConverterWorker( diff --git a/modules/command/dc/dc-offer-finalize-command.js b/modules/command/dc/dc-offer-finalize-command.js index 91e5470267..0bf06fb588 100644 --- a/modules/command/dc/dc-offer-finalize-command.js +++ b/modules/command/dc/dc-offer-finalize-command.js @@ -150,10 +150,14 @@ class DCOfferFinalizeCommand extends Command { // send back to miner this.logger.important(`DHs [${excludedDHs}] don't have enough funds for offer ${offerId}. Sending back to miner...`); const { data } = command; - Object.assign(data, { - excludedDHs, - internalOfferId: offer.id, - }); + if (data.excludedDHs) { + data.excludedDHs.push(...excludedDHs); + } else { + Object.assign(data, { + excludedDHs, + internalOfferId: offer.id, + }); + } this.logger.warn(`Failed to finalize offer ${offerId} because some of the DHs didn't have enough funds. Trying again...`); return { commands: [{ diff --git a/modules/command/dc/dc-replication-completed-command.js b/modules/command/dc/dc-replication-completed-command.js index 443c9b0b24..b4a058b34b 100644 --- a/modules/command/dc/dc-replication-completed-command.js +++ b/modules/command/dc/dc-replication-completed-command.js @@ -1,7 +1,8 @@ const Command = require('../command'); const Utilities = require('../../Utilities'); -const encryption = require('../../RSAEncryption'); -const models = require('../../../models/index'); +const Encryption = require('../../RSAEncryption'); +const Models = require('../../../models/index'); +const constants = require('../../constants'); /** * Handles replication request @@ -12,6 +13,7 @@ class DcReplicationCompletedCommand extends Command { this.config = ctx.config; this.logger = ctx.logger; this.transport = ctx.transport; + this.blockchain = ctx.blockchain; } /** @@ -24,38 +26,166 @@ class DcReplicationCompletedCommand extends Command { offerId, dhNodeId, dhWallet, 
dhIdentity, signature, isReplacement, + alternativeSignature, response, } = command.data; + try { + const offer = await Models.offers.findOne({ + where: { + offer_id: offerId, + }, + }); + const { blockchain_id } = offer; + + const replicatedData = await Models.replicated_data.findOne({ + where: + { + offer_id: offerId, dh_id: dhNodeId, + }, + }); + if (!replicatedData) { + throw new Error(`Failed to find replication for DH node ${dhNodeId}`); + } + + const { signerOld, signerNew } = this.extractSigners( + offerId, dhIdentity, replicatedData.color, + signature, alternativeSignature, + ); + + const confirmation = await this + .validateSignatures( + offerId, dhIdentity, dhWallet, + signerOld, signerNew, signature, alternativeSignature, blockchain_id, + ); + + replicatedData.confirmation = confirmation; + replicatedData.status = 'VERIFIED'; + await replicatedData.save({ fields: ['status', 'confirmation'] }); + + await this.transport.sendResponse(response, { status: 'verified', offer_id: offerId }); + + if (isReplacement === false) { + this.logger.notify(`Replication finished for DH node ${dhNodeId}`); + } else { + this.logger.notify(`Replacement replication finished for DH node ${dhNodeId}`); + } + } catch (error) { + await this._handleError(command, error); + } + + return Command.empty(); + } - const toValidate = [ + extractSigners(offerId, dhIdentity, color, signature, alternativeSignature) { + const oldConfirmationContent = [ Utilities.denormalizeHex(offerId), - Utilities.denormalizeHex(dhIdentity)]; - const address = encryption.extractSignerAddress(toValidate, signature); + Utilities.denormalizeHex(dhIdentity), + ]; + const signerOld = Encryption.extractSignerAddress(oldConfirmationContent, signature); - if (!Utilities.compareHexStrings(address, dhWallet)) { - throw new Error(`Failed to validate DH ${dhWallet} signature for offer ${offerId}`); + const newConfirmationContent = [ + Utilities.denormalizeHex(offerId), + Utilities.denormalizeHex(dhIdentity), + color, + 
]; + + const signerNew = alternativeSignature ? + Encryption.extractSignerAddress(newConfirmationContent, alternativeSignature) : + Encryption.extractSignerAddress(newConfirmationContent, signature); + + + return { signerOld, signerNew }; + } + + async validateSignatures( + offerId, dhIdentity, dhWallet, + signerOld, signerNew, signature, alternativeSignature, blockchain_id, + ) { + let errorMessage = `Failed to validate DH ${dhWallet} signature for offer ${offerId}. `; + + const oldSignatureMatches = signerOld && Utilities.compareHexStrings(signerOld, dhWallet); + const newSignatureMatches = signerNew && Utilities.compareHexStrings(signerNew, dhWallet); + + let holdingVersion; + try { + holdingVersion = await this.blockchain + .getContractVersion('HOLDING_CONTRACT', blockchain_id).response; + holdingVersion = parseInt(holdingVersion, 10); + } catch (e) { + if (e.message === 'Contract does not have version variable') { + holdingVersion = 100; + } else { + throw Error('Failed to fetch holding contract version'); + } } - const replicatedData = await models.replicated_data.findOne({ - where: - { - offer_id: offerId, dh_id: dhNodeId, - }, - }); - if (!replicatedData) { - throw new Error(`Failed to find replication for DH node ${dhNodeId}`); + if (holdingVersion >= 101 && newSignatureMatches) { + const signerHasPermission = await this + .signerHasPermission(dhIdentity, dhWallet, blockchain_id); + + if (!signerHasPermission) { + errorMessage += `Extracted signer wallet ${signerNew} ` + + 'does not have the appropriate permissions set up for the given identity ' + + `${dhIdentity}.`; + throw Error(errorMessage); + } + + return alternativeSignature || signature; + } + + if (holdingVersion >= 101 && oldSignatureMatches) { + errorMessage += 'Detected deprecated offer confirmation format, the holder should update their node' + + ' to be eligible for new offers'; + throw Error(errorMessage); + } + + if (holdingVersion >= 101 && !oldSignatureMatches && !newSignatureMatches) { + 
// TODO In the next version, if the above two checks passed, return this error and + // remove the check below + errorMessage += `Signer wallet does not match the sender wallet ${dhWallet}`; + throw Error(errorMessage); } - replicatedData.confirmation = signature; - replicatedData.status = 'VERIFIED'; - await replicatedData.save({ fields: ['status', 'confirmation'] }); - - if (isReplacement === false) { - this.logger.notify(`Replication finished for DH node ${dhNodeId}`); - } else { - this.logger.notify(`Replacement replication finished for DH node ${dhNodeId}`); + + if (holdingVersion < 101 && oldSignatureMatches) { + const signerHasPermission = await this + .signerHasPermission(dhIdentity, dhWallet, blockchain_id); + + if (!signerHasPermission) { + errorMessage += `Extracted signer wallet ${signerOld} ` + + 'does not have the appropriate permissions set up for the given identity ' + + `${dhIdentity}.`; + throw Error(errorMessage); + } + return signature; } + + errorMessage += `Signer wallet ${signerOld} does not match the sender wallet ${dhWallet}`; + throw Error(errorMessage); + } + + async signerHasPermission(dhIdentity, dhWallet, blockchain_id) { + const purposes = await this.blockchain + .getWalletPurposes(dhIdentity, dhWallet, blockchain_id).response; + + return purposes.includes(constants.IDENTITY_PERMISSION.encryption); + } + + async recover(command, error) { + await this._handleError(command, error); + return Command.empty(); } + async _handleError(command, error) { + const { + offerId, dhIdentity, response, + } = command.data; + + this.logger.warn(`Failed to complete replication for offerId ${offerId} ` + + `and holder identity ${dhIdentity}. 
${error.message}`); + + await this.transport.sendResponse(response, { status: 'fail', message: error.message }); + } + /** * Builds default command * @param map diff --git a/modules/command/dc/dc-replication-send-command.js b/modules/command/dc/dc-replication-send-command.js index 730e814188..70a02a948a 100644 --- a/modules/command/dc/dc-replication-send-command.js +++ b/modules/command/dc/dc-replication-send-command.js @@ -3,6 +3,7 @@ const Command = require('../command'); const Utilities = require('../../Utilities'); const Encryption = require('../../RSAEncryption'); const Models = require('../../../models/index'); +const constants = require('../../constants'); /** * Handles replication request @@ -41,7 +42,7 @@ class DCReplicationSendCommand extends Command { const purposes = await this.blockchain .getWalletPurposes(dhIdentity, wallet, blockchainId).response; - if (!purposes.includes('2')) { + if (!purposes.includes(constants.IDENTITY_PERMISSION.action)) { const message = 'Wallet provided does not have the appropriate permissions set up for the given identity.'; this.logger.warn(message); // TODO Send some response to DH to avoid pointlessly waiting @@ -52,8 +53,8 @@ class DCReplicationSendCommand extends Command { const usedDH = await Models.replicated_data.findOne({ where: { dh_id: identity, - dh_wallet: wallet, - dh_identity: dhIdentity, + dh_wallet: wallet.toLowerCase(), + dh_identity: dhIdentity.toLowerCase(), offer_id: offerId, }, }); diff --git a/modules/command/dh/dh-litigation-initiated-command.js b/modules/command/dh/dh-litigation-initiated-command.js deleted file mode 100644 index 7d98561011..0000000000 --- a/modules/command/dh/dh-litigation-initiated-command.js +++ /dev/null @@ -1,98 +0,0 @@ -const Command = require('../command'); -const Utilities = require('../../Utilities'); -const Models = require('../../../models/index'); -const constants = require('../../constants'); - -/** - * Repeatable command that checks whether litigation is successfully 
initiated - */ -class DHLitigationInitiatedCommand extends Command { - constructor(ctx) { - super(ctx); - this.config = ctx.config; - this.logger = ctx.logger; - this.commandExecutor = ctx.commandExecutor; - this.blockchain = ctx.blockchain; - this.profileService = ctx.profileService; - this.errorNotificationService = ctx.errorNotificationService; - } - - /** - * Executes command and produces one or more events - * @param command - */ - async execute(command) { - try { - const events = await Models.events.findAll({ - where: { - event: 'LitigationInitiated', - finished: 0, - }, - }); - if (events) { - const event = events.find((e) => { - const { - holderIdentity, - } = JSON.parse(e.data); - - return Utilities.compareHexStrings( - holderIdentity, - this.profileService.getIdentity(e.blockchain_id), - ); - }); - if (event) { - event.finished = 1; - await event.save({ fields: ['finished'] }); - - const { - offerId, - requestedObjectIndex, - requestedBlockIndex, - } = JSON.parse(event.data); - - this.logger.warn(`Litigation initiated for offer ${offerId}, object index ${requestedObjectIndex} and block index ${requestedBlockIndex}.`); - - await this.commandExecutor.add({ - name: 'dhLitigationAnswerCommand', - data: { - offerId, - blockchain_id: event.blockchain_id, - objectIndex: requestedObjectIndex, - blockIndex: requestedBlockIndex, - }, - retries: constants.ANSWER_LITIGATION_COMMAND_RETRIES, - }); - } - } - } catch (e) { - this.logger.error(`Failed to process dhLitigationInitiatedCommand. 
${e}`); - this.errorNotificationService.notifyError( - e, - null, - constants.PROCESS_NAME.litigationHandling, - ); - } - - return Command.repeat(); - } - - /** - * Builds default AddCommand - * @param map - * @returns {{add, data: *, delay: *, deadline: *}} - */ - default(map) { - const command = { - name: 'dhLitigationInitiatedCommand', - data: { - }, - delay: 0, - period: 5000, - transactional: false, - }; - Object.assign(command, map); - return command; - } -} - -module.exports = DHLitigationInitiatedCommand; diff --git a/modules/command/dh/dh-offer-finalized-command.js b/modules/command/dh/dh-offer-finalized-command.js index d3901bfae8..f18bc93656 100644 --- a/modules/command/dh/dh-offer-finalized-command.js +++ b/modules/command/dh/dh-offer-finalized-command.js @@ -13,6 +13,7 @@ class DhOfferFinalizedCommand extends Command { this.config = ctx.config; this.remoteControl = ctx.remoteControl; this.profileService = ctx.profileService; + this.graphStorage = ctx.graphStorage; } /** @@ -85,7 +86,7 @@ class DhOfferFinalizedCommand extends Command { bid.status = 'NOT_CHOSEN'; await bid.save({ fields: ['status'] }); this.logger.important(`I haven't been chosen for offer ${offerId} on blockchain ${blockchain_id}.`); - // await this.remoteControl.onCompletedBids(); + return Command.empty(); } } @@ -103,6 +104,10 @@ class DhOfferFinalizedCommand extends Command { const bid = await Models.bids.findOne({ where: { offer_id: offerId } }); bid.status = 'NOT_CHOSEN'; await bid.save({ fields: ['status'] }); + await this.graphStorage.removeEncryptionData( + bid.data_set_id, + bid.offer_id, + ); // await this.remoteControl.onCompletedBids(); return Command.empty(); } diff --git a/modules/command/dh/dh-offer-handle-command.js b/modules/command/dh/dh-offer-handle-command.js index c84751ee9f..49d969c9b1 100644 --- a/modules/command/dh/dh-offer-handle-command.js +++ b/modules/command/dh/dh-offer-handle-command.js @@ -2,7 +2,7 @@ const path = require('path'); const Command = 
require('../command'); const Models = require('../../../models'); -const Utilities = require('../../Utilities'); +const constants = require('../../constants'); /** * Handles new offer from the DH side @@ -63,48 +63,17 @@ class DHOfferHandleCommand extends Command { bid.status = 'SENT'; await bid.save({ fields: ['status'] }); - if (response.status === 'acknowledge') { - this.logger.notify(`Received replication request acknowledgement for offer_id ${offerId} from node ${dcNodeId}.`); - - return { - commands: [ - { - name: 'dhReplicationTimeoutCommand', - delay: this.config.dc_choose_time, - data: { - offerId, - dcNodeId, - }, - }, - ], - }; - } - - this.logger.notify(`Received replication data for offer_id ${offerId} from node ${dcNodeId}.`); + this.logger.notify(`Received replication request acknowledgement for offer_id ${offerId} from node ${dcNodeId}.`); - const cacheDirectory = path.join(this.config.appDataPath, 'import_cache'); - - await Utilities.writeContentsToFile( - cacheDirectory, - offerId, - JSON.stringify({ - otJson: response.otJson, - permissionedData: response.permissionedData, - }), - ); - - const packedResponse = DHOfferHandleCommand._stripResponse(response); - Object.assign(packedResponse, { - dcNodeId, - blockchain_id, - documentPath: path.join(cacheDirectory, offerId), - }); return { commands: [ { - name: 'dhReplicationImportCommand', - data: packedResponse, - transactional: false, + name: 'dhReplicationTimeoutCommand', + delay: constants.OFFER_FINALIZED_COMMAND_DEADLINE_AT, + data: { + offerId, + dcNodeId, + }, }, ], }; diff --git a/modules/command/dh/dh-process-blockchain-events-command.js b/modules/command/dh/dh-process-blockchain-events-command.js new file mode 100644 index 0000000000..2b4cdb52d1 --- /dev/null +++ b/modules/command/dh/dh-process-blockchain-events-command.js @@ -0,0 +1,151 @@ +const Command = require('../command'); +const Utilities = require('../../Utilities'); +const Models = require('../../../models/index'); +const constants 
= require('../../constants'); + +/** + * Repeatable command that checks whether offer is created or litigation is successfully initiated + */ +class DHProcessBlockchainEventsCommand extends Command { + constructor(ctx) { + super(ctx); + this.config = ctx.config; + this.logger = ctx.logger; + this.commandExecutor = ctx.commandExecutor; + this.blockchain = ctx.blockchain; + this.profileService = ctx.profileService; + this.errorNotificationService = ctx.errorNotificationService; + this.dhService = ctx.dhService; + } + + /** + * Executes command and produces one or more events + * @param command + */ + async execute(command) { + try { + const events = await Models.events.findAll({ + where: { + event: { + [Models.Sequelize.Op.in]: [ + constants.EVENT_NAME.LitigationInitiated, + constants.EVENT_NAME.OfferCreated], + }, + finished: 0, + }, + }); + if (events) { + const litigationInitiatedEvents = events + .filter(e => e.event === constants.EVENT_NAME.LitigationInitiated); + + const offerCreatedEvents = events + .filter(e => e.event === constants.EVENT_NAME.OfferCreated); + + await this.handleLitigationInitiatedEvents(litigationInitiatedEvents); + + await this.handleOfferCreatedEvents(offerCreatedEvents); + } + } catch (e) { + this.logger.error(`Failed to process dhProcessBlockchainEventsCommand. ${e}`); + this.errorNotificationService.notifyError( + e, + null, + constants.PROCESS_NAME.bcEventsHandling, + ); + } + + return Command.repeat(); + } + + async handleLitigationInitiatedEvents(events) { + const allMyIdentities = {}; + this.blockchain.getAllBlockchainIds() + .forEach(id => allMyIdentities[id] = this.profileService.getIdentity(id)); + const replicationData = await Models.replicated_data.findAll({ + where: { status: 'LITIGATION_STARTED' }, + attributes: ['offer_id'], + }); + const myOfferIds = replicationData ? 
replicationData.map(data => data.offer_id) : []; + const promises = []; + const event = events.find((e) => { + const { holderIdentity, offerId } = JSON.parse(e.data); + if (!myOfferIds.includes(offerId)) { + e.finished = 1; + promises.push(e.save()); + } + return Utilities.compareHexStrings(holderIdentity, allMyIdentities[e.blockchain_id]); + }); + await Promise.all(promises); + if (event) { + const { + offerId, + requestedObjectIndex, + requestedBlockIndex, + } = JSON.parse(event.data); + + this.logger.warn(`Litigation initiated for offer ${offerId}, object index ${requestedObjectIndex} and block index ${requestedBlockIndex}.`); + + await this.commandExecutor.add({ + name: 'dhLitigationAnswerCommand', + data: { + offerId, + blockchain_id: event.blockchain_id, + objectIndex: requestedObjectIndex, + blockIndex: requestedBlockIndex, + }, + retries: constants.ANSWER_LITIGATION_COMMAND_RETRIES, + }); + } + } + + async handleOfferCreatedEvents(events) { + let offerData = {}; + let dcNodeId = ''; + const promises = []; + const event = events.find((e) => { + offerData = JSON.parse(e.data); + dcNodeId = Utilities.denormalizeHex(offerData.dcNodeId).substring(24); + e.finished = 1; + promises.push(e.save()); + return !Utilities.compareHexStrings(this.config.identity, dcNodeId); + }); + await Promise.all(promises); + if (event) { + try { + this.logger.notify(`Offer ${offerData.offerId} has been created by ${dcNodeId} on blockchain ${event.blockchain_id}.`); + await this.dhService.handleOffer( + offerData.offerId, + dcNodeId, + offerData.dataSetSizeInBytes, + offerData.holdingTimeInMinutes, + offerData.litigationIntervalInMinutes, + offerData.tokenAmountPerHolder, + offerData.dataSetId, + event.blockchain_id, + ); + } catch (e) { + this.logger.warn(e.message); + } + } + } + + /** + * Builds default AddCommand + * @param map + * @returns {{add, data: *, delay: *, deadline: *}} + */ + default(map) { + const command = { + name: 'dhProcessBlockchainEventsCommand', + data: { + }, + 
delay: 0, + period: 5000, + transactional: false, + }; + Object.assign(command, map); + return command; + } +} + +module.exports = DHProcessBlockchainEventsCommand; diff --git a/modules/command/dh/dh-replication-import-command.js b/modules/command/dh/dh-replication-import-command.js index 695fd234fc..44ba6adbaf 100644 --- a/modules/command/dh/dh-replication-import-command.js +++ b/modules/command/dh/dh-replication-import-command.js @@ -54,11 +54,13 @@ class DhReplicationImportCommand extends Command { let decryptedDataset; let encryptedMap; let decryptedGraphRootHash; + let color; this.decryptAndSortDataset(otJson, litigationPublicKey, offerId, encColor) .then((result) => { decryptedDataset = result.decDataset; encryptedMap = result.encMap; + color = encColor; }) .then(() => this.validateDatasetId(dataSetId, decryptedDataset)) .then(() => this.validateRootHash(decryptedDataset, dataSetId, otJson, blockchain_id)) @@ -95,7 +97,12 @@ class DhReplicationImportCommand extends Command { dcWallet, blockchain_id, )) - .then(() => this.sendReplicationFinishedMessage(offerId, dcNodeId, blockchain_id)) + .then(() => this.sendReplicationFinishedMessage( + offerId, + dcNodeId, + color, + blockchain_id, + )) .then(() => this.updateBidData(offerId)) .then(() => this.commandExecutor.add({ name: 'dhOfferFinalizedCommand', @@ -294,21 +301,38 @@ class DhReplicationImportCommand extends Command { }); } - async sendReplicationFinishedMessage(offerId, dcNodeId, blockchainId) { + /** + * Sends replication-finished message to DC, along with bid confirmations + * @param offerId - The offer ID + * @param dcNodeId - The network Identifier of the DC + * @param color - The number of the encryption received (0 = red, 1 = green, 2 = blue) + * @param blockchainId - The blockchain ID for the offer, in order to use the proper + * blockchain identity and wallet + * @returns {Promise} + */ + async sendReplicationFinishedMessage(offerId, dcNodeId, color, blockchainId) { const dhIdentity = 
this.profileService.getIdentity(blockchainId); const toSign = [ Utilities.denormalizeHex(offerId), Utilities.denormalizeHex(dhIdentity)]; - const { node_wallet, node_private_key } = this.blockchain.getWallet().response; + const { node_wallet, node_private_key } = this.blockchain.getWallet(blockchainId).response; + const toSignNew = [ + Utilities.denormalizeHex(offerId), + Utilities.denormalizeHex(dhIdentity), + color, + ]; const messageSignature = Encryption .signMessage(toSign, Utilities.normalizeHex(node_private_key)); + const alternativeSignature = Encryption + .signMessage(toSignNew, Utilities.normalizeHex(node_private_key)); const replicationFinishedMessage = { offerId, dhIdentity, messageSignature: messageSignature.signature, + alternativeSignature: alternativeSignature.signature, wallet: node_wallet, }; diff --git a/modules/command/dh/dh-replication-timeout-command.js b/modules/command/dh/dh-replication-timeout-command.js index 59c6bb1d7c..0b47ba3aef 100644 --- a/modules/command/dh/dh-replication-timeout-command.js +++ b/modules/command/dh/dh-replication-timeout-command.js @@ -57,7 +57,7 @@ class DHOfferTimeoutCommand extends Command { */ default(map) { const command = { - name: 'dhOfferHandleCommand', + name: 'dhReplicationTimeoutCommand', delay: 0, transactional: false, }; diff --git a/modules/constants.js b/modules/constants.js index ead8543a1c..df32aa3740 100644 --- a/modules/constants.js +++ b/modules/constants.js @@ -67,7 +67,7 @@ exports.OFFER_FINALIZED_COMMAND_DEADLINE_AT = 6 * 60 * 60 * 1000; * @constant {Array} PERMANENT_COMMANDS - List of all permanent commands */ exports.PERMANENT_COMMANDS = [ - 'cleanerCommand', 'dcChallengesCommand', 'dhLitigationInitiatedCommand', + 'cleanerCommand', 'dcChallengesCommand', 'dhProcessBlockchainEventsCommand', 'reputationUpdateCommand', 'autoupdaterCommand', 'exportCleanerCommand', 'trailCleanerCommand', 'handlerIdsCleanerCommand', 'datasetPruningCommand', ]; @@ -148,6 +148,7 @@ 
exports.PUBLIC_KEY_VALIDITY_IN_MILLS = 30 * 24 * 60 * 60 * 1000; // 30 days */ exports.PROCESS_NAME = { other: 'other', + bcEventsHandling: 'blockchain-events-handling', offerHandling: 'offer-handling', challengesHandling: 'challenges-handling', litigationHandling: 'litigation-handling', @@ -189,7 +190,26 @@ exports.NODE_STATUS = { fallback: 'FALLBACK', updating: 'UPDATING', }; +/** + * Blockchain event names + * @type {{LitigationInitiated: string, OfferCreated: string}} + */ +exports.EVENT_NAME = { + LitigationInitiated: 'LitigationInitiated', + OfferCreated: 'OfferCreated', +}; exports.DB_TYPE = { psql: 'psql', }; + +/** + * @constant {Object: string} IDENTITY_PERMISSION + * Possible permissions for a wallet to have for a given identity + * @type {{management: string, action: string, encryption: string}} + */ +exports.IDENTITY_PERMISSION = { + management: '1', + action: '2', + encryption: '4', +}; diff --git a/modules/controller/dh-controller.js b/modules/controller/dh-controller.js index dae7dfea90..9ce8992061 100644 --- a/modules/controller/dh-controller.js +++ b/modules/controller/dh-controller.js @@ -43,9 +43,18 @@ class DHController { offer_id: offerId, otJson, permissionedData, } = replicationMessage; - this.logger.notify(`Received replication data for offer_id ${offerId} from node ${dcNodeId}.`); + const bid = await Models.bids.findOne({ where: { offer_id: offerId } }); + if (bid.status !== 'SENT') { + this.logger.important(`Already replicated data for offer_id ${offerId} from node ${dcNodeId}`); + await this.transport.sendResponse( + response, + { status: 'fail', message: 'Replication already received' }, + ); + return; + } + const cacheDirectory = path.join(this.config.appDataPath, 'import_cache'); await Utilities.writeContentsToFile( @@ -69,6 +78,7 @@ class DHController { transactional: false, }); } catch (e) { + this.logger.error(e.message); await this.transport.sendResponse(response, { status: 'fail', message: e }); } diff --git 
a/modules/migration/m9-remove-unnecessary-encryption-data.js b/modules/migration/m9-remove-unnecessary-encryption-data.js new file mode 100644 index 0000000000..d5d48cf75d --- /dev/null +++ b/modules/migration/m9-remove-unnecessary-encryption-data.js @@ -0,0 +1,66 @@ +const Models = require('../../models'); + +/** + * Searches the operational database for missed OfferFinalized events + */ +class M9RemoveEncryptionDataMigration { + constructor({ + logger, blockchain, config, profileService, replicationService, graphStorage, + }) { + this.logger = logger; + this.config = config; + this.blockchain = blockchain; + this.profileService = profileService; + this.replicationService = replicationService; + this.graphStorage = graphStorage; + } + + /** + * Run migration + */ + async run() { + const bids = await Models.bids.findAll({ + attributes: ['data_set_id', 'offer_id', 'blockchain_id', 'status'], + where: { + status: { [Models.Sequelize.Op.in]: ['CHOSEN', 'NOT_CHOSEN'] }, + }, + }); + + const allMyIdentities = {}; + this.blockchain.getAllBlockchainIds() + .forEach(id => allMyIdentities[id] = this.profileService.getIdentity(id)); + + for (const bid of bids) { + try { + // eslint-disable-next-line no-await-in-loop + const holder = await this.blockchain + .getHolder( + bid.offer_id, + allMyIdentities[bid.blockchain_id], + bid.blockchain_id, + ).response; + if (bid.status === 'CHOSEN' && holder.stakedAmount !== '0') { + const encryptionColor = this.replicationService + .castNumberToColor(parseInt(holder.litigationEncryptionType, 10)); + // eslint-disable-next-line no-await-in-loop + await this.graphStorage.removeUnnecessaryEncryptionData( + bid.data_set_id, + bid.offer_id, + encryptionColor, + ); + } else if (bid.status === 'NOT_CHOSEN' && holder.stakedAmount === '0') { + // eslint-disable-next-line no-await-in-loop + await this.graphStorage.removeUnnecessaryEncryptionData( + bid.data_set_id, + bid.offer_id, + null, + ); + } + } catch (error) { + this.logger.warn(`Unable to 
remove encryption data for offer id: ${bid.offer_id}. Error: ${error.message}`); + } + } + } +} + +module.exports = M9RemoveEncryptionDataMigration; diff --git a/modules/service/dc-service.js b/modules/service/dc-service.js index 2f07102aab..59b73169cf 100644 --- a/modules/service/dc-service.js +++ b/modules/service/dc-service.js @@ -537,20 +537,27 @@ class DCService { /** * Validates and adds DH signature * @param offerId + * @param response - Network Response * @param signature + * @param alternativeSignature * @param dhNodeId * @param dhWallet * @param dhIdentity * @param isReplacement * @returns {Promise} */ - async verifyDHReplication(offerId, signature, dhNodeId, dhIdentity, dhWallet, isReplacement) { + async verifyDHReplication( + offerId, response, signature, alternativeSignature, dhNodeId, dhIdentity, dhWallet, + isReplacement, + ) { await this.commandExecutor.add({ name: 'dcReplicationCompletedCommand', delay: 0, data: { offerId, + response, signature, + alternativeSignature, dhNodeId, dhWallet, dhIdentity, diff --git a/modules/service/dh-service.js b/modules/service/dh-service.js index 2f1ddd6ccc..ff7a83d8f0 100644 --- a/modules/service/dh-service.js +++ b/modules/service/dh-service.js @@ -24,77 +24,17 @@ class DHService { this.remoteControl = ctx.remoteControl; this.pricingService = ctx.pricingService; this.profileService = ctx.profileService; - - const that = this; - this.queue = new Queue((async (args, cb) => { - const { - offerId, - dcNodeId, - dataSetSizeInBytes, - holdingTimeInMinutes, - litigationIntervalInMinutes, - tokenAmountPerHolder, - dataSetId, - blockchain_id, - future, - } = args; - try { - await that._handleOffer( - offerId, - dcNodeId, - dataSetSizeInBytes, - holdingTimeInMinutes, - litigationIntervalInMinutes, - tokenAmountPerHolder, - dataSetId, - blockchain_id, - ); - future.resolve(); - } catch (e) { - future.reject(e); - } - cb(); - }), { concurrent: 1 }); - } - - /** - * Throttle offer using internal queue - */ - handleOffer( - 
offerId, dcNodeId, - dataSetSizeInBytes, holdingTimeInMinutes, litigationIntervalInMinutes, - tokenAmountPerHolder, dataSetId, blockchain_id, - ) { - return new Promise((resolve, reject) => { - this.queue.push({ - offerId, - dcNodeId, - dataSetSizeInBytes, - holdingTimeInMinutes, - litigationIntervalInMinutes, - tokenAmountPerHolder, - dataSetId, - blockchain_id, - future: { - resolve, reject, - }, - }); - }); } /** * Handles one offer * @returns {Promise} */ - async _handleOffer( + async handleOffer( offerId, dcNodeId, dataSetSizeInBytes, holdingTimeInMinutes, litigationIntervalInMinutes, tokenAmountPerHolder, dataSetId, blockchain_id, ) { - if (dcNodeId === this.config.identity) { - return; // the offer is mine - } - const existingBid = await Models.bids.findOne({ where: { offer_id: offerId, @@ -105,8 +45,6 @@ class DHService { return; } - - this.logger.notify(`Offer ${offerId} has been created by ${dcNodeId} on blockchain ${blockchain_id}.`); if (dataSetSizeInBytes) { const dataSizeInMB = dataSetSizeInBytes / 1000000; if (dataSizeInMB > this.config.dh_maximum_dataset_filesize_in_mb) { @@ -188,8 +126,6 @@ class DHService { data, transactional: false, }); - - // await this.remoteControl.getPendingBids(); } /** @@ -201,7 +137,6 @@ class DHService { * @private */ async _calculatePessimisticMinimumDeposit(bidId, tokenAmountPerHolder, blockchain_id) { - // todo pass blockchain identity const profile = await this.blockchain .getProfile(this.profileService.getIdentity(blockchain_id), blockchain_id).response; const profileStake = new BN(profile.stake, 10); @@ -646,14 +581,6 @@ class DHService { return vertices; } - - async listenToBlockchainEvents() { - this.blockchain.subscribeToEventPermanent([ - 'OfferCreated', - 'NodeApproved', - 'NodeRemoved', - ]); - } } module.exports = DHService; diff --git a/modules/worker/import-worker-controller.js b/modules/worker/import-worker-controller.js index 639cd658d0..d2feb4eae4 100644 --- a/modules/worker/import-worker-controller.js 
+++ b/modules/worker/import-worker-controller.js @@ -98,6 +98,7 @@ class ImportWorkerController { const commandData = { documentPath, handler_id, + data_set_id: otjson['@id'], }; await this.commandExecutor.add({ name: command.sequence[0], diff --git a/.origintrail_noderc.image b/origintrail_noderc.image similarity index 100% rename from .origintrail_noderc.image rename to origintrail_noderc.image diff --git a/ot-node.js b/ot-node.js index 040763abb2..343d9c7b20 100644 --- a/ot-node.js +++ b/ot-node.js @@ -46,6 +46,7 @@ const M4ArangoMigration = require('./modules/migration/m4-arango-migration'); const M5ArangoPasswordMigration = require('./modules/migration/m5-arango-password-migration'); const M7ArangoDatasetSignatureMigration = require('./modules/migration/m7-arango-dataset-signature-migration'); const M8MissedOfferCheckMigration = require('./modules/migration/m8-missed-offer-check-migration'); +const M9RemoveEncryptionDataMigration = require('./modules/migration/m9-remove-unnecessary-encryption-data'); const ImportWorkerController = require('./modules/worker/import-worker-controller'); const ImportService = require('./modules/service/import-service'); const OtNodeClient = require('./modules/service/ot-node-client'); @@ -276,10 +277,9 @@ class OTNode { await blockchain.loadContracts(); const emitter = container.resolve('emitter'); - const dhService = container.resolve('dhService'); const remoteControl = container.resolve('remoteControl'); const profileService = container.resolve('profileService'); - const approvalService = container.resolve('approvalService'); + const replicationService = container.resolve('replicationService'); emitter.initialize(); @@ -307,7 +307,7 @@ class OTNode { log.notify('================================================================'); } - if (config.high_availability_setup) { + if (config.high_availability.enabled) { const highAvailabilityService = container.resolve('highAvailabilityService'); await 
highAvailabilityService.startHighAvailabilityNode(); @@ -319,7 +319,6 @@ class OTNode { // Starting event listener on Blockchain this.listenBlockchainEvents(blockchain); - dhService.listenToBlockchainEvents(); try { await profileService.initProfile(); @@ -329,9 +328,18 @@ class OTNode { console.log(e); process.exit(1); } - await transport.start(); await profileService.validateAndUpdateProfiles(); + await this._runArangoRemoveUnnecessaryEncryptionDataMigration( + config, + graphStorage, + blockchain, + profileService, + replicationService, + ); + + await transport.start(); + // Initialize bugsnag notification service const errorNotificationService = container.resolve('errorNotificationService'); await errorNotificationService.initialize(); @@ -426,6 +434,41 @@ class OTNode { } } + async _runArangoRemoveUnnecessaryEncryptionDataMigration( + config, + graphStorage, + blockchain, + profileService, + replicationService, + ) { + const migrationsStartedMills = Date.now(); + + const m9ArangoEncryptionDataMigrationFilename = '9_m9ArangoRemoveUnnecessaryEncryptionDataMigrationFile'; + const migrationDir = path.join(config.appDataPath, 'migrations'); + const migrationFilePath = path.join(migrationDir, m9ArangoEncryptionDataMigrationFilename); + if (!fs.existsSync(migrationFilePath)) { + const migration = new M9RemoveEncryptionDataMigration({ + logger: log, + config, + blockchain, + graphStorage, + profileService, + replicationService, + }); + + try { + log.info('Initializing Arango remove unnecessary encryption data migration...'); + await migration.run(); + log.warn(`One-time Arango remove unnecessary encryption data migration completed. Lasted ${Date.now() - migrationsStartedMills} millisecond(s)`); + + await Utilities.writeContentsToFile(migrationDir, m9ArangoEncryptionDataMigrationFilename, 'PROCESSED'); + } catch (e) { + log.error(`Failed to run code migrations. Lasted ${Date.now() - migrationsStartedMills} millisecond(s). 
${e.message}`); + process.exit(1); + } + } + } + async _runArangoPasswordMigration(config) { const migrationsStartedMills = Date.now(); diff --git a/package.json b/package.json index 0ddcd748f6..7be2f0bcc0 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "origintrail_node", - "version": "5.0.3", + "version": "5.0.4", "description": "OriginTrail node", "main": ".eslintrc.js", "config": { @@ -43,7 +43,7 @@ "debug:test:unit": "mocha --inspect-brk --exit $(find test/modules -name '*.js')", "debug:test:api": "mocha --inspect-brk --exit $(find test/api -name '*.js')", "debug:test:protocol": "mocha --inspect-brk --exit $(find test/protocol -name '*.js')", - "ganache": "ganache-cli -i 5777 -p 7545 -l 10000000 -m \"aspect ask story desert profit engage tuition leave fade giraffe exclude brief\"", + "ganache": "ganache-cli -i 5777 -p 7545 -l 10000000 -a 20 -m \"aspect ask story desert profit engage tuition leave fade giraffe exclude brief\"", "truffle:compile": "cd modules/Blockchain/Ethereum && truffle compile", "truffle:deploy:rinkeby": "cd modules/Blockchain/Ethereum && truffle migrate --network rinkeby", "truffle:deploy:supplyTokens": "cd modules/Blockchain/Ethereum && truffle migrate --network supplyTokens", @@ -51,7 +51,8 @@ "truffle:test": "cd modules/Blockchain/Ethereum && truffle test --network test", "truffle:test:litigation": "cd modules/Blockchain/Ethereum && truffle test ./test/litigation.test.js --network test", "truffle:test:creditorhandler": "cd modules/Blockchain/Ethereum && truffle test ./test/creditor.handler.test.js --network test", - "truffle:test:offer": "cd modules/Blockchain/Ethereum && truffle test ./test/offer.test.js --network test" + "truffle:test:offer": "cd modules/Blockchain/Ethereum && truffle test ./test/offer.test.js --network test", + "tools:lns:start": "cd tools/local-network-setup && ./setup_environment.sh" }, "devDependencies": { "babel-eslint": "^8.2.6", diff --git a/test/bdd/features/datalayer.feature 
b/test/bdd/features/datalayer.feature index aabbd07e62..b088d8cc3b 100644 --- a/test/bdd/features/datalayer.feature +++ b/test/bdd/features/datalayer.feature @@ -146,7 +146,7 @@ Feature: Data layer related features @fourth Scenario: Data read and export successfully - Given the replication difficulty is 0 + Given the replication difficulty is 1 And I setup 4 nodes And I start the nodes And I use 1st node as DC @@ -177,7 +177,7 @@ Feature: Data layer related features @second Scenario: Graph level data encryption - Given the replication difficulty is 0 + Given the replication difficulty is 1 And I setup 4 nodes And I override configuration for all nodes | dc_holding_time_in_minutes | 3 | @@ -224,7 +224,7 @@ Feature: Data layer related features @second Scenario: Node should not respond to network query if he did't replicate it itself - Given the replication difficulty is 0 + Given the replication difficulty is 1 And I setup 4 nodes And I start the nodes And I use 1st node as DC @@ -242,7 +242,7 @@ Feature: Data layer related features @third Scenario: Cover message routing protocol via proxy nodes - Given the replication difficulty is 0 + Given the replication difficulty is 1 And I setup 10 nodes And I override configuration for 3rd node | dh_price_factor | 100 | @@ -325,4 +325,4 @@ Feature: Data layer related features And DC imports "importers/json_examples/local-query2.json" as GRAPH And DC waits for import to finish Given DC runs local query consisting of path: "id", value: "test1" and opcode: "EQ" - Then The last local query should return otObject from the last imported dataset \ No newline at end of file + Then The last local query should return otObject from the last imported dataset diff --git a/test/bdd/features/importer.feature b/test/bdd/features/importer.feature index 94c92ba73e..5741145714 100644 --- a/test/bdd/features/importer.feature +++ b/test/bdd/features/importer.feature @@ -111,7 +111,7 @@ Feature: Test basic importer features @third Scenario: Check 
that two OT-JSON 1.1 datasets with different order have different hashes - Given the replication difficulty is 0 + Given the replication difficulty is 1 And I setup 4 node And I start the nodes And I use 1st node as DC @@ -133,7 +133,7 @@ Feature: Test basic importer features @fourth Scenario: Check that two OT-JSON 1.2 datasets with different order have the same hashes - Given the replication difficulty is 0 + Given the replication difficulty is 1 And I setup 4 node And I start the nodes And I use 1st node as DC @@ -145,7 +145,6 @@ Feature: Test basic importer features When DV exports the last imported dataset as OT-JSON And DV waits for export to finish And DC imports "importers/use_cases/otjson_1.2/sort2.json" as GRAPH - And DC waits for import to finish - When DV exports the last imported dataset as OT-JSON - And DV waits for export to finish - Then the last two exported datasets from 1st and 1st node should have the same hashes + And I wait for 2 seconds + And DC checks status of the last import + Then the last import should already have been imported diff --git a/test/bdd/features/litigation.feature b/test/bdd/features/litigation.feature index 63d39e18ab..960188080b 100644 --- a/test/bdd/features/litigation.feature +++ b/test/bdd/features/litigation.feature @@ -1,6 +1,6 @@ Feature: Test various litigation scenarios Background: Setup local blockchain and bootstraps - Given the blockchains are set up + Given the blockchain is set up And 1 bootstrap is running @first @@ -15,21 +15,14 @@ Feature: Test various litigation scenarios And I use 1st node as DC And DC imports "importers/xml_examples/Retail/01_Green_to_pink_shipment.xml" as GS1-EPCIS And DC waits for import to finish - Then DC's last import's hash should be the same as one manually calculated Given DC initiates the replication for last imported dataset - And DC waits for last offer to get written to blockchain And I wait for replications to finish - Then the last root hash should be the same as one 
manually calculated - Then the last import should be the same on all nodes that replicated data And I wait for challenges to start And I corrupt 1st holder's database ot_vertices collection And I wait for litigation initiation Then 1st holder to litigate should answer litigation Then Litigator node should have completed litigation Then 1st started holder should have been penalized -# Then Litigator should have started replacement for penalized holder -# Then I wait for 3 replacement replications to finish -# Then I wait for replacement to be completed @third Scenario: Test litigation for one holder which has failed to answer challenge but succeeded to answer litigation (correctly) @@ -43,12 +36,9 @@ Feature: Test various litigation scenarios And I use 1st node as DC And DC imports "importers/xml_examples/Retail/01_Green_to_pink_shipment.xml" as GS1-EPCIS And DC waits for import to finish - Then DC's last import's hash should be the same as one manually calculated Given DC initiates the replication for last imported dataset And DC waits for last offer to get written to blockchain And I wait for replications to finish - Then the last root hash should be the same as one manually calculated - Then the last import should be the same on all nodes that replicated data And I wait for challenges to start And I stop 1 holder And I remember stopped holders @@ -74,17 +64,10 @@ Feature: Test various litigation scenarios And DC waits for last offer to get written to blockchain And I wait for replications to finish And I wait for challenges to start - # Meanwhile add 2 more nodes -# Given I additionally setup 3 nodes -# And I start additional nodes - # Stop the 3rd node who got the deal and and produce litigation When I corrupt 1st holder's database ot_vertices collection And I wait for litigation initiation Then Litigator node should have completed litigation Then 1st started holder should have been penalized -# Then Litigator should have started replacement for penalized holder 
-# Then I wait for 3 replacement replications to finish -# Then I wait for replacement to be completed @first Scenario: DC should discriminate DH which has reputation lower than threshold diff --git a/test/bdd/features/network.feature b/test/bdd/features/network.feature index a339052f27..09d7e191f2 100644 --- a/test/bdd/features/network.feature +++ b/test/bdd/features/network.feature @@ -5,7 +5,7 @@ Feature: Test basic network features @fourth Scenario: Test replication DC -> DH - Given the replication difficulty is 0 + Given the replication difficulty is 1 And I setup 5 nodes And I start the nodes And I use 1st node as DC @@ -19,7 +19,7 @@ Feature: Test basic network features @first Scenario: Test failing replication DC -> DH - Given the replication difficulty is 0 + Given the replication difficulty is 1 And I setup 2 nodes And I override configuration for all nodes | dc_choose_time | 60000 | @@ -35,7 +35,7 @@ Feature: Test basic network features @skip Scenario: DC->DH->DV replication + DV network read + DV purchase - Given the replication difficulty is 0 + Given the replication difficulty is 1 And I setup 5 nodes And I start the nodes And I use 1st node as DC @@ -60,7 +60,7 @@ Feature: Test basic network features @skip Scenario: DV purchases data directly from DC, no DHes - Given the replication difficulty is 0 + Given the replication difficulty is 1 And I setup 3 node And I start the node And I use 1st node as DC @@ -84,7 +84,7 @@ Feature: Test basic network features @skip Scenario: 2nd DV purchases data from 1st DV, no DHes - Given the replication difficulty is 0 + Given the replication difficulty is 1 And I setup 3 node And I start the node And I use 1st node as DC @@ -145,7 +145,7 @@ Feature: Test basic network features @third Scenario: DH payout scenario - Given the replication difficulty is 0 + Given the replication difficulty is 1 And I setup 5 nodes And I override configuration for all nodes | dc_holding_time_in_minutes | 1 | @@ -160,7 +160,7 @@ Feature: 
Test basic network features @fourth Scenario: DH with disabled auto-payouts - Given the replication difficulty is 0 + Given the replication difficulty is 1 And I setup 5 nodes And I override configuration for all nodes | dc_holding_time_in_minutes | 1 | @@ -183,7 +183,7 @@ Feature: Test basic network features @second Scenario: Test repeated offer creation with same dataset - Given the replication difficulty is 0 + Given the replication difficulty is 1 And I setup 3 nodes And I start the nodes And I use 1st node as DC @@ -195,4 +195,4 @@ Feature: Test basic network features Given I additionally setup 1 node And I start additional nodes Given DC initiates the replication for last imported dataset - And I wait for replications to finish \ No newline at end of file + And I wait for replications to finish diff --git a/test/bdd/features/trail.feature b/test/bdd/features/trail.feature index 1ccf02a234..f9b91cdc4b 100644 --- a/test/bdd/features/trail.feature +++ b/test/bdd/features/trail.feature @@ -2,13 +2,13 @@ Feature: Trail features Background: Setup local blockchain and bootstraps - Given the blockchains are set up + Given the blockchain is set up And 1 bootstrap is running @fifth Scenario: Check that trail returns the expected objects Given the replication difficulty is 1 - And I setup 4 nodes + And I setup 6 nodes And I start the 1st node And I start the 2nd node And I start the 3rd node @@ -60,55 +60,10 @@ Scenario: Check that trail returns the expected objects And the last traversal should contain 1 objects with type "otObject.properties.___metadata._attributes.id" and value "selling_batch" And the last traversal should contain 6 objects with type "otObject.properties.objectType" and value "ObjectEvent" And the last traversal should contain 16 objects in total - - @fifth - Scenario: Check that extended trail returns the more objects than a narrow and a default trail - Given the replication difficulty is 1 - And I setup 4 nodes - And I start the 1st node - And I 
start the 2nd node - And I start the 3rd node - And I start the 4th node - And I use 1st node as DC - And DC imports "importers/sample_files/DC1_01-sample_product_packing.xml" as GS1-EPCIS - And DC waits for import to finish - And DC initiates the replication for last imported dataset - And I wait for replications to finish - And DC imports "importers/sample_files/DC1_02-sample_product_shipping.xml" as GS1-EPCIS - And DC waits for import to finish - And DC initiates the replication for last imported dataset - And I wait for replications to finish - And I use 2nd node as DC - And DC imports "importers/sample_files/DC2_01-sample_product_receiving.xml" as GS1-EPCIS - And DC waits for import to finish - And DC initiates the replication for last imported dataset - And I wait for replications to finish - And DC imports "importers/sample_files/DC2_02-sample_transformation_event.xml" as GS1-EPCIS - And DC waits for import to finish - And DC initiates the replication for last imported dataset - And I wait for replications to finish - And DC imports "importers/sample_files/DC2_03-sample_batch_shipping.xml" as GS1-EPCIS - And DC waits for import to finish - And DC initiates the replication for last imported dataset - And I wait for replications to finish - And I use 3rd node as DC - And DC imports "importers/sample_files/DC3_01-sample_batch_receiving.xml" as GS1-EPCIS - And DC waits for import to finish - And DC initiates the replication for last imported dataset - And I wait for replications to finish - And DC imports "importers/sample_files/DC3_02-sample_batch_selling.xml" as GS1-EPCIS - And DC waits for import to finish - And DC initiates the replication for last imported dataset - And I wait for replications to finish - # Set 4th node as DC because trail is called from DC node for now -> Expand this functionality in the future - And I use 4th node as DC - When I call traversal from "sgtin" "urn:epc:id:sgtin:111111111" with connection types 
"EPC,PARENT_EPC,CHILD_EPC,INPUT_EPC,OUTPUT_EPC,CONNECTOR_FOR,CONNECTION_DOWNSTREAM" - Then the last traversal should contain 16 objects in total - When I call extended traversal from "sgtin" "urn:epc:id:sgtin:111111111" with connection types "EPC,PARENT_EPC,CHILD_EPC,INPUT_EPC,OUTPUT_EPC,CONNECTOR_FOR,CONNECTION_DOWNSTREAM" - Then the last traversal should contain 26 objects in total - When I call narrow traversal from "sgtin" "urn:epc:id:sgtin:111111111" with connection types "EPC,PARENT_EPC,CHILD_EPC,INPUT_EPC,OUTPUT_EPC,CONNECTOR_FOR,CONNECTION_DOWNSTREAM" - Then the last traversal should contain 16 objects in total - + When I call extended traversal from "sgtin" "urn:epc:id:sgtin:111111111" with connection types "EPC,PARENT_EPC,CHILD_EPC,INPUT_EPC,OUTPUT_EPC,CONNECTOR_FOR,CONNECTION_DOWNSTREAM" + Then the last traversal should contain 26 objects in total + When I call narrow traversal from "sgtin" "urn:epc:id:sgtin:111111111" with connection types "EPC,PARENT_EPC,CHILD_EPC,INPUT_EPC,OUTPUT_EPC,CONNECTOR_FOR,CONNECTION_DOWNSTREAM" + Then the last traversal should contain 16 objects in total @fifth Scenario: Check that find trail and trail lookup API routes return expected objects diff --git a/test/bdd/steps/blockchain.js b/test/bdd/steps/blockchain.js index 04af4971bb..38338cd441 100644 --- a/test/bdd/steps/blockchain.js +++ b/test/bdd/steps/blockchain.js @@ -13,8 +13,9 @@ const LocalBlockchain = require('./lib/local-blockchain'); Given(/^the blockchain is set up$/, { timeout: 60000 }, function (done) { expect(this.state.localBlockchain, 'localBlockchain shouldn\'t be defined').to.be.equal(null); - this.state.localBlockchain = new LocalBlockchain({ logger: this.logger }); - this.state.localBlockchain.initialize().then(() => { + this.state.localBlockchain = []; + this.state.localBlockchain[0] = new LocalBlockchain({ logger: this.logger, name: 'stargazer' }); + this.state.localBlockchain[0].initialize().then(() => { done(); }).catch(error => done(error)); }); @@ 
-24,8 +25,8 @@ Given(/^the blockchains are set up$/, { timeout: 60000 }, async function () { expect(this.state.localBlockchain, 'localBlockchain shouldn\'t be defined').to.be.equal(null); this.state.localBlockchain = []; - this.state.localBlockchain[0] = new LocalBlockchain({ logger: this.logger, port: 7545, name: 'ganache_7545' }); - this.state.localBlockchain[1] = new LocalBlockchain({ logger: this.logger, port: 8545, name: 'ganache_8545' }); + this.state.localBlockchain[0] = new LocalBlockchain({ logger: this.logger, port: 7545, name: 'copernicus' }); + this.state.localBlockchain[1] = new LocalBlockchain({ logger: this.logger, port: 8545, name: 'stargazer' }); const promises = []; for (const blockchain of this.state.localBlockchain) { @@ -37,7 +38,6 @@ Given(/^the blockchains are set up$/, { timeout: 60000 }, async function () { Given(/^the replication difficulty is (\d+)$/, async function (difficulty) { this.logger.log(`The replication difficulty is ${difficulty}`); expect(this.state.localBlockchain, 'localBlockchain is not an array').to.be.an('array'); - expect(this.state.localBlockchain, 'localBlockchain is not an array').to.have.lengthOf(2); for (const blockchain of this.state.localBlockchain) { expect(blockchain.isInitialized, 'localBlockchain not initialized').to.be.equal(true); diff --git a/test/bdd/steps/endpoints.js b/test/bdd/steps/endpoints.js index 353889291f..445c9a2846 100644 --- a/test/bdd/steps/endpoints.js +++ b/test/bdd/steps/endpoints.js @@ -31,7 +31,7 @@ Given(/^DC imports "([^"]*)" as ([GS1\-EPCIS|GRAPH|OT\-JSON|WOT]+)$/, { timeout: this.state.lastImportHandler = importResponse.handler_id; }); -Then(/^DC checks status of the last import$/, { timeout: 1200000 }, async function () { +Then(/^DC checks status of the last import$/, { timeout: 20000 }, async function () { expect(!!this.state.dc, 'DC node not defined. 
Use other step to define it.').to.be.equal(true); expect(this.state.nodes.length, 'No started nodes').to.be.greaterThan(0); expect(this.state.bootstraps.length, 'No bootstrap nodes').to.be.greaterThan(0); @@ -49,6 +49,17 @@ Then(/^The last import status should be "([^"]*)"$/, { timeout: 1200000 }, async expect(this.state.lastImportStatus.status).to.be.equal(status); }); +Then(/^the last import should already have been imported$/, { timeout: 1200000 }, async function () { + expect(!!this.state.dc, 'DC node not defined. Use other step to define it.').to.be.equal(true); + expect(this.state.nodes.length, 'No started nodes').to.be.greaterThan(0); + expect(this.state.bootstraps.length, 'No bootstrap nodes').to.be.greaterThan(0); + expect(this.state.lastImportStatus.status).to.be.equal('COMPLETED'); + expect(this.state.lastImportStatus.data).to.have.property( + 'message', + 'Dataset already imported on the node, importing skipped', + ); +}); + Given(/^(DC|DV|DV2) waits for import to finish$/, { timeout: 1200000 }, async function (targetNode) { this.logger.log(`${targetNode} waits for import to finish.`); diff --git a/test/bdd/steps/network.js b/test/bdd/steps/network.js index a4e49d909b..3d70914d9b 100644 --- a/test/bdd/steps/network.js +++ b/test/bdd/steps/network.js @@ -211,7 +211,7 @@ Given(/^I setup (\d+) node[s]*$/, { timeout: 120000 }, function (nodeCount, done done(); }); -Given(/^I wait for (\d+) second[s]*$/, { timeout: 600000 }, waitTime => new Promise((accept) => { +Given(/^I wait for (\d+) second[s]$/, { timeout: 600000 }, waitTime => new Promise((accept) => { expect(waitTime, 'waiting time should be less then step timeout').to.be.lessThan(600); setTimeout(accept, waitTime * 1000); })); diff --git a/test/docker/check_image.sh b/test/docker/check_image.sh index 983d2ff51d..edd6298738 100755 --- a/test/docker/check_image.sh +++ b/test/docker/check_image.sh @@ -4,16 +4,17 @@ set -ev docker ps -a docker build --file Dockerfile.development -t myimage:latest . 
docker images -# TODO make sure that following hub contract address is in .origintrail_noderc.image +# TODO make sure that following hub contract address is in origintrail_noderc.image cat $ARTIFACTS_DIR/truffle-migrate.log | grep "Hub contract address:" node test/docker/getHubAddress.js -docker run -d --name=mynode -p 8900:8900 -p 5278:5278 -p 3000:3000 --network host -v .origintrail_noderc.image:/ot-node/.origintrail_noderc myimage:latest -# TODO make sure that one of acct-keys is in .origintrail_noderc.image +docker run -d --name=mynode -p 8900:8900 -p 5278:5278 -p 3000:3000 --network host -v origintrail_noderc.image:/ot-node/.origintrail_noderc myimage:latest +# TODO make sure that one of acct-keys is in origintrail_noderc.image # cat $ARTIFACTS_DIR/acct-keys.log # Give some time for node to start sleep 180 # docker container inspect mynode # docker network inspect host +docker logs otnode -f --tail 1000 > $ARTIFACTS_DIR/docker.logs docker exec mynode /bin/sh -c "curl -X POST http://127.0.0.1:8900/api/latest/import -F standard_id=GS1-EPCIS -F file=/importers/xml_examples/Retail/03_Pink_to_orange_shipment.xml" > importResult.json cat importResult.json # TODO better asserts that import response has one key, handler_id diff --git a/test/docker/getHubAddress.js b/test/docker/getHubAddress.js index d7702a1e48..b5b388f9ff 100644 --- a/test/docker/getHubAddress.js +++ b/test/docker/getHubAddress.js @@ -12,9 +12,9 @@ fs.readFile(fileToRead, (err, data) => { const begining = data.toString().indexOf(hint) + hint.length; const end = begining + 42; const hubContractAddress = data.toString().substring(begining, end); - const jsonData = JSON.parse(fs.readFileSync(`${process.env.TRAVIS_BUILD_DIR}/.origintrail_noderc.image`)); + const jsonData = JSON.parse(fs.readFileSync('origintrail_noderc.image')); jsonData.blockchain.implementations[0].hub_contract_address = hubContractAddress; - fs.writeFileSync(`${process.env.TRAVIS_BUILD_DIR}/.origintrail_noderc.image`, 
JSON.stringify(jsonData)); + fs.writeFileSync('origintrail_noderc.image', JSON.stringify(jsonData)); } else { console.log('Hub not found, something is wrong!'); process.exit(-1); diff --git a/test/modules/command/dc/dc-offer-create-database-command.test.js b/test/modules/command/dc/dc-offer-create-database-command.test.js index 906c246400..fd9d8d2c42 100644 --- a/test/modules/command/dc/dc-offer-create-database-command.test.js +++ b/test/modules/command/dc/dc-offer-create-database-command.test.js @@ -39,7 +39,6 @@ describe('Checks DCOfferCreateDatabaseCommand execute() logic', function () { before('Setup preconditions and call DCOfferCreateDatabaseCommand execute function', async () => { config = rc(pjson.name, defaultConfig); await testUtilities.recreateDatabase(); - // make sure offers table is cleaned up await models.offers.destroy({ diff --git a/test/modules/graph.test.js b/test/modules/graph.test.js index 13fa532d6c..bcd7ea5c04 100644 --- a/test/modules/graph.test.js +++ b/test/modules/graph.test.js @@ -16,7 +16,6 @@ const pjson = require('../../package.json'); describe('graph module ', () => { before('Init GraphStorage', async () => { const config = rc(pjson.name, defaultConfig); - Storage.models = (await models.sequelize.sync()).models; assert.hasAllKeys(config.database, ['provider', 'username', 'password', 'password_file_name', 'host', 'port', 'database', 'max_path_length', 'replication_info']); assert.hasAllKeys(config.database.replication_info, ['endpoint', 'username', 'password', 'port']); diff --git a/test/modules/graphstorage.test.js b/test/modules/graphstorage.test.js index c06e884c5a..de70abecf6 100644 --- a/test/modules/graphstorage.test.js +++ b/test/modules/graphstorage.test.js @@ -37,8 +37,6 @@ describe('GraphStorage module', () => { const config = rc(pjson.name, defaultConfig); selectedDatabase = config.database; selectedDatabase.database = myDatabaseName; - Storage.models = deasync(models.sequelize.sync()).models; - selectedDatabase.database = 
myDatabaseName; if (selectedDatabase.provider === 'arangodb') { systemDb = new Database(); diff --git a/test/modules/gs1-importer.test.js b/test/modules/gs1-importer.test.js index dd448048e3..f758af5af6 100644 --- a/test/modules/gs1-importer.test.js +++ b/test/modules/gs1-importer.test.js @@ -67,10 +67,6 @@ describe('GS1 Importer tests', () => { }; } - before('Setup models', async () => { - Storage.models = (await models.sequelize.sync()).models; - }); - beforeEach('Setup DB', async function setupDb() { this.timeout(5000); diff --git a/test/modules/test-utilities.js b/test/modules/test-utilities.js index d42838a915..a4874f1b3d 100644 --- a/test/modules/test-utilities.js +++ b/test/modules/test-utilities.js @@ -11,6 +11,7 @@ const logger = require('../../modules/logger'); * @return {Promise} */ function recreateDatabase() { + console.log('sequelizeConfig.storage', sequelizeConfig.storage); fs.closeSync(fs.openSync(sequelizeConfig.storage, 'w')); const migrator = new Umzug({ diff --git a/tools/local-network-setup/Readme.md b/tools/local-network-setup/Readme.md new file mode 100644 index 0000000000..206be64351 --- /dev/null +++ b/tools/local-network-setup/Readme.md @@ -0,0 +1,87 @@ +Local Network Setup Tool +======================== + +#### Run a single command to create an entire testing environment for ot-node development. + +The Local Network Setup tool will start a local blockchain, deploy the required smart contracts, set up the configuration files for the nodes and start the nodes in separate windows. +From there you're ready to send API calls to your local nodes and test new features on the ot-node without worrying about funds, servers or network connectivity issues. + +**Note: This tool is an internal tool used by the OriginTrail team and thus is developed for our workflow, meaning that it currently only supports MacOS**, but we encourage you to adapt it for your workflow as well. 
+ +Quick Start +=========== + +## Prerequisites + +* You need to have arangodb installed and running on your machine. You can find instructions on the [ArangoDB website](https://www.arangodb.com/docs/stable/getting-started-installation.html) + +* You should have ot-node dependencies installed with the `npm install` command + +## How to start + +From the ot-node directory, run the below command + +```bash +npm run tools:lns:start +``` + +Usage +===== + +## Specifying the number of nodes + +The LNS tool deploys 4 nodes, each connected to two blockchain implementations which are running on a local ganache process. +You can specify to run anywhere between one and ten nodes with the `--nodes` parameter. + +```bash +npm run tools:lns:start -- --nodes=10 +``` + +The first node will be named `DC`, while subsequent nodes will be named `DH1, DH2, ...`. + +## Editing the node configuration + +### Editing the configuration for all nodes + +If you need to edit the configuration for every node, before you run the nodes you can edit the `config_template.json` file and the new configuration will be loaded during node set up. + +### Editing the configuration for a single node + +If you want to edit a single node's configuration, you can do it in two ways: + +1. Before you start the nodes, edit the `generate_config_files.js` with a specific condition. For example, if you wanted to set the fifth node to reject all offers you'd add something like the following: +```js +if (node_name === 'DH4') { + savedConfig.blockchain.implementations[0].dh_price_factor = "10000000"; + savedConfig.blockchain.implementations[1].dh_price_factor = "10000000"; +} +``` + +2. Once the nodes are set up, each node has its own node configuration file in the `temporary-config-files` directory, which you can edit directly. For example, if you wanted to enable additional logs on the DC node you could add the following to `DC.json`. 
**Note:** After editing the configuration this way you'll need to stop and start the node again for the changes to take effect. +```json +{ + "...": "...", + "commandExecutorVerboseLoggingEnabled": true +} +``` + + +Contribution +============ + +OriginTrail is an open source project. We happily invite you to join us in our mission of building a decentralised world of supply chains. If you would like to contribute, you are more than welcome. + + +### Useful links + + +[OriginTrail website](https://origintrail.io) + +[OriginTrail documentation page](http://docs.origintrail.io) + +[OriginTrail Discord Group](https://discordapp.com/invite/FCgYk2S) + +[OriginTrail Telegram Group](https://t.me/origintrail) + +[OriginTrail Twitter](https://twitter.com/origin_trail) + diff --git a/tools/local-network-setup/config_template.json b/tools/local-network-setup/config_template.json new file mode 100644 index 0000000000..2d1c0ff741 --- /dev/null +++ b/tools/local-network-setup/config_template.json @@ -0,0 +1,52 @@ +{ + "node_rpc_port": 8900, + "node_port": 5278, + "node_remote_control_port": 3001, + "control_port": 5881, + "local_network_only": true, + "traverse_nat_enabled": false, + "houston_password": "houston", + "database": { + "provider": "arangodb", + "database": "origintrail", + "host": "localhost", + "port": 8529, + "username": "root", + "password": "", + "max_path_length": 1000 + }, + "network": { + "remoteWhitelist": [], + "bootstraps": ["https://127.0.0.1:5278/#781e64952478359bd10b79ab69d42bea189834be"] + }, + "blockchain": { + "implementations": [ + { + "blockchain_title": "Ethereum", + "network_id": "stargazer", + "gas_limit": "2000000", + "gas_price": "20000000000", + "identity_filepath": "stargazer_identity.json", + "hub_contract_address": "0x0987197628Bb06133B6FA2409eb4cF9FCaFe8d3a", + "rpc_server_url": "http://127.0.0.1:7545/" + }, + { + "blockchain_title": "Ethereum", + "network_id": "copernicus", + "gas_limit": "2000000", + "gas_price": "20000000000", + 
"identity_filepath": "copernicus_identity.json", + "hub_contract_address": "0x791ee543738B997B7A125bc849005B62aFD35578", + "rpc_server_url": "http://127.0.0.1:7545/" + } + ] + }, + "initial_deposit_amount": "10000000000000000000000", + "dc_holding_time_in_minutes": 5, + "logs_level_debug": true, + "dc_choose_time": 90000, + "dc_litigation_interval_in_minutes": 1, + "dh_min_litigation_interval_in_minutes": 1, + "litigationEnabled": true, + "commandExecutorVerboseLoggingEnabled": false +} diff --git a/tools/local-network-setup/generate_config_files.js b/tools/local-network-setup/generate_config_files.js new file mode 100755 index 0000000000..75283f76b5 --- /dev/null +++ b/tools/local-network-setup/generate_config_files.js @@ -0,0 +1,100 @@ +const path = require('path'); +const fs = require('fs'); +const { execSync } = require('child_process'); + +const wallets = require('./pregenerated-values/wallets'); +const template = require('./config_template'); + +const argv = require('minimist')(process.argv.slice(2), { + string: ['number_of_nodes', 'config_path'], +}); + +const path_to_node = path.join(__dirname, '../..'); + +const number_of_nodes = argv.number_of_nodes ? parseInt(argv.number_of_nodes, 10) : 4; +const path_to_config = argv.config_path ? 
argv.config_path : path.join(__dirname, 'temporary-config-files'); +console.log(`Set path to config files to ${path_to_config}`); + +console.log(`Generating ${number_of_nodes} total nodes`); + +try { + execSync(`rm -rf ${path_to_config}`); +// eslint-disable-next-line no-empty +} catch (e) {} +execSync(`mkdir ${path_to_config}`); + +for (let i = 0; i < number_of_nodes; i += 1) { + let node_name; + if (i === 0) { + console.log('Using the preexisting identity for the first node (bootstrap)'); + node_name = 'DC'; + } else { + node_name = `DH${i}`; + } + console.log(`Configuring node ${node_name}`); + + const configDir = path.join(path_to_config, `${node_name}-config-data`); + const configPath = path.join(path_to_config, `${node_name}.json`); + execSync(`touch ${configPath}`); + + const parsedTemplate = JSON.parse(JSON.stringify(template)); + + parsedTemplate.node_rpc_port = 8900 + i; + parsedTemplate.node_port = 5278 + i; + parsedTemplate.node_remote_control_port = 3000 + i; + parsedTemplate.control_port = 5800 + i; + parsedTemplate.database.database = `origintrail-${node_name}`; + // parsedTemplate.disableAutoPayouts = true; + + parsedTemplate.blockchain.implementations[0].node_wallet = wallets[2 * i].wallet; + parsedTemplate.blockchain.implementations[0].node_private_key = wallets[2 * i].privateKey; + parsedTemplate.blockchain.implementations[0].management_wallet = wallets[2 * i].wallet; + + parsedTemplate.blockchain.implementations[1].node_wallet = wallets[(2 * i) + 1].wallet; + parsedTemplate.blockchain.implementations[1].node_private_key = wallets[(2 * i) + 1].privateKey; + parsedTemplate.blockchain.implementations[1].management_wallet = wallets[(2 * i) + 1].wallet; + + if (node_name === 'DH1') { + parsedTemplate.blockchain.implementations.reverse(); + } + + // Uncomment if you want nodes to have different blockchain setups and for some nodes to be DVs + // if (node_name === 'DH2' || node_name === 'DH5') { + // 
parsedTemplate.blockchain.implementations.splice(0,1); + // if (node_name === 'DH5') { + // parsedTemplate.blockchain.implementations[0].dh_price_factor = '10000000'; + // } + // } else if (node_name === 'DH3' || node_name === 'DH6') { + // parsedTemplate.blockchain.implementations.splice(1,1); + // if (node_name === 'DH6') { + // parsedTemplate.blockchain.implementations[0].dh_price_factor = '10000000'; + // } + // } else if (node_name === 'DH4' || node_name === 'DH7') { + // parsedTemplate.blockchain.implementations.reverse(); + // if (node_name === 'DH4') { + // parsedTemplate.blockchain.implementations[0].dh_price_factor = '10000000'; + // parsedTemplate.blockchain.implementations[1].dh_price_factor = '10000000'; + // } + // } + + // Uncomment if you want DH4 to be a DV + // if (node_name === 'DH4') { + // parsedTemplate.blockchain.implementations[0].dh_price_factor = '10000000'; + // parsedTemplate.blockchain.implementations[1].dh_price_factor = '10000000'; + // } + + fs.writeFileSync(`${configPath}`, JSON.stringify(parsedTemplate, null, 2)); + try { + execSync(`mkdir ${configDir}`); + execSync(`cd ${path_to_node} && npm run setup:hard -- --configDir=${configDir} --config=${configPath}`); + } catch (e) { + process.exit(1); + } + + if (node_name === 'DC') { + const identityFilePath = path.join(__dirname, 'pregenerated-values'); + execSync(`cp ${identityFilePath}/kademlia.crt ${configDir}/kademlia.crt`); + execSync(`cp ${identityFilePath}/kademlia.key ${configDir}/kademlia.key`); + execSync(`cp ${identityFilePath}/dc_network_identity.json ${configDir}/identity.json`); + } +} diff --git a/tools/local-network-setup/pregenerated-values/dc_network_identity.json b/tools/local-network-setup/pregenerated-values/dc_network_identity.json new file mode 100644 index 0000000000..6b83213825 --- /dev/null +++ b/tools/local-network-setup/pregenerated-values/dc_network_identity.json @@ -0,0 +1 @@ 
+{"privateKey":"1336c4d2ec074c556fc79f7a700ac6589bfd9d85f4bc5fb37ef5b8f2496d9b58","nonce":4,"proof":"578200008aa80000326b0000a2f8000095040000608f0000011d0000b48f0000a53c0000996b00009f3300007b7b0000bc08000078c00000a2bc00008bf50000d76b0000acc10000ddc8000010e900004610000022ac00005f080000e36300004e2e0000a4fb0000dcb60000c1b70000cda9000085f60000e30500003a4a0000"} \ No newline at end of file diff --git a/tools/local-network-setup/pregenerated-values/kademlia.crt b/tools/local-network-setup/pregenerated-values/kademlia.crt new file mode 100644 index 0000000000..0c48a079e1 --- /dev/null +++ b/tools/local-network-setup/pregenerated-values/kademlia.crt @@ -0,0 +1,17 @@ +-----BEGIN CERTIFICATE----- +MIICpDCCAYwCCQDJ0nFodUfUWjANBgkqhkiG9w0BAQsFADAUMRIwEAYDVQQDDAls +b2NhbGhvc3QwHhcNMTgxMjE3MTIwMTE1WhcNMTkxMjE3MTIwMTE1WjAUMRIwEAYD +VQQDDAlsb2NhbGhvc3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCi +jQUb4XtYx13rf8ImwUY5K8bC+tEBt3U42eSjkjSaH9Wu7CTTUpiQbW3RkPk5+Kq4 +rEZ2wl+7wHNBYDC5Siy2+gbgC6VSqOJN3NxwpoM6Dl/HT3ipMaJ+TwNqkMAPJy1e +H/JOkEm0tCPXRWeu+VGpZdSIbCEsHb0YxdGqa9MkchAuYmY/oairiXUDA/KuzocK +YGarlj8ZiuSC220CioAiwv+FCBIhTCUaY91eeezFvyOkWB02y6w3rtwcIkbszYST +VsrKeMuQ/W9A89ANIfsXRzLQy7k+w07tF76VYu7h4t0wBc5jhtQzY1XKcdkUz/br +7P9yVEHXnEMgM8AhVXktAgMBAAEwDQYJKoZIhvcNAQELBQADggEBAC33PFEwMZYK +KSihXwfO1SxW1aNibVOPV39gcR7n3y3plLowGNsToxr529coYI5rT25Xy67re8GN +ADQ+y4B+63UjLOGNmxhDZyTyz1Mo+eHn9Q6SWZ86DrDcwuKoON+45kFZwt0O//Tq +8SHfBXLo+dh67IQFhvg3i9lfyF5rJyorstyfik+cXLsN1ksOp0aAfmLTndtw8Ayx +qdMisSz6MeJSRjUvGehzrP5JJAwd4QOqoAo1/Dd/nY1kCfa4U5Hl9Io6lGTm0aCo +OXlUda+sEcLy8lNi/ZDn29us1aKVjXbJG0pXsbs7iweQohDigl1ZuByKn6Q+2dy8 +3hazZFVaQK8= +-----END CERTIFICATE----- \ No newline at end of file diff --git a/tools/local-network-setup/pregenerated-values/kademlia.key b/tools/local-network-setup/pregenerated-values/kademlia.key new file mode 100644 index 0000000000..3e9bed0027 --- /dev/null +++ b/tools/local-network-setup/pregenerated-values/kademlia.key @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE 
KEY----- +MIIEowIBAAKCAQEAoo0FG+F7WMdd63/CJsFGOSvGwvrRAbd1ONnko5I0mh/Vruwk +01KYkG1t0ZD5OfiquKxGdsJfu8BzQWAwuUostvoG4AulUqjiTdzccKaDOg5fx094 +qTGifk8DapDADyctXh/yTpBJtLQj10VnrvlRqWXUiGwhLB29GMXRqmvTJHIQLmJm +P6Goq4l1AwPyrs6HCmBmq5Y/GYrkgtttAoqAIsL/hQgSIUwlGmPdXnnsxb8jpFgd +NsusN67cHCJG7M2Ek1bKynjLkP1vQPPQDSH7F0cy0Mu5PsNO7Re+lWLu4eLdMAXO +Y4bUM2NVynHZFM/26+z/clRB15xDIDPAIVV5LQIDAQABAoIBAQCKbrisHuJSIC+/ +MKWjJbRtWffC86Ht6VR+XyqVlx0PKQ9tEhTml4/waBE/iUyG71xNPAIif7TSC7rc +MVQtaJseFV6njaN0+vkHwJFHqrdVSQqOjgK/bNLq6NWK+s1l2kKLx38vIzhyGXvw +iF4dqJzAPEW9DQFPNeBl6Yp+rac6gEIIuAySIg2aFvMFn6IxGoNxDEyRMw2+BuGc +gYGiY1JnejwPvdlIvc6WPsSHFulgcQ+diEKlukapCXqdGF+WCi5Aqyhsasr7mTSd +yWZQMb4oZhsb7PUtRfSXfleiaf/jMmPawZXjSSsGX/4RNxOK1cIEvZbmAwq56IuE +Wa0sd3iBAoGBAM3zIlH0pDyz+nlAQF64Db5EmKFA0YUPIZcoL7kcCsZDfOtSijJB +6dTCgqoGuc2SJQLqvPXhTKQKkke0lo5cf2ilNPTZGsEllTFN82quPbD+kAq3TCiG +xSKnnvQ/sCckCx3AWMghGqe2G6h3Ob3RzViEETMiBZn46PjCcUj/EUHhAoGBAMoN +4xO8snKx87/G/W1GoMQrZEb6ACAWEWdIAbeibWAddTk8PStO3nm73CR1ngpMU8Mn +F504+LryoBsxSN7f+AFg05Erz4dY0PYCwJpi2ZBjRGczBfKBzx0Rn6KKkR3iY4BH +2guENu3uz58edQm1bNphM09teYGd3+iMv/xyarjNAoGATcLUnY86h/UIyMlaxKjH +/6RU4OXhdlUNQM6anlEZPFz1CMFEjs9WY9ABmAMsGpNoyB4rtMcZsgKP0uU7x4rj +THa8dQqYUH9JQrGFDC23+2FSr8kfx1mDi58wpFZ7w3uXeudKXl4pe1OGQ+ioc6SI +3TgYam8ZIFbcWDjKn143fOECgYBgyVAp5yzPtiBdkBNbYgamkWaCfUTMGeYSRDLW +ZQ7hiJrbZNikQy5cv7lPGawvoKpLBBHStTSoUIqTHAOdB0oEVv5hPsw1OD04as8Z +5Rn0iSh+CrnhlX7BnOyL7rZey1AAZeqzs4ygYtG/ldZYZTiUTxVGCdm3Vgmomqto +YOJwTQKBgGnvV98kvaPuoYYjog0FX4Xf/Zn9drhA+XDwVb8AbiFsXcpGLgoKFTdm +6Vm5oQc62h6iP1PH5ffAoa3AvuR+vmtlAT1/DpCR8o38Hy7fo+8k6SeA1j0luYfP +28eJlHwDXjSrCLSdfoRdbZx5BOJad6n0aovZYrKgnF3VOxIoqlv2 +-----END RSA PRIVATE KEY----- \ No newline at end of file diff --git a/tools/local-network-setup/pregenerated-values/wallets.json b/tools/local-network-setup/pregenerated-values/wallets.json new file mode 100644 index 0000000000..b3e321ec7b --- /dev/null +++ b/tools/local-network-setup/pregenerated-values/wallets.json @@ -0,0 +1,102 @@ +[ + { 
"wallet": "0xd6879C0A03aDD8cFc43825A42a3F3CF44DB7D2b9", "privateKey": "02b39cac1532bef9dba3e36ec32d3de1e9a88f1dda597d3ac6e2130aed9adc4e"}, + { "wallet": "0x2f2697b2a7BB4555687EF76f8fb4C3DFB3028E57", "privateKey": "b1c53fd90d0172ff60f14f61f7a09555a9b18aa3c371991d77209cfe524e71e6"}, + { "wallet": "0xBCc7F04c73214D160AA6C892FcA6DB881fb3E0F5", "privateKey": "8ab3477bf3a1e0af66ab468fafd6cf982df99a59fee405d99861e7faf4db1f7b"}, + { "wallet": "0xE4745cE633c2a809CDE80019D864538ba95201E3", "privateKey": "c80796c049af64d07c76ab4cfb00655895368c60e50499e56cdc3c38d09aa88e"}, + { "wallet": "0x193a22749001fA75497fb8fcCE11235947a19b3d", "privateKey": "239d785cea7e22f23d1fa0f22a7cb46c04d81498ce4f2de07a9d2a7ceee45004"}, + { "wallet": "0xFFEE9a020c3DdDE30D0391254E05c8Fe8DC4a680", "privateKey": "021336479aa1553e42bfcd3b928dee791db84a227906cb7cec5982d382ecf106"}, + { "wallet": "0xBBC863B0776f5F8F816dD71e85AaA81449A87D9A", "privateKey": "217479bee25ed6d28302caec069c7297d0c3aefdda81cf91ed754c4d660862ae"}, + { "wallet": "0x64B592e8e9AF51Eb0DBa5d4c18b817C01e8e75a8", "privateKey": "a050f7b3a0479a55e9ddd074d218fbfea302f061e9f21a117a2ec1f0b986a363"}, + { "wallet": "0xb664cf668534FDE04cF43173e2187b7a9196bfC3", "privateKey": "0dbaee2066aacd16d43a9e23649f232913bca244369463320610ffe6ffb0d69d"}, + { "wallet": "0xCE81B24feDb116FaC5A887ED7706B05A46060079", "privateKey": "63b854ff0d973dbd4808a6def4c6a7f65bebcaec07520fbf1c0056331af65a7b"}, + { "wallet": "0xcF9c758Ae7C21D8048Fc1C3cb6164Ff37A5b205e", "privateKey": "7dc40fb38b9a96a63529abb4549d84e0c08e8b923e178150e867685730eadf00"}, + { "wallet": "0xC8b866F2dD57d2889a7cccB989cfb42dB4579411", "privateKey": "fc755938afe379af87c44623bbe70ffa1b3f452a9b9807d75bb27ebe18681286"}, + { "wallet": "0xD242D54ed86A64909d0f990bCe608b065ed07559", "privateKey": "8656c6fba3b82ca3ee5cc60d5e56aaa9e50f73856f69eeb919dbf4c2d1728728"}, + { "wallet": "0x3368d4DBeC10220D7Ba73c3FC65977ED215C62Fc", "privateKey": "89d29d67bbf973542efad822c706d7aa5eb0c655ff4b3f3af84fa64dce1149e5"}, + 
{ "wallet": "0x9d2934024ccC3995056E7E067184B3B0bB8B66Ab", "privateKey": "f0c1ca9b1e6c08b8cf8cf0ebf946d1d2bae2cb33c3cd1721962dd41a4bf1982c"}, + { "wallet": "0x73DF1C18F18AA54DB018b3273E44b1A4713c5fE2", "privateKey": "d9232a58d9a25751dbba8967d43cd4cd87af081db1b22fce72ce0bc92640a7a7"}, + { "wallet": "0xd2c714a04fEA61C599815ec57fAf25ba4F4d496B", "privateKey": "a8755e9fb4643aab2b7109afb4b32821df3c236fae60156f0324086035b88e67"}, + { "wallet": "0xBA9d00b748217381674E277D2447fcaCB78bcAc7", "privateKey": "5d9927a5244623afb1e7f590c7946682620d759724ac86c96792cee6900fd973"}, + { "wallet": "0x34734d828d39ce0B3C8ad22B8578Cd2E3236F277", "privateKey": "223c9895f73f3b60a7fe928db1a9864669d1d21dce153a9928285d0d3c20b066"}, + { "wallet": "0xCF4d6f24Ca163D14389C38DD0C7e89718d17090a", "privateKey": "f5f845f7e25649083fee349afea795e4f587ba1708f1b8c8686e88288fb36197"}, + { "wallet": "0xD15Eb6bF044ed36DfDd2e3a3b84aB81AaB15881D", "privateKey": "867cede8866075a0e5aaacb429019582a87f47929f885902d7d6ba87174b465b"}, + { "wallet": "0x06FD6319da4199BD55AA283787b9fd802082191d", "privateKey": "4f2c9e35799519aab320afe1b2522d24b8edc2b331ce74feaa73d387c0d6515a"}, + { "wallet": "0xc3C828F5B357638265cC09Dd479F60A8E1190801", "privateKey": "46347bff764b0e793ae8607146d49ae677b51fbc31bb150eaddce9d52b2d2ab1"}, + { "wallet": "0x50d2af71026c60648c612190ce92e8257c69B419", "privateKey": "394117b491034f898c5945219f583d521b7c514c37c7002c398156b05f877863"}, + { "wallet": "0x4e6c7afa684B54980aE15aEA191911E3D9B47aba", "privateKey": "69d771fd096a6ff182954d19dc7f9be68de624648d816ec8ef1b718a0277f882"}, + { "wallet": "0x4a68eD404bBd120a3bdab1748dc36EE43a5AE42d", "privateKey": "411841f090a0cc9f2404f24d7d2db707652e76fd1819446149f2c8b0b44124e2"}, + { "wallet": "0xDdbc8EA86Ec762AA4a7aC985fF3c7E7087be9e3B", "privateKey": "7329d841a68f2f25a57f34195ce63eca4b477dd587f3a78554181a588dcd9002"}, + { "wallet": "0xf68B2609F1E240e501D78c78276D7314ba298025", "privateKey": "50409e18b20ba522c909a296b3c378af1c31fb458aa6478988c260b78956ab3d"}, 
+ { "wallet": "0xBaF76aC0d0ef9a2FFF76884d54C9D3e270290a43", "privateKey": "9b9af041edc816692276ac3c8f1d5565e3c01ddff80ec982943a29bd8d1d8863"}, + { "wallet": "0x1B420da5f7Be66567526E32bc68ab29F1A63765A", "privateKey": "743b68c70a13357fd4c19c8bbc42a5b237e8e9a3c91318eb1986b335cc8abb64"}, + { "wallet": "0xCD18Ccc1Fe16D525Cd8fdcD727935eD4eA718964", "privateKey": "79ea5159653ac0384456377637881d8e7822a3280595088e0cad0c1205ce76eb"}, + { "wallet": "0xbcc07fd7BA6F665680a33FADA3483a4849C05DBC", "privateKey": "1a0b5ac3a856c194579e79b988a99d74d1aa0c04986d27baab441df9f0976ba3"}, + { "wallet": "0x89c5e9a3a1787Ea3b7159fe9d3f263de1F93ec57", "privateKey": "697b0e5d97553698886b39e57fed42693151e71991c9d7e0fcf367bfa825f607"}, + { "wallet": "0x4962C03D27DAB2d94Fc8386f59A921fB16bae889", "privateKey": "87da94565531b932339862f0d416c7fb5cd2990268039dd005fdac591a04b1ba"}, + { "wallet": "0x5bbF692311308Bf49bE2289E6747F77EEA3767F9", "privateKey": "6778d24327841409dce0a6a69d661fe7fe8c57bfb48b7c31a6f52d8f9c8b614c"}, + { "wallet": "0x2ba019671839A1eA65E36a4938E11A98689ce8b7", "privateKey": "2cadfcb5213b1eb72cdecfa1aa27dd89c6cbc2c544af7fcf5b4b2edc22cb4aff"}, + { "wallet": "0xA7532d3F5e54402c541216B73EfA167e080735D2", "privateKey": "9e5cde4cce679332d5f1f4418b98295430f6bde9c5ce5b032af7fcdaf52e9420"}, + { "wallet": "0xDEF95418147486d4e2F1f71BD6DdF8aF90eBc1b1", "privateKey": "f7f8d9a40403d71bde8d5dac4df63703006679d84c75a6acc8ae0093b0e1fa59"}, + { "wallet": "0x48E9D6529Fa9Be3379C08f7be9A835388caf2e18", "privateKey": "1e218d20dc6e795d4abe99fdcd5a58d905f8c6706700dfb5176dea844496e641"}, + { "wallet": "0xcc2a7FA6A2D1029B56126Eeb8df2c0833da4a77F", "privateKey": "3db0724477f3e1efb841aed931a14f9f9b6077316a526a03c6cbffe8b36dfe3a"}, + { "wallet": "0xeb8A7eFEEc18Df565b61e391b390D272b0514461", "privateKey": "b4a200f4509ca49bbeecf66a1f70653d8320bbd19734f5217effa6613175fc6d"}, + { "wallet": "0x054088c10f7f9F569aD29181a06dBa8371Acbb0E", "privateKey": 
"5d10a8ee1bb922fed0bb6935ce646f16456eabb7dc8ff9e5f098fcd946394101"}, + { "wallet": "0x653864534A11766d32502684f6D74CF639CbFc9C", "privateKey": "64bf8a11cc90b3e89bc85fb435283c88c4056ded3c9ae9639d9bd3ca9c054298"}, + { "wallet": "0x5314Ad2Af9646eE9E0267E9e4D821c455Dc5B525", "privateKey": "b3a8689e636ce85b95a6470ef2237f0524f9dbd644a74028b415baae9cceaaf6"}, + { "wallet": "0x714F733F70B6d56C54e9dcA5EdfBc13ED85a5efE", "privateKey": "49e649450af3f7dc726490e809f16ba79b2e8960c9fd45ab157b897d3221acd7"}, + { "wallet": "0xFb440aCeBF4BDcA6c0722A60005415319bB4B75d", "privateKey": "58fb1553278ca6f0fbd10b9f1042c6c9c2c37be104f64b9d10819f9abcc62582"}, + { "wallet": "0xD1038F5b809E76f55FE057AC99b8a8D0620E904E", "privateKey": "c3921567a57b652e3d654c40f2ec8188ef297859d64fbc9015bbdd27bdbe50cf"}, + { "wallet": "0x20E268c8Ef4D0dfeA70b4E1b985762158Ff67E8e", "privateKey": "c28013162886afe71483eec83824076b1f79ab0cf09526697de5e2a57f39079f"}, + { "wallet": "0x05D4e3Ad847E73b7Fbbc885E571F9d191440508A", "privateKey": "6999510d689e65c38d02cd45a910211a4c2907676a3a5875dc5c7dfcdce3e5e6"}, + { "wallet": "0xb87d9C712265e25f01a2950144Aa189a8150b0E5", "privateKey": "8f03ffc692cc15aa3055945d15ec0840aeda203d91afc194a3e5f891f9daf812"}, + { "wallet": "0x5F85555A988a958efCE4134b3D6c766402e89784", "privateKey": "493871821ab59a3c23da7069b2c7d2020a0fb02925eb011a08c2288cf98fc08e"}, + { "wallet": "0x8f876bBD2fC220cC3B923AEfA4d7d94F58010Be1", "privateKey": "7f032356ae237835ac7523810e37c8c4f3ae63d1772a4c59c953b36d1de99be2"}, + { "wallet": "0xa70b93689bB148FA6d8961a9F549406f624474a3", "privateKey": "0e287adb4796e7300cb3ab1aae60e0a79f598b21e6d5a22328d1c2c004b069ae"}, + { "wallet": "0xf56D61A2CC01aE812be9Ffb70873d44Bc2eE0D50", "privateKey": "8595d394909ec1f5aee34e1d29b355f1430e62e1fffe626cf66e5f3823cb5db2"}, + { "wallet": "0x0091eb3Eb9Bd1399F35D9b7D92Bf55ae5edc18a7", "privateKey": "d6df775cd794fe7dc50cda2fd449b23de7050a7a3f1b82ef03803e95c1b315c7"}, + { "wallet": "0xA8405A611afdC9b80773Ab0DcDA3fe232eA428dF", 
"privateKey": "3c92b42a8a99cc64e9a52e69e4d9f2deb3398f44d9ca0438d345916acf73f824"}, + { "wallet": "0x13c264FF438955b908b0C13118046bc2C5d262E4", "privateKey": "5f83aa60cd20a5800350bb38eac19f5cc04b93eda698ad2a90fea0165f842c53"}, + { "wallet": "0x4651798A8556207fA069D79443e28b617de9AAbA", "privateKey": "e76a159a04f58b4ca519904c82621dac2602f128ac4ce7d91a6b2b0f8bca862c"}, + { "wallet": "0xF5c8a2a5C0D89a31f8D101Fc5E27DAe7B9D0DF78", "privateKey": "67dd80f9315ba24cf1a7413930da212e459cfecbad3b7bc42d3f6ac9e3d772de"}, + { "wallet": "0xD19C3d5C6Fed01DD7d176825737b695Dccf9Bb9E", "privateKey": "1d6675c1c879220093c2575b8d4c61941057bd4b546a5903236ac537be9144fb"}, + { "wallet": "0x0c62A9094c3ffc2A97fC4BB02bbA7f8FC042B9FB", "privateKey": "1f6a2e587bdc81f624a6043257f7672d8af2feb52af6452418c86dd612937c01"}, + { "wallet": "0xB741D5502501cEc1b7F4C6aC33eB8392FcEF8b4d", "privateKey": "850bd23f1434ba436b9e9e5ce97dcaf4bb3e1a4c34281720f865111716cc3929"}, + { "wallet": "0x166e5074c1E03DEF129ffb214f358A5bbf101b6a", "privateKey": "4728562fe3e9cd45f701b9f02fcff53d07ae0700b7dfbd6c9e847359a7bace56"}, + { "wallet": "0xeD6633CA4EE466c53F448D9ab5A5fB8Cb9d5F10f", "privateKey": "99a25f0fab404cd8152b42cf947e1fc04bf94be7a5f0eb28e44dd7228c5f391b"}, + { "wallet": "0x6Ba5a758bBC7d0F3A13Ba84d0E0B1cbfE3Cf8aef", "privateKey": "abee585f322b10736e70b7e2e20ff40468e6de122538eb754f558c99c6a30f93"}, + { "wallet": "0x997CDB231ce46201d553552a6b403E9bE13bD6BE", "privateKey": "4a1466f793683625f9930050d1d19e9f1714f54e642e689dd22f3f097b5c14b4"}, + { "wallet": "0x82eD570A2e187D603F6230b49B04D52d97bb3aAF", "privateKey": "8f33270561bbd29fcc26c0591b54953eab835c9c356fa083b288b81cf54e9dc4"}, + { "wallet": "0xC3ce62EF6D7CcEF67C580D2C77fEbe2fA01d7F6d", "privateKey": "b43f1ae4c789815b0efba960a8d62e5a7f467dc73d286e30eac9d4dcd7c5cbda"}, + { "wallet": "0x2820511CFdA87B1C52e2EbeBeBB4856e9B35aEb9", "privateKey": "ff1574396c39097c5e1672a75259cd587708bced1d608c75f2a6f277605424a6"}, + { "wallet": 
"0x8e1b5A545b8823208E7C4EAf5c951caED638068E", "privateKey": "98defd024183d9f4c1362b0c0d2ae2e319f5fc946af574abbb0b570270ea2b38"}, + { "wallet": "0x8Ca6ACC974AfdE65dcF148fC51e425272050eDC0", "privateKey": "8fe063d20fd447a8fd86eb98a9cc1f4805cea5ce717ddc96f34386c93b4397f4"}, + { "wallet": "0x0636B9712CE055Ca988347C5Ae847FaD5fE0f5Cd", "privateKey": "ed51a047f2def9067da626c50bccf3555de1edd61e33b02f0ea4fbd137ef7a3c"}, + { "wallet": "0xf1Fbc4F5Cd662a40D54c1122626afd431d814AC8", "privateKey": "2b9df86c7d39780b3e218276bc714f387152a8c117944a387ebba0ec8d0f3bea"}, + { "wallet": "0x6ce962A73BefA042379750247C8e3bD3B4271a6e", "privateKey": "e41107999c81d4200b8cdcfcc66736f208e08f843b8d7e2db68f2db45a9fb44e"}, + { "wallet": "0x7eE7E53fc3796C38C115f3b13e710B3647C0b00f", "privateKey": "9e88755f2993d2bbcec7839e1362007744e6ab77ad6e9d87f871a1774557a687"}, + { "wallet": "0xFC556b7B0cc5145691f98a13b47C4C321382BC52", "privateKey": "e5aede550805a5021193203fa5f90d55f3c753ccbd1ec2379ff9000e403fabd4"}, + { "wallet": "0x2F567A03d319681b346E64C28A6215E6CD4fDC9a", "privateKey": "04b5495f6047c7301112c460e58b0dbe11c562ecba865f753fed5920c44eb0ab"}, + { "wallet": "0xA5d024656AB6406994584B87f3353e9e0D399C60", "privateKey": "bced8c7bc0fbdbcd280ff603ceb6b0fadae81f235502ddd62d5bcb48d7d4a691"}, + { "wallet": "0x0B1fDc73ea21a139048C82718611B5B6b1FeD863", "privateKey": "f639afbdc7c3f6985064deb56269b865009b123d6fc55eaea9a09a7cfb9ad4d5"}, + { "wallet": "0x6b9BaEd3dF58653663888F48d0Ccd339cE8cd267", "privateKey": "2de3caf750b0f374fa9c765840054a97182d1812467ee7b1d3c9b0d891888898"}, + { "wallet": "0x7ddb04eA3bB547af5ae2012D353F2513fb924888", "privateKey": "eb09fdce5809d32f7c472c7502f5de1bb06612ee8c7c3ce7290b6c3ce544dcc9"}, + { "wallet": "0x9233Fc741ca65b6E41E2458229d385B02548f85F", "privateKey": "0961bed5895729f4d835362bf47b2297a197f8bf38598a8e7a59af131bca99b6"}, + { "wallet": "0xbd74D80601Dc10EB143cf48a6D84540A24480A9b", "privateKey": "7f0fd10c186c72f9d02952c922a94eaf2a6a45d6c986faef43ef2e72a9a7c313"}, + { 
"wallet": "0x2F17FE5c56F1027DCadbf096456c0485a23a702e", "privateKey": "4631416cb1e2340ac37b22d49d1c45664c3b8727f7d89bdb1ba3760abc531bf0"}, + { "wallet": "0xa10BECFc098a9c4224eee27C62A77A2B2972333B", "privateKey": "e46a4a97ab567c9a33aaf8173c305338977f5dd4b973ca96d7f4351a28b334f2"}, + { "wallet": "0xD6A1649e6E81d3B46084226f7aa89D7983107B48", "privateKey": "b8a9973661a63df8ac8a5eb54ae5568bcf37f56e83b0652b9f3f63b961b7e910"}, + { "wallet": "0x8d2E76074c4C50c484F9dAC98dEb3946540Cdd2c", "privateKey": "39c096cbdbda78b1713c8860b0a42b5001e7b7c7ca080018696acd6810385e99"}, + { "wallet": "0x7b194af13b84D1577E3D810cD8027A7E9eB7794C", "privateKey": "736ae83e4a625f3ecf3defd6556a84ac07fdcfd6ef9ee76521b2add87c39d18f"}, + { "wallet": "0xB0b10e021E4d34E9411f3ED60C4E2c8F5957823a", "privateKey": "a06d4fd8b97b8bd2631e8561a37171cb005ad4369250bc60ae7af1419b28177b"}, + { "wallet": "0xF21F0eabf564A725Fb78A3d53B585B37F13C0f88", "privateKey": "628f0140688904e834be45548d2d3f77a038053bef395c26114201b03cb296af"}, + { "wallet": "0x059cFcc3fa830E5B1E75659Ec236044e573B936f", "privateKey": "3e8506b7cce21cd2ae16f0b29c71fa754f0423bafaff996664e34dcb9e0e03db"}, + { "wallet": "0xD64C7Ab11C68A99d0F1d5d69dA32c1d1eDAbCaF5", "privateKey": "bdc88ca17adaacccb863c07cfc5a04fba5e0c7d63416ba272cb6ee7f9847b4b0"}, + { "wallet": "0x4933C879c72a0fbdC9158Fdf4cB578Adc4ECa83F", "privateKey": "28bcc1178a4d8a37f2db861034ae26fb8ec0f15ce4c0eefd41657dc5f50e435b"}, + { "wallet": "0x950803CB1937D7D29e5649Dba6acD113eFB9d7E1", "privateKey": "e73381f36fe0daa0b6cecf395768f7a9cf2243cbf3134946fde4026aab74ee3b"}, + { "wallet": "0xE8D1AB54F2e20f5a182CF715a5F4f68e9a20F681", "privateKey": "81d45399c0ca35eab2226fed08ffda7133aeb238ccb8499eaa832392ba0e5888"}, + { "wallet": "0xA3CBcAc553Da46F46277f4249758e03bBa7eff8f", "privateKey": "bdf78c1f0e7f0fe6a32a81d9440544fa53d873ac73cdf4f282beb236fca83dce"}, + { "wallet": "0x66Cf997858A2E319715d4F5060C1F59763C792e3", "privateKey": "c588165141e433d1b43bbd2f11c25ba34686f45c1ccdb8c5ab5314ec857ab1f2"}, + 
{ "wallet": "0xAc397775F320995003CB94fA5e0F4F3fa9Cd4550", "privateKey": "d001b91203f4b8ac42365098ddc4eab6ff91913aa38fa6bc5733ae965771da34"}, + { "wallet": "0xC39B1452F7eb3a437737Ae80a0d9fEe4e9C97Dc1", "privateKey": "cb08260623dee991ef0e94e09428e4b75d1fc006449d6785b3d251f784565d72"}, + { "wallet": "0x82bcfbf4Ca94FEA18bab6e0bf78497C745B4dcac", "privateKey": "f46a7502c87a9ce63e1d339c30f4bbd76509f91b2ebec6231f9f053612f9764c"} +] diff --git a/tools/local-network-setup/setup_environment.sh b/tools/local-network-setup/setup_environment.sh new file mode 100755 index 0000000000..78b41cfd01 --- /dev/null +++ b/tools/local-network-setup/setup_environment.sh @@ -0,0 +1,111 @@ +#!/bin/sh + +startingFolder=$(cd ../../../ && pwd) + +pathToOtNode="$startingFolder/ot-node" +pathToConfigFiles="$pathToOtNode/tools/local-network-setup/temporary-config-files" + +number_of_nodes=4 +# Check for script arguments +while [ $# -gt 0 ]; do + case "$1" in + # Override number of nodes if the argument is specified + --nodes=*) + number_of_nodes="${1#*=}" + if [[ $number_of_nodes -le 0 ]] + then + echo Cannot run 0 nodes + exit 1 + elif [[ $number_of_nodes -gt 10 ]]; then + echo Cannot run more than 10 nodes + exit 1 + fi + ;; + # Print script usage if --help is given + --help) + echo "Set up configurations and run a local blockchain and nodes (default 4) locally" + echo "Use --nodes= to specify the number of data nodes to generate (limit 10 nodes)" + exit 0 + ;; + *) + printf "***************************\n" + printf "* Error: Invalid argument.*\n" + printf "***************************\n" + exit 1 + esac + shift +done + +echo ============================== +echo ====== Starting ganache ====== +echo ============================== + + +ganachePID="$(ps aux | grep '[g]anache-cli' | head -1 | awk '{print $2}')" +if [ $ganachePID ] +then + echo Ganache is already running, stopping previous ganache process... 
+ kill -9 $ganachePID +fi + +osascript -e " + tell app \"Terminal\" + do script \"cd $pathToOtNode && npm run ganache\" + end tell + " + +echo =============================== +echo ===== Deploying contracts ===== +echo =============================== + +sleep 7 +npm run truffle:deploy:ganache +npm run truffle:deploy:ganache + +echo ================================ +echo ======= Setting up nodes ======= +echo ================================ + +node $pathToOtNode/tools/local-network-setup/generate_config_files.js --number_of_nodes=$number_of_nodes \ +--config_path=$pathToConfigFiles --path_to_node=$pathToOtNode + +RESULT=$? +if [ $RESULT -ne 0 ]; then + echo ================================================== + echo ======== Setting up nodes failed, exiting ======== + echo ================================================== + exit 1 +fi + +echo ================================ +echo ======== Starting nodes ======== +echo ================================ + +startNode() { + echo Starting node $1 + osascript -e "tell app \"Terminal\" + do script \"cd $pathToOtNode + npm start -- --configDir=$pathToConfigFiles/$1-config-data --config=$pathToConfigFiles/$1.json\" + end tell" +} + + +startNode DC + +# Start only DC node and exit +if [[ $number_of_nodes -ne 1 ]] +then + # Wait for the DC node to set up, then start remaining nodes + echo Waiting for DC node to set up before continuing... + sleep 15 + i=1 + while [[ $i -lt $number_of_nodes ]] + do + startNode DH$i + ((i = i + 1)) + done +fi + + + +