diff --git a/.travis.yml b/.travis.yml index 55b1b14eb7..b2f95b4ed0 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,10 +1,17 @@ +dist: xenial language: node_js node_js: - "9" env: - - NODE_ENV=development + - NODE_ENV=development ARTIFACTS_DIR=$TRAVIS_BUILD_DIR/artifacts CUCUMBER_ARTIFACTS_DIR=$ARTIFACTS_DIR/cucumber sudo: enabled +addons: + apt: + packages: + - s3cmd before_script: + - mkdir -p $ARTIFACTS_DIR + - mkdir -p $CUCUMBER_ARTIFACTS_DIR - echo arangodb3 arangodb/password password root | sudo debconf-set-selections # set username 'root' - echo arangodb3 arangodb/password_again password root | sudo debconf-set-selections # set password 'root' - chmod +x setup_arangodb.sh @@ -13,34 +20,28 @@ before_script: - cp .origintrail_noderc.travis .origintrail_noderc - npm run bootstrap - npm install -g ganache-cli@6.1.5 &> /dev/null - - npm install -g truffle@5.0.0-beta.1 &> /dev/null + - npm install -g truffle@5.0.0-beta.1 &> /dev/null script: - npm run lint - - 'if [ "$TRAVIS_EVENT_TYPE" != "push" ]; then npm run test:bdd; fi' + - if [ "$TRAVIS_EVENT_TYPE" != "push" ]; then npm run test:bdd:dryrun; npm run test:bdd -- --world-parameters "{\"appDataBaseDir\":\"$CUCUMBER_ARTIFACTS_DIR\",\"keepFailedArtifacts\":true}"; fi # checks for arangodb based solution - - npm test 2> mocha-logs.log - - npm start &>arangodb.log & + - npm test 2> $ARTIFACTS_DIR/mocha-logs.log + - npm start &> $ARTIFACTS_DIR/app-start.log & - sleep 10 - jobs - if [ -n "$(jobs -p)" ]; then kill %1; fi # compile and check Smart Contracts - - ganache-cli -i 5777 -p 7545 -l 10000000 -m "aspect ask story desert profit engage tuition leave fade giraffe exclude brief" &>ganache.log & + - ganache-cli -i 5777 -p 7545 -l 10000000 -m "aspect ask story desert profit engage tuition leave fade giraffe exclude brief" &> $ARTIFACTS_DIR/ganache.log & - cd modules/Blockchain/Ethereum - - truffle test --network test > ../../../truffle-test.log - - rm -rf build && truffle migrate --reset --compile-all --network ganache > ../../../truffle-migrate.log - - cd ../../../ + - truffle test --network test > $ARTIFACTS_DIR/truffle-test.log + - rm -rf build && truffle migrate --reset --compile-all --network ganache > $ARTIFACTS_DIR/truffle-migrate.log + - cd $TRAVIS_BUILD_DIR - jobs - - kill %1 + - kill -9 %1 after_script: - - cat mocha-logs.log - - rm -f mocha-logs.log - - cat arangodb.log - - rm -f arangodb.log - - cat truffle-test.log - - rm -f truffle-test.log - - cat truffle-migrate.log - - rm -f truffle-migrate.log - # enable line bellow in case you are interested in reading ganache-cli logs - # - cat ganache.log - - rm -f ganache.log + - s3cmd --acl-public put --recursive $ARTIFACTS_DIR/* s3://ot-travis-ci/${TRAVIS_REPO_SLUG}/${TRAVIS_BUILD_NUMBER}/${TRAVIS_JOB_NUMBER}/ + - echo "Uploaded to s3://ot-travis-ci/${TRAVIS_REPO_SLUG}/${TRAVIS_BUILD_NUMBER}/${TRAVIS_JOB_NUMBER}/" - kill -9 $(pgrep arangod) diff --git a/config/config.json b/config/config.json index 3036f8430e..02a1d1a26d 100644 --- a/config/config.json +++ b/config/config.json @@ -89,7 +89,7 @@ "branch": "develop" }, "dataSetStorage": "data_set_storage", - "dc_holding_time_in_minutes": 10080, + "dc_holding_time_in_minutes": 60, "dc_token_amount_per_holder": "50000000000000000000", "dc_litigation_interval_in_minutes": 5, "dh_max_holding_time_in_minutes": 10080, @@ -189,7 +189,7 @@ "branch": "release/staging" }, "dataSetStorage": "data_set_storage", - "dc_holding_time_in_minutes": 1440, + "dc_holding_time_in_minutes": 60, "dc_token_amount_per_holder": "1",
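A note on the new --world-parameters flag in the .travis.yml script section above: cucumber-js parses that JSON object and exposes it on the test World as this.parameters, which is how hooks.js and the step files later in this diff read this.parameters.appDataBaseDir and this.parameters.keepFailedArtifacts. The suite's World constructor is not part of this diff, so the following is only a minimal sketch of the standard cucumber-js mechanism being relied on:

```js
const { setWorldConstructor } = require('cucumber');

class CustomWorld {
    constructor({ parameters }) {
        // `parameters` is the object passed on the CLI via --world-parameters,
        // e.g. { appDataBaseDir: '/tmp/artifacts/cucumber', keepFailedArtifacts: true }.
        this.parameters = parameters;
    }
}

setWorldConstructor(CustomWorld);
```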
"dc_litigation_interval_in_minutes": 5, "dh_max_holding_time_in_minutes": 1440, @@ -289,7 +289,7 @@ "branch": "release/stable" }, "dataSetStorage": "data_set_storage", - "dc_holding_time_in_minutes": 10080, + "dc_holding_time_in_minutes": 60, "dc_token_amount_per_holder": "50000000000000000000", "dc_litigation_interval_in_minutes": 5, "dh_max_holding_time_in_minutes": 10080, @@ -392,7 +392,7 @@ "branch": "master" }, "dataSetStorage": "data_set_storage", - "dc_holding_time_in_minutes": 10080, + "dc_holding_time_in_minutes": 60, "dc_token_amount_per_holder": "50000000000000000000", "dc_litigation_interval_in_minutes": 5, "dh_max_holding_time_in_minutes": 10080, diff --git a/modules/Blockchain/Ethereum/contracts/Profile.sol b/modules/Blockchain/Ethereum/contracts/Profile.sol index a759217973..8a21741487 100644 --- a/modules/Blockchain/Ethereum/contracts/Profile.sol +++ b/modules/Blockchain/Ethereum/contracts/Profile.sol @@ -168,22 +168,22 @@ contract Profile { } require(minimalStake <= profileStorage.getStake(payer).sub(profileStorage.getStakeReserved(payer)), - "Profile does not have enough stake to take new jobs!"); + "Data creator does not have enough stake to take new jobs!"); require(minimalStake <= profileStorage.getStake(identity1).sub(profileStorage.getStakeReserved(identity1)), - "Profile does not have enough stake to take new jobs!"); + "First profile does not have enough stake to take new jobs!"); require(minimalStake <= profileStorage.getStake(identity2).sub(profileStorage.getStakeReserved(identity2)), - "Profile does not have enough stake to take new jobs!"); + "Second profile does not have enough stake to take new jobs!"); require(minimalStake <= profileStorage.getStake(identity3).sub(profileStorage.getStakeReserved(identity3)), - "Profile does not have enough stake to take new jobs!"); + "Third profile does not have enough stake to take new jobs!"); require(profileStorage.getStake(payer).sub(profileStorage.getStakeReserved(payer)) >= amount.mul(3), - "Profile does not have enough stake for reserving!"); + "Data creator does not have enough stake for reserving!"); require(profileStorage.getStake(identity1).sub(profileStorage.getStakeReserved(identity1)) >= amount, - "Profile does not have enough stake for reserving!"); + "First profile does not have enough stake for reserving!"); require(profileStorage.getStake(identity2).sub(profileStorage.getStakeReserved(identity2)) >= amount, - "Profile does not have enough stake for reserving!"); + "Second profile does not have enough stake for reserving!"); require(profileStorage.getStake(identity3).sub(profileStorage.getStakeReserved(identity3)) >= amount, - "Profile does not have enough stake for reserving!"); + "Third profile does not have enough stake for reserving!"); profileStorage.increaseStakesReserved( diff --git a/modules/EventEmitter.js b/modules/EventEmitter.js index 216035183a..06acc64690 100644 --- a/modules/EventEmitter.js +++ b/modules/EventEmitter.js @@ -338,13 +338,6 @@ class EventEmitter { this._on('api-network-query', (data) => { logger.info(`Network-query handling triggered with query ${JSON.stringify(data.query)}.`); - if (!appState.enoughFunds) { - data.response.status(400); - data.response.send({ - message: 'Insufficient funds', - }); - return; - } dvController.queryNetwork(data.query) .then((queryId) => { diff --git a/modules/GS1Importer.js b/modules/GS1Importer.js index bfdc700dd2..4a10c4c978 100644 --- a/modules/GS1Importer.js +++ b/modules/GS1Importer.js @@ -801,12 +801,12 @@ class GS1Importer { // 
eslint-disable-next-line const { vertices: newDenormalizedVertices, edges: newDenormalizedEdges } = denormalizeGraph(dataSetId, allVertices, allEdges); - allVertices.map((v) => { + newDenormalizedVertices.map((v) => { v.inTransaction = true; return v; }); await Promise.all(newDenormalizedVertices.map(vertex => this.db.addVertex(vertex))); - allEdges.map((e) => { + newDenormalizedEdges.map((e) => { e.inTransaction = true; return e; }); @@ -824,6 +824,15 @@ class GS1Importer { await this.db.commit(); + normalizedVertices.map((v) => { + delete v.inTransaction; + return v; + }); + normalizedEdges.map((e) => { + delete e.inTransaction; + return e; + }); + return { vertices: normalizedVertices, edges: normalizedEdges, diff --git a/modules/command/dh/dh-offer-handle-command.js b/modules/command/dh/dh-offer-handle-command.js index 0d4053d0f9..433d40d7ee 100644 --- a/modules/command/dh/dh-offer-handle-command.js +++ b/modules/command/dh/dh-offer-handle-command.js @@ -31,7 +31,13 @@ class DHOfferHandleCommand extends Command { }, dcNodeId); if (response.status === 'fail') { - throw new Error(`Failed to receive replication from ${dcNodeId} for offer ${offerId}`); + if (response.message) { + throw new Error('Failed to receive replication ' + + `from ${dcNodeId} for offer ${offerId}. ` + + `Reason: ${response.message}`); + } else { + throw new Error(`Failed to receive replication from ${dcNodeId} for offer ${offerId}.`); + } } const bid = await Models.bids.findOne({ diff --git a/modules/command/dv/dv-data-read-response-free-command.js b/modules/command/dv/dv-data-read-response-free-command.js index 099278b012..df37e06626 100644 --- a/modules/command/dv/dv-data-read-response-free-command.js +++ b/modules/command/dv/dv-data-read-response-free-command.js @@ -4,6 +4,7 @@ const Models = require('../../../models/index'); const Command = require('../command'); const ImportUtilities = require('../../ImportUtilities'); const Graph = require('../../Graph'); +const Utilities = require('../../Utilities'); /** * Handles data read response for free. @@ -73,10 +74,9 @@ class DVDataReadResponseFreeCommand extends Command { // Calculate root hash and check is it the same on the SC. 
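Directly below, the fingerprint guard in dv-data-read-response-free-command.js is widened: a root hash that exists on the blockchain but is all zeroes is now treated the same as a missing one. For context, a minimal sketch of what such an isZeroHash check amounts to, mirroring the BN-based helper this diff adds to test/bdd/steps/lib/utilities.js (the standalone denormalizeHex and the explicit base-16 argument are assumptions for the sketch, not necessarily the module's exact body):

```js
const BN = require('bn.js');

// Strip an optional 0x prefix so '0x00' and '00' compare the same.
function denormalizeHex(hex) {
    return hex.startsWith('0x') ? hex.substring(2) : hex;
}

// True when the hex string encodes the number zero, whatever its length.
function isZeroHash(hash) {
    return new BN(denormalizeHex(hash), 16).eqn(0);
}

// isZeroHash(`0x${'0'.repeat(64)}`); // true
// isZeroHash('0x01');                // false
```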
const { vertices, edges } = message.data; - const fingerprint = await this.blockchain.getRootHash(dataSetId); - if (!fingerprint) { + if (!fingerprint || Utilities.isZeroHash(fingerprint)) { const errorMessage = `Couldn't not find fingerprint for Dc ${dcWallet} and import ID ${dataSetId}`; this.logger.warn(errorMessage); networkQuery.status = 'FAILED'; diff --git a/modules/network/kademlia/kademlia.js b/modules/network/kademlia/kademlia.js index 58bc24d1f2..2a6fccbbfb 100644 --- a/modules/network/kademlia/kademlia.js +++ b/modules/network/kademlia/kademlia.js @@ -12,6 +12,8 @@ const sleep = require('sleep-async')().Promise; const leveldown = require('leveldown'); const PeerCache = require('./peer-cache'); const ip = require('ip'); +const uuidv4 = require('uuid/v4'); + const KadenceUtils = require('@kadenceproject/kadence/lib/utils.js'); const { IncomingMessage, OutgoingMessage } = require('./logger'); @@ -173,6 +175,48 @@ class Kademlia { this.log.info('Starting OT Node...'); this.node.eclipse = this.node.plugin(kadence.eclipse()); this.node.quasar = this.node.plugin(kadence.quasar()); + + const quasarPublish = function (topic, contents, options = {}, callback = () => null) { + if (typeof options === 'function') { + callback = options; + options = {}; + } + + const publicationId = uuidv4(); + const neighbors = [...this.node.router.getClosestContactsToKey( + options.routingKey || this.node.identity.toString('hex'), + kadence.constants.ALPHA * 3, + ).entries()]; + + const errors = []; + let sentSoFar = 0; + async.eachLimit(neighbors, kadence.constants.ALPHA, (contact, done) => { + if (sentSoFar >= kadence.constants.ALPHA) { + // Achieved desired publications. + done(); + return; + } + this.node.send(kadence.quasar.QuasarPlugin.PUBLISH_METHOD, { + uuid: publicationId, + topic, + contents, + publishers: [this.node.identity.toString('hex')], + ttl: kadence.constants.MAX_RELAY_HOPS, + }, contact, (error) => { + if (error) { + errors.push(error); + } else { + sentSoFar += 1; + } + done(); + }); + }, (error) => { + callback(error, sentSoFar); + }); + }; + + this.node.quasar.quasarPublish = quasarPublish.bind(this.node.quasar); + this.log.info('Quasar initialised'); this.node.peercache = this.node.plugin(PeerCache(path.join( @@ -604,11 +648,17 @@ class Kademlia { node.publish = async (topic, message, opts = {}) => new Promise((resolve, reject) => { node.quasar.quasarPublish( topic, message, opts, - (err, res) => { + (err, successfulPublishes) => { if (err) { reject(err); } else { - resolve(res); + if (successfulPublishes === 0) { + // Publish failed. + reject(Error('Publish failed.')); + return; + } + this.log.debug(`Published successfully to ${successfulPublishes} peers.`); + resolve(); } }, ); diff --git a/modules/service/dc-service.js b/modules/service/dc-service.js index 5a463212e4..b36503ab9d 100644 --- a/modules/service/dc-service.js +++ b/modules/service/dc-service.js @@ -226,7 +226,9 @@ class DCService { this.logger.info(`Request for replication of offer external ID ${offerId} received. 
Sender ${identity}`); if (!offerId || !wallet) { - this.logger.warn('Asked replication without providing offer ID or wallet.'); + const message = 'Asked replication without providing offer ID or wallet.'; + this.logger.warn(message); + await this.transport.sendResponse(response, { status: 'fail', message }); return; } @@ -239,12 +241,16 @@ class DCService { ], }); if (!offerModel) { - this.logger.warn(`Replication request for offer external ID ${offerId} that I don't know.`); + const message = `Replication request for offer external ID ${offerId} that I don't know.`; + this.logger.warn(message); + await this.transport.sendResponse(response, { status: 'fail', message }); return; } const offer = offerModel.get({ plain: true }); if (offer.status !== 'STARTED') { - this.logger.warn(`Replication request for offer external ${offerId} that is not in STARTED state.`); + const message = `Replication request for offer external ${offerId} that is not in STARTED state.`; + this.logger.warn(message); + await this.transport.sendResponse(response, { status: 'fail', message }); return; } diff --git a/package-lock.json b/package-lock.json index aeab452916..c1f2728e64 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "origintrail_node", - "version": "2.0.26", + "version": "2.0.27", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/package.json b/package.json index 55bd55641c..874aaf5108 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "origintrail_node", - "version": "2.0.26", + "version": "2.0.27", "description": "OriginTrail node", "main": ".eslintrc.js", "config": { @@ -16,8 +16,9 @@ "test:api:nocov": "mocha --exit $(find test/api -name '*.js')", "test:protocol": "nyc mocha --exit $(find test/protocol -name '*.js')", "test:protocol:nocov": "mocha --exit $(find test/protocol -name '*.js')", - "test:bdd": "cucumber-js --fail-fast --format progress-bar --format-options '{\"colorsEnabled\": true}' test/bdd/ -r test/bdd/steps/", - "test:bdd:cov": " nyc cucumber-js --fail-fast --format progress-bar --format-options '{\"colorsEnabled\": true}' test/bdd/ -r test/bdd/steps/", + "test:bdd": "cucumber-js --fail-fast --format progress --format-options '{\"colorsEnabled\": true}' test/bdd/ -r test/bdd/steps/", + "test:bdd:dryrun": "cucumber-js --dry-run test/bdd/ -r test/bdd/steps/", + "test:bdd:cov": " nyc cucumber-js --fail-fast --format progress --format-options '{\"colorsEnabled\": true}' test/bdd/ -r test/bdd/steps/", "test:bdd:verbose": "cucumber-js --fail-fast --format event-protocol --format-options '{\"colorsEnabled\": true}' test/bdd/ -r test/bdd/steps/", "start": "node node_version_check.js && node ot-node.js", "debug:start": "node --nolazy --inspect-brk ot-node.js", @@ -31,7 +32,8 @@ "setup:hard:all": "node ./scripts/setup.js --hard --all", "debug:test:unit": "mocha --inspect-brk --exit $(find test/modules -name '*.js')", "debug:test:api": "mocha --inspect-brk --exit $(find test/api -name '*.js')", - "debug:test:protocol": "mocha --inspect-brk --exit $(find test/protocol -name '*.js')" + "debug:test:protocol": "mocha --inspect-brk --exit $(find test/protocol -name '*.js')", + "ganache": "ganache-cli -i 5777 -p 8545 -l 10000000 >/dev/null & sleep 5 && tenderly proxy --proxy-port 7545" }, "devDependencies": { "babel-eslint": "^8.2.6", diff --git a/test/api/zkGetTrail.test.js b/test/api/zkGetTrail.test.js index 46fe98c021..9be95eca0b 100644 --- a/test/api/zkGetTrail.test.js +++ b/test/api/zkGetTrail.test.js @@ -81,17 +81,17 @@ 
describe('Check ZK by quering /api/trail for EVENT vertices', () => { inputXmlFiles.forEach((xmlFile) => { let queryObject; let myTrail; - it.skip(`zero knowledge status check for EVENT in ${path.basename(xmlFile.args[0])} file`, async () => { + it(`zero knowledge status check for EVENT in ${path.basename(xmlFile.args[0])} file`, async () => { await gs1.parseGS1(await Utilities.fileContents(xmlFile.args[0])); switch (path.basename(xmlFile.args[0])) { case 'Transformation.xml': - queryObject = { uid: 'CARENGINES_PROVIDER_ID:2015-03-15T00:00:00.000-04:00Z-04:00' }; + queryObject = { uid: 'urn:ot:object:actor:id:Car.Engines:2015-03-15T00:00:00.000-04:00Z-04:00' }; break; case 'GraphExample_1.xml': queryObject = { uid: 'urn:ot:object:actor:id:Company_1:2015-04-17T00:00:00.000-04:00Z-04:00' }; break; case 'GraphExample_2.xml': - queryObject = { uid: 'SENDER_ID:2015-03-15T00:00:00.000-04:00Z-04:00' }; + queryObject = { uid: 'urn:ot:object:actor:id:Company _1:2015-03-15T00:00:00.000-04:00Z-04:00' }; break; case 'GraphExample_3.xml': queryObject = { uid: 'urn:ot:object:actor:id:Company_2:2015-04-17T00:00:00.000-04:00Z-04:00' }; @@ -106,8 +106,6 @@ describe('Check ZK by quering /api/trail for EVENT vertices', () => { myTrail = await product.getTrailByQuery(queryObject); - assert.isAbove(Object.keys(myTrail).length, 0); - Object.keys(myTrail).forEach((key, index) => { if (myTrail[key].vertex_type === 'EVENT') { switch (path.basename(xmlFile.args[0])) { diff --git a/test/bdd/features/datalayer.feature b/test/bdd/features/datalayer.feature index 1532fa7e7b..d316f553e6 100644 --- a/test/bdd/features/datalayer.feature +++ b/test/bdd/features/datalayer.feature @@ -1,4 +1,4 @@ -Feature: ERC725 Profile features +Feature: Data layer related features Background: Setup local blockchain and bootstraps Given the blockchain is set up And 1 bootstrap is running @@ -8,12 +8,12 @@ Feature: ERC725 Profile features And I start the nodes And I use 1st node as DC And DC imports "importers/xml_examples/Basic/01_Green_to_pink_shipment.xml" as GS1 - Given DC initiates the replication - And I wait for 10 seconds + Given DC initiates the replication for last imported dataset + And DC waits for last offer to get written to blockchain And I remember previous import's fingerprint value And DC imports "importers/xml_examples/Basic/02_Green_to_pink_shipment.xml" as GS1 - And DC initiates the replication - And I wait for 10 seconds + And DC initiates the replication for last imported dataset + And DC waits for last offer to get written to blockchain Then checking again first import's root hash should point to remembered value Scenario: Smoke check data-layer basic endpoints @@ -26,10 +26,21 @@ Feature: ERC725 Profile features Given I query DC node locally for last imported data set id Then response hash should match last imported data set id - Scenario: Basic dataset integrity with its xml + Scenario: Basic dataset integrity with it's xml Given I setup 1 node And I start the node And I use 1st node as DC And DC imports "importers/xml_examples/Basic/01_Green_to_pink_shipment.xml" as GS1 Then imported data is compliant with 01_Green_to_pink_shipment.xml file + Scenario: Dataset immutability I + Given I setup 1 node + And I start the node + And I use 1st node as DC + And DC imports "importers/xml_examples/Basic/01_Green_to_pink_shipment.xml" as GS1 + Given DC initiates the replication for last imported dataset + And DC waits for last offer to get written to blockchain + And DC imports 
"importers/xml_examples/Retail/01_Green_to_pink_shipment.xml" as GS1 + Given DC initiates the replication for last imported dataset + And DC waits for last offer to get written to blockchain + Then DC manually calculated datasets data and root hashes matches ones from blockchain diff --git a/test/bdd/features/importer.feature b/test/bdd/features/importer.feature index 82f84ab090..fa35e0794b 100644 --- a/test/bdd/features/importer.feature +++ b/test/bdd/features/importer.feature @@ -8,11 +8,11 @@ Feature: Test basic importer features And I start the node And I use 1st node as DC And DC imports "importers/json_examples/WOT_Example_1.json" as WOT - Given DC initiates the replication - And I wait for 10 seconds + Given DC initiates the replication for last imported dataset + And DC waits for last offer to get written to blockchain And I remember previous import's fingerprint value And DC imports "importers/json_examples/WOT_Example_2.json" as WOT - Then the last import's hash should be the same as one manually calculated + Then DC's last import's hash should be the same as one manually calculated Then checking again first import's root hash should point to remembered value Scenario: Check that WOT import is connecting to the same batch from GS1 import diff --git a/test/bdd/features/network.feature b/test/bdd/features/network.feature index 2abe10c115..440e84e572 100644 --- a/test/bdd/features/network.feature +++ b/test/bdd/features/network.feature @@ -14,8 +14,8 @@ Feature: Test basic network features And I start the nodes And I use 1st node as DC And DC imports "importers/xml_examples/Retail/01_Green_to_pink_shipment.xml" as GS1 - Then the last import's hash should be the same as one manually calculated - Given DC initiates the replication + Then DC's last import's hash should be the same as one manually calculated + Given DC initiates the replication for last imported dataset And I wait for replications to finish Then the last root hash should be the same as one manually calculated Then the last import should be the same on all nodes that replicated data @@ -26,8 +26,8 @@ Feature: Test basic network features And I start the nodes And I use 1st node as DC And DC imports "importers/xml_examples/Retail/01_Green_to_pink_shipment.xml" as GS1 - Then the last import's hash should be the same as one manually calculated - Given DC initiates the replication + Then DC's last import's hash should be the same as one manually calculated + Given DC initiates the replication for last imported dataset And I wait for replications to finish Then the last import should be the same on all nodes that replicated data Given I additionally setup 1 node @@ -37,6 +37,8 @@ Feature: Test basic network features Then all nodes with last import should answer to last network query by DV Given the DV purchases import from the last query from a DH Then the last import should be the same on DC and DV nodes + Then DV's last purchase's hash should be the same as one manually calculated + Scenario: Smoke check /api/withdraw endpoint Given I setup 1 node @@ -69,8 +71,8 @@ Feature: Test basic network features And I start the node And I use 1st node as DC And DC imports "importers/xml_examples/Retail/01_Green_to_pink_shipment.xml" as GS1 - Then the last import's hash should be the same as one manually calculated - Given DC initiates the replication + Then DC's last import's hash should be the same as one manually calculated + Given DC initiates the replication for last imported dataset And DC waits for replication window to close 
Given I additionally setup 1 node And I start additional nodes @@ -86,8 +88,8 @@ Feature: Test basic network features And I start the node And I use 1st node as DC And DC imports "importers/xml_examples/Retail/01_Green_to_pink_shipment.xml" as GS1 - Then the last import's hash should be the same as one manually calculated - Given DC initiates the replication + Then DC's last import's hash should be the same as one manually calculated + Given DC initiates the replication for last imported dataset And DC waits for replication window to close Given I additionally setup 1 node And I start additional nodes @@ -105,6 +107,19 @@ Feature: Test basic network features Then the last import should be the same on DC and DV nodes Then the last import should be the same on DC and DV2 nodes + Scenario: DV should be able to publish network query regardless of the funds + # Start node and let it create own profile. It needs some ETH and TRAC for that. + Given I setup 1 node + And I start the node + And I stop the node + # Spend all the funds and try to query network. + When the 1st node's spend all the Tokens + And the 1st node's spend all the Ethers + And I start the node + And I use 1st node as DV + When DV publishes query consisting of path: "identifiers.id", value: "urn:epc:id:sgtin:Batch_1" and opcode: "EQ" to the network + Then everything should be ok + Scenario: API calls should be forbidden Given I setup 1 node And I override configuration for all nodes diff --git a/test/bdd/features/protocol-issues.feature b/test/bdd/features/protocol-issues.feature new file mode 100644 index 0000000000..417373a933 --- /dev/null +++ b/test/bdd/features/protocol-issues.feature @@ -0,0 +1,26 @@ +Feature: Protocol related issues. + + Background: Setup local blockchain and bootstraps + Given the blockchain is set up + And 1 bootstrap is running + + Scenario: Expect publish to work with ghost nodes. + # Use 7 nodes in total - Kadence.ALPHA(3) times two plus one DC. + Given I setup 7 nodes + And I start the nodes + And I use 1st node as DC + And DC imports "importers/xml_examples/Retail/01_Green_to_pink_shipment.xml" as GS1 + Then DC's last import's hash should be the same as one manually calculated + # Stop the node to avoid replication. + When I stop the 7th node + # Start replication to force DC to write fingerprint so DV can buy it. Do not wait to finish. 
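This ghost-nodes scenario exercises the quasarPublish override from the kademlia.js hunk earlier in the diff: rather than failing when some of the closest contacts are dead, the node draws candidates from ALPHA * 3 neighbours and keeps sending, at most ALPHA requests in flight, until ALPHA sends succeed. The pattern in isolation (names simplified from the actual hunk):

```js
const async = require('async');

const ALPHA = 3; // kadence.constants.ALPHA

// Send to candidates at most ALPHA at a time and stop counting once ALPHA
// sends have succeeded; per-contact errors are swallowed rather than fatal,
// so dead peers no longer fail the whole publish.
function publishCapped(contacts, send, callback) {
    let sentSoFar = 0;
    async.eachLimit(contacts, ALPHA, (contact, done) => {
        if (sentSoFar >= ALPHA) {
            done(); // achieved the desired number of publications
            return;
        }
        send(contact, (error) => {
            if (!error) {
                sentSoFar += 1;
            }
            done(); // keep trying the remaining contacts
        });
    }, error => callback(error, sentSoFar));
}
```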
+ Given DC initiates the replication for last imported dataset + And I wait for replications to finish + Then the last root hash should be the same as one manually calculated + Then the last import should be the same on all nodes that replicated data + When I stop [2, 3, 4, 5] nodes + And I start the 7th node + And I use 7th node as DV + Given DV publishes query consisting of path: "identifiers.id", value: "urn:epc:id:sgtin:Batch_1" and opcode: "EQ" to the network + And the DV purchases import from the last query from the DC + Then the last import should be the same on DC and DV nodes diff --git a/test/bdd/steps/common.js b/test/bdd/steps/common.js index 112ce2a137..d14d77d338 100644 --- a/test/bdd/steps/common.js +++ b/test/bdd/steps/common.js @@ -14,3 +14,16 @@ Then(/^the (\d+)[st|nd|rd|th]+ node should start normally$/, function (nodeIndex const node = this.state.nodes[nodeIndex - 1]; expect(node.isRunning).to.be.true; }); + +/* + Dummy sentence to check if everything is ok + */ +Then(/^everything should be ok$/, function (done) { + this.state.nodes.forEach((node) => { + expect(node.isRunning).to.be.true; + }); + this.state.bootstraps.forEach((node) => { + expect(node.isRunning).to.be.true; + }); + done(); +}); diff --git a/test/bdd/steps/datalayer.js b/test/bdd/steps/datalayer.js new file mode 100644 index 0000000000..e4dd1fc52a --- /dev/null +++ b/test/bdd/steps/datalayer.js @@ -0,0 +1,99 @@ +/* eslint-disable no-unused-expressions, max-len, no-await-in-loop */ + +const { + Then, +} = require('cucumber'); +const { expect } = require('chai'); + +const httpApiHelper = require('./lib/http-api-helper'); +const utilities = require('./lib/utilities'); +const ImportUtilities = require('../../../modules/ImportUtilities'); + + +Then(/^imported data is compliant with 01_Green_to_pink_shipment.xml file$/, async function () { + expect(!!this.state.dc, 'DC node not defined. Use other step to define it.').to.be.equal(true); + expect(this.state.nodes.length, 'No started nodes').to.be.greaterThan(0); + expect(this.state.bootstraps.length, 'No bootstrap nodes').to.be.greaterThan(0); + expect(!!this.state.lastImport, 'Last import didn\'t happen. 
Use other step to do it.').to.be.equal(true); + + const { dc } = this.state; + let data; + const myApiImportInfo = await httpApiHelper.apiImportInfo(dc.state.node_rpc_url, this.state.lastImport.data_set_id); + + expect( + utilities.findVertexIdValue(myApiImportInfo.import.vertices, 'IDENTIFIER', 'urn:ot:object:actor:id:Company_Green', 'uid', 'urn:ot:object:actor:id:Company_Green:2018-01-01T01:00:00.000-04:00Z-04:00').length, + 'There should be at least one such vertex', + ).to.be.above(0); + data = { + parent_id: 'urn:epc:id:sgln:Building_Green', + }; + expect( + utilities.findVertexUid(myApiImportInfo.import.vertices, 'LOCATION', 'urn:ot:object:actor:id:Company_Green', 'urn:epc:id:sgln:Building_Green_V2', data).length, + 'There should be at least one such vertex', + ).to.be.above(0); + data = { + category: 'Company', + name: 'Green', + object_class_id: 'Actor', + wallet: '0xBbAaAd7BD40602B78C0649032D2532dEFa23A4C0', + }; + expect( + utilities.findVertexUid(myApiImportInfo.import.vertices, 'ACTOR', 'urn:ot:object:actor:id:Company_Green', 'urn:ot:object:actor:id:Company_Green', data).length, + 'There should be at least one such vertex', + ).to.be.above(0); + data = { + category: 'Beverage', + description: 'Wine Bottle', + object_class_id: 'Product', + }; + expect( + utilities.findVertexUid(myApiImportInfo.import.vertices, 'PRODUCT', 'urn:ot:object:actor:id:Company_Green', 'urn:ot:object:product:id:Product_1', data).length, + 'There should be at least one such vertex', + ).to.be.above(0); + data = { + expirationDate: '2020-31-12T00:01:54Z', + parent_id: 'urn:ot:object:product:id:Product_1', + productId: 'urn:ot:object:product:id:Product_1', + productionDate: '2017-31-12T00:01:54Z', + quantities: { + 'urn:ot:object:actor:id:Company_Green:2018-01-01T01:00:00.000-04:00Z-04:00': { + PCS: '5d3381241af6b16260f680059e9042', + }, + }, + }; + expect( + utilities.findVertexUid(myApiImportInfo.import.vertices, 'BATCH', 'urn:ot:object:actor:id:Company_Green', 'urn:epc:id:sgtin:Batch_1', data).length, + 'There should be at least one such vertex', + ).to.be.above(0); + expect( + utilities.findVertexIdValue(myApiImportInfo.import.vertices, 'IDENTIFIER', 'urn:ot:object:actor:id:Company_Green', 'uid', 'urn:epc:id:sgln:Building_Green').length, + 'There should be at least one such vertex', + ).to.be.above(0); +}); + +Then(/^DC manually calculated datasets data and root hashes matches ones from blockchain$/, async function () { + expect(!!this.state.dc, 'DC node not defined. 
Use other step to define it.').to.be.equal(true); + + const { dc } = this.state; + const myApiImportsInfo = await httpApiHelper.apiImportsInfo(dc.state.node_rpc_url); + + for (const i in Array.from({ length: myApiImportsInfo.length })) { + const myDataSetId = myApiImportsInfo[i].data_set_id; + const myFingerprint = await httpApiHelper.apiFingerprint(dc.state.node_rpc_url, myDataSetId); + expect(utilities.isZeroHash(myFingerprint.root_hash), 'root hash value should not be zero hash').to.be.equal(false); + + + const myEdgesVertices = await httpApiHelper.apiQueryLocalImportByDataSetId(dc.state.node_rpc_url, myDataSetId); + expect(myEdgesVertices, 'Should have corresponding keys').to.have.keys(['edges', 'vertices']); + + const calculatedImportHash = utilities.calculateImportHash(myEdgesVertices); + expect(calculatedImportHash, 'Calculated hashes are different').to.be.equal(myDataSetId); + + // vertices and edges are already sorted from the response + const myMerkle = await ImportUtilities.merkleStructure(myEdgesVertices.vertices.filter(vertex => + vertex.vertex_type !== 'CLASS'), myEdgesVertices.edges); + + expect(myFingerprint.root_hash, 'Fingerprint from API endpoint and manually calculated should match').to.be.equal(myMerkle.tree.getRoot()); + } +}); + diff --git a/test/bdd/steps/endpoints.js b/test/bdd/steps/endpoints.js new file mode 100644 index 0000000000..9c434fc40e --- /dev/null +++ b/test/bdd/steps/endpoints.js @@ -0,0 +1,202 @@ +/* eslint-disable no-unused-expressions, max-len */ + +const { + Then, Given, +} = require('cucumber'); +const { expect } = require('chai'); + +const httpApiHelper = require('./lib/http-api-helper'); + +Given(/^DC imports "([^"]*)" as ([GS1|WOT]+)$/, async function (importFilePath, importType) { + expect(importType, 'importType can only be GS1 or WOT.').to.satisfy(val => (val === 'GS1' || val === 'WOT')); + expect(!!this.state.dc, 'DC node not defined. Use other step to define it.').to.be.equal(true); + expect(this.state.nodes.length, 'No started nodes').to.be.greaterThan(0); + expect(this.state.bootstraps.length, 'No bootstrap nodes').to.be.greaterThan(0); + + const { dc } = this.state; + const host = dc.state.node_rpc_url; + + + const importResponse = await httpApiHelper.apiImport(host, importFilePath, importType); + + expect(importResponse).to.have.keys(['data_set_id', 'message', 'wallet']); + this.state.lastImport = importResponse; +}); + +Given(/^DC initiates the replication for last imported dataset$/, { timeout: 60000 }, async function () { + expect(!!this.state.dc, 'DC node not defined. Use other step to define it.').to.be.equal(true); + expect(!!this.state.lastImport, 'Nothing was imported. Use other step to do it.').to.be.equal(true); + expect(this.state.nodes.length, 'No started nodes').to.be.greaterThan(0); + expect(this.state.bootstraps.length, 'No bootstrap nodes').to.be.greaterThan(0); + + const { dc } = this.state; + const response = + await httpApiHelper.apiReplication( + dc.state.node_rpc_url, + this.state.lastImport.data_set_id, + ); + + if (!response.replication_id) { + throw Error(`Failed to replicate. 
Got reply: ${JSON.stringify(response)}`); + } + + this.state.lastReplication = response; +}); + +Given(/^I query ([DC|DH|DV]+) node locally with path: "(\S+)", value: "(\S+)" and opcode: "(\S+)"$/, async function (targetNode, path, value, opcode) { + expect(targetNode, 'Node type can only be DC, DH or DV.').to.satisfy(val => (val === 'DC' || val === 'DH' || val === 'DV')); + expect(opcode, 'Opcode should only be EQ or IN.').to.satisfy(val => (val === 'EQ' || val === 'IN')); + expect(!!this.state[targetNode.toLowerCase()], 'Target node not defined. Use other step to define it.').to.be.equal(true); + + + const host = this.state[targetNode.toLowerCase()].state.node_rpc_url; + + const jsonQuery = { + query: + [ + { + path, + value, + opcode, + }, + ], + }; + const response = await httpApiHelper.apiQueryLocal(host, jsonQuery); + this.state.apiQueryLocalResponse = response; +}); + +Given(/^I query ([DC|DH|DV]+) node locally for last imported data set id$/, async function (targetNode) { + expect(!!this.state.lastImport, 'Nothing was imported. Use other step to do it.').to.be.equal(true); + expect(!!this.state.lastImport.data_set_id, 'Last import\'s data set id seems not defined').to.be.equal(true); + expect(targetNode, 'Node type can only be DC, DH or DV.').to.satisfy(val => (val === 'DC' || val === 'DH' || val === 'DV')); + expect(!!this.state[targetNode.toLowerCase()], 'Target node not defined. Use other step to define it.').to.be.equal(true); + + const host = this.state[targetNode.toLowerCase()].state.node_rpc_url; + const lastDataSetId = this.state.lastImport.data_set_id; + + const response = await httpApiHelper.apiQueryLocalImportByDataSetId(host, lastDataSetId); + this.state.apiQueryLocalImportByDataSetIdResponse = response; +}); + +Given(/^([DV|DV2]+) publishes query consisting of path: "(\S+)", value: "(\S+)" and opcode: "(\S+)" to the network$/, { timeout: 90000 }, async function (whichDV, path, value, opcode) { + expect(!!this.state[whichDV.toLowerCase()], 'DV/DV2 node not defined. Use other step to define it.').to.be.equal(true); + expect(opcode, 'Opcode should only be EQ or IN.').to.satisfy(val => (val === 'EQ' || val === 'IN')); + const dv = this.state[whichDV.toLowerCase()]; + + // TODO find way to pass jsonQuery directly to step definition + const jsonQuery = { + query: + [ + { + path, + value, + opcode, + }, + ], + }; + const queryNetworkResponse = + await httpApiHelper.apiQueryNetwork(dv.state.node_rpc_url, jsonQuery); + expect(Object.keys(queryNetworkResponse), 'Response should have message and query_id').to.have.members(['message', 'query_id']); + expect(queryNetworkResponse.message, 'Message should inform about successful sending of the query').to.be.equal('Query sent successfully.'); + this.state.lastQueryNetworkId = queryNetworkResponse.query_id; + return new Promise((accept, reject) => dv.once('dv-network-query-processed', () => accept())); +}); + +Given(/^the ([DV|DV2]+) purchases import from the last query from (a DH|the DC|a DV)$/, function (whichDV, fromWhom, done) { + expect(whichDV, 'Query can be made either by DV or DV2.').to.satisfy(val => (val === 'DV' || val === 'DV2')); + expect(!!this.state[whichDV.toLowerCase()], 'DV/DV2 node not defined. Use other step to define it.').to.be.equal(true); + expect(!!this.state.lastImport, 'Nothing was imported. 
Use other step to do it.').to.be.equal(true); + expect(this.state.lastQueryNetworkId, 'Query not published yet.').to.not.be.undefined; + + const { dc } = this.state; + const dv = this.state[whichDV.toLowerCase()]; + const queryId = this.state.lastQueryNetworkId; + const dataSetId = this.state.lastImport.data_set_id; + let sellerNode; + + const confirmationsSoFar = + dv.nodeConfirmsForDataSetId(queryId, dataSetId); + + expect(confirmationsSoFar).to.have.length.greaterThan(0); + + if (fromWhom === 'a DH') { + // Find first DH that replicated last import. + sellerNode = this.state.nodes.find(node => (node !== dc && node !== dv)); + } else if (fromWhom === 'the DC') { + sellerNode = dc; + } else if (fromWhom === 'a DV') { + if (whichDV === 'DV') { + console.log('DV cant buy from DV'); + process.exit(-1); + } + sellerNode = this.state.dv; + } + + expect(sellerNode, 'Didn\'t find seller node.').to.not.be.undefined; + const { replyId } = + dv.state.dataLocationQueriesConfirmations[queryId][sellerNode.state.identity]; + + expect(replyId).to.not.be.undefined; + + // Wait for purchase to happened and then exit. + dv.once('dataset-purchase', (purchase) => { + if (purchase.queryId === queryId && + purchase.replyId === replyId && + purchase.dataSetId === dataSetId) { + this.logger.info(`${dv.state.identity} finished purchase for data-set ID ${dataSetId} from sellerNode ${sellerNode.state.identity}`); + done(); + } + }); + + // Initiate actual purchase. + httpApiHelper.apiReadNetwork(dv.state.node_rpc_url, queryId, replyId, dataSetId) + .catch(error => done(error)); +}); + +Given(/^I attempt to withdraw (\d+) tokens from DC profile[s]*$/, { timeout: 420000 }, async function (tokenCount) { + // TODO expect tokenCount < profileBalance + expect(!!this.state.dc, 'DC node not defined. Use other step to define it.').to.be.equal(true); + + const { dc } = this.state; + const host = dc.state.node_rpc_url; + + const promises = []; + promises.push(new Promise((accept, reject) => { + dc.once('withdraw-initiated', () => accept()); + })); + promises.push(new Promise((accept, reject) => { + dc.once('withdraw-completed', () => accept()); + })); + promises.push(new Promise((accept, reject) => { + dc.once('withdraw-command-completed', () => accept()); + })); + await httpApiHelper.apiWithdraw(host, tokenCount); + return Promise.all(promises); +}); + +Given(/^I attempt to deposit (\d+) tokens from DC wallet[s]*$/, { timeout: 120000 }, async function (tokenCount) { + // TODO expect tokenCount < walletBalance + expect(!!this.state.dc, 'DC node not defined. Use other step to define it.').to.be.equal(true); + const { dc } = this.state; + const host = dc.state.node_rpc_url; + + const promises = []; + promises.push(new Promise((accept, reject) => { + dc.once('deposit-approved', () => accept()); + })); + promises.push(new Promise((accept, reject) => { + dc.once('deposit-command-completed', () => accept()); + })); + await httpApiHelper.apiDeposit(host, tokenCount); + return Promise.all(promises); +}); + +Given(/^DC calls consensus endpoint for sender: "(\S+)"$/, async function (senderId) { + expect(!!this.state.dc, 'DC node not defined. 
Use other step to define it.').to.be.equal(true); + const { dc } = this.state; + const host = dc.state.node_rpc_url; + + const consensusResponse = await httpApiHelper.apiConsensus(host, senderId); + expect(consensusResponse, 'Should have key called events').to.have.all.keys('events'); + this.state.lastConsensusResponse = consensusResponse; +}); diff --git a/test/bdd/steps/hooks.js b/test/bdd/steps/hooks.js index 7a841dc91c..2666ebbd45 100644 --- a/test/bdd/steps/hooks.js +++ b/test/bdd/steps/hooks.js @@ -2,6 +2,7 @@ require('dotenv').config(); const { Database } = require('arangojs'); const rc = require('rc'); +const rimraf = require('rimraf'); const defaultConfig = require('../../../config/config.json').development; const pjson = require('../../../package.json'); @@ -38,17 +39,23 @@ After(function (testCase, done) { this.logger.log('with status: ', testCase.result.status, ' and duration: ', testCase.result.duration, ' miliseconds.'); if (testCase.result.status === 'failed') { - this.logger.log('Oops, exception occured:'); + this.logger.log('Oops, exception occurred:'); this.logger.log(testCase.result.exception); } // Clean. const nodesWaits = [...this.state.nodes, ...this.state.bootstraps] + .filter(node => node.isRunning) .map(node => new Promise((accept, reject) => { node.on('finished', (code) => { if (code === 0) { - accept(); + if (this.parameters.keepFailedArtifacts && + testCase.result.status === 'passed') { + rimraf(node.options.configDir, () => accept()); + } else { + accept(); + } } else { reject(); } @@ -60,13 +67,17 @@ After(function (testCase, done) { this.state.localBlockchain.server.close(); } - Promise.all(nodesWaits); - - this.state.localBlockchain = null; - this.state.nodes = []; - this.state.bootstraps = []; - - done(); + Promise.all(nodesWaits).then(() => { + this.state.localBlockchain = null; + this.state.nodes = []; + this.state.bootstraps = []; + done(); + }).catch((error) => { + this.logger.error(error); + this.state.localBlockchain = null; + this.state.nodes = []; + this.state.bootstraps = []; + }); }); AfterAll(async function () { diff --git a/test/bdd/steps/lib/http-api-helper.js b/test/bdd/steps/lib/http-api-helper.js index a20309a68d..afc5563101 100644 --- a/test/bdd/steps/lib/http-api-helper.js +++ b/test/bdd/steps/lib/http-api-helper.js @@ -209,16 +209,16 @@ async function apiQueryLocal(nodeRpcUrl, jsonQuery) { * Fetch /api/query/local/import/{{data_set_id}} * * @param {string} nodeRpcUrl URL in following format http://host:port - * @param {string} importId ID. + * @param {string} dataSetId ID. * @return {Promise.} */ -async function apiQueryLocalImportByDataSetId(nodeRpcUrl, importId) { +async function apiQueryLocalImportByDataSetId(nodeRpcUrl, dataSetId) { return new Promise((accept, reject) => { request( { method: 'GET', headers: { 'Content-Type': 'application/json' }, - uri: `${nodeRpcUrl}/api/query/local/import/${importId}`, + uri: `${nodeRpcUrl}/api/query/local/import/${dataSetId}`, json: true, }, (err, res, body) => { diff --git a/test/bdd/steps/lib/otnode.js b/test/bdd/steps/lib/otnode.js index 001a9c69a4..75e432a468 100644 --- a/test/bdd/steps/lib/otnode.js +++ b/test/bdd/steps/lib/otnode.js @@ -26,12 +26,12 @@ const walletAmountRegex = /\b\d+\b/g; * One instance of OtNode class handles one running node. 
*/ class OtNode extends EventEmitter { - constructor({ logger, nodeConfiguration }) { + constructor({ logger, nodeConfiguration, appDataBaseDir }) { super(); this.id = uuidv4(); this.options = {}; - this.options.configDir = path.join(tmpdir, this.id); + this.options.configDir = path.join(appDataBaseDir || tmpdir, this.id); this.options.nodeConfiguration = nodeConfiguration || {}; this.options.nodeConfiguration = deepExtend( Object.assign({}, defaultConfiguration), // deepExtend changes original object. @@ -288,6 +288,8 @@ class OtNode extends EventEmitter { this.emit('deposit-command-completed'); } else if (line.match(/Replication window for .+ is closed\. Replicated to .+ peers\. Verified .+\./gi)) { this.emit('replication-window-closed'); + } else if (line.match(/Offer with internal ID .+ for data set .+ written to blockchain. Waiting for DHs\.\.\./gi)) { + this.emit('offer-written-blockchain'); } } diff --git a/test/bdd/steps/lib/utilities.js b/test/bdd/steps/lib/utilities.js index af5c02bea7..6543e64d65 100644 --- a/test/bdd/steps/lib/utilities.js +++ b/test/bdd/steps/lib/utilities.js @@ -2,6 +2,7 @@ const sortedStringify = require('sorted-json-stringify'); const { sha3_256 } = require('js-sha3'); const _ = require('lodash'); +const BN = require('bn.js'); function calculateImportHash(data) { @@ -58,6 +59,16 @@ function findVertexUid(verticesArray, vertex_type, sender_id, uid, data) { return response; } +/** + * Checks whether a given hex hash is zero, with or without the 0x prefix + * @param {string} hash + */ +function isZeroHash(hash) { + const num = new BN(this.denormalizeHex(hash), 16); + + return num.eqn(0); +} + module.exports = { calculateImportHash, @@ -65,4 +76,5 @@ module.exports = { denormalizeHex, findVertexIdValue, findVertexUid, + isZeroHash, }; diff --git a/test/bdd/steps/network.js b/test/bdd/steps/network.js index ea1a64971a..25082ff6ee 100644 --- a/test/bdd/steps/network.js +++ b/test/bdd/steps/network.js @@ -6,12 +6,9 @@ const { const { expect } = require('chai'); const uuidv4 = require('uuid/v4'); const request = require('request'); -const sleep = require('sleep-async')().Promise; const { deepEqual } = require('jsprim'); -const deepExtend = require('deep-extend'); const OtNode = require('./lib/otnode'); -const Utilities = require('../../../modules/Utilities'); const ImportUtilities = require('../../../modules/ImportUtilities'); const LocalBlockchain = require('./lib/local-blockchain'); const httpApiHelper = require('./lib/http-api-helper'); @@ -87,6 +84,7 @@ Given(/^(\d+) bootstrap is running$/, { timeout: 80000 }, function (nodeCount, d remoteWhitelist: ['localhost', '127.0.0.1'], }, }, + appDataBaseDir: this.parameters.appDataBaseDir, }); bootstrapNode.options.identity = bootstrapIdentity.ff62cb1f692431d901833d55b93c7d991b4087f1; @@ -126,6 +124,7 @@ Given(/^I setup (\d+) node[s]*$/, { timeout: 120000 }, function (nodeCount, done const newNode = new OtNode({ nodeConfiguration, + appDataBaseDir: this.parameters.appDataBaseDir, }); this.state.nodes.push(newNode); newNode.initialize(); @@ -156,7 +155,7 @@ Given(/^I start the node[s]*$/, { timeout: 3000000 }, function (done) { Promise.all(nodesStarts).then(() => done()); }); -Given(/^I stop the nodes[s]*$/, { timeout: 3000000 }, function () { +Given(/^I stop the node[s]*$/, { timeout: 3000000 }, function () { expect(this.state.bootstraps.length).to.be.greaterThan(0); expect(this.state.nodes.length).to.be.greaterThan(0); @@ -173,6 +172,63 @@ return 
Promise.all(nodesStops); }); +Given(/^I start the (\d+)[st|nd|rd|th]+ node$/, { timeout: 3000000 }, function (nodeIndex) { + expect(nodeIndex, 'Invalid index.').to.be.within(0, this.state.nodes.length); + expect(this.state.bootstraps.length).to.be.greaterThan(0); + expect(this.state.nodes.length).to.be.greaterThan(0); + + const node = this.state.nodes[nodeIndex - 1]; + expect(node.isRunning, 'Node should not be running.').to.be.false; + node.start(); + + return new Promise((accept, reject) => { + node.once('initialized', () => accept()); + node.once('error', reject); + }); +}); + + +Given(/^I stop the (\d+)[st|nd|rd|th]+ node$/, { timeout: 3000000 }, function (nodeIndex) { + expect(nodeIndex, 'Invalid index.').to.be.within(0, this.state.nodes.length); + expect(this.state.bootstraps.length).to.be.greaterThan(0); + expect(this.state.nodes.length).to.be.greaterThan(0); + + const node = this.state.nodes[nodeIndex - 1]; + expect(node.isRunning, 'Node should be running.').to.be.true; + node.stop(); + + return new Promise((accept, reject) => { + node.once('finished', () => accept()); + node.once('error', reject); + }); +}); + +Given(/^I stop \[(.+)\] nodes[s]*$/, { timeout: 3000000 }, function (nodeIndicesString) { + expect(this.state.bootstraps.length).to.be.greaterThan(0); + expect(this.state.nodes.length).to.be.greaterThan(0); + + const nodeIndices = JSON.parse(`[${nodeIndicesString}]`); + expect(nodeIndices, 'Provide at least one index.').to.have.lengthOf.above(0); + expect(nodeIndices, 'Indices out of boundaries.').to + .satisfy(indices => indices.reduce((acc, index) => (acc && index - 1 >= 0 && index <= this.state.nodes.length), true)); + expect(nodeIndices, 'Node expected to be running.').to + .satisfy(indices => indices.reduce((acc, index) => acc && this.state.nodes[index - 1].isRunning, true)); + + const nodesStops = []; + + nodeIndices.forEach((index) => { + const node = this.state.nodes[index - 1]; + nodesStops.push(new Promise((accept, reject) => { + node.once('finished', () => accept()); + node.once('error', reject); + })); + node.stop(); + }); + + return Promise.all(nodesStops); +}); + + Then(/^all nodes should be aware of each other$/, function (done) { expect(this.state.nodes.length, 'No started nodes').to.be.greaterThan(0); expect(this.state.bootstraps.length, 'No bootstrap nodes').to.be.greaterThan(0); @@ -216,31 +272,16 @@ Given(/^I use (\d+)[st|nd|rd|th]+ node as ([DC|DH|DV|DV2]+)$/, function (nodeInd this.state[nodeType.toLowerCase()] = this.state.nodes[nodeIndex - 1]; }); -Given(/^DC imports "([^"]*)" as ([GS1|WOT]+)$/, async function (importFilePath, importType) { - expect(importType, 'importType can only be GS1 or WOT.').to.satisfy(val => (val === 'GS1' || val === 'WOT')); - expect(!!this.state.dc, 'DC node not defined. Use other step to define it.').to.be.equal(true); - expect(this.state.nodes.length, 'No started nodes').to.be.greaterThan(0); - expect(this.state.bootstraps.length, 'No bootstrap nodes').to.be.greaterThan(0); - - const { dc } = this.state; - const host = dc.state.node_rpc_url; - - - const importResponse = await httpApiHelper.apiImport(host, importFilePath, importType); - - expect(importResponse).to.have.keys(['data_set_id', 'message', 'wallet']); - this.state.lastImport = importResponse; -}); - -Then(/^the last import's hash should be the same as one manually calculated$/, async function () { - expect(!!this.state.dc, 'DC node not defined. 
Use other step to define it.').to.be.equal(true); +Then(/^([DC|DV]+)'s last [import|purchase]+'s hash should be the same as one manually calculated$/, async function (nodeType) { + expect(nodeType, 'Node type can only be DC or DV.').to.satisfy(val => (val === 'DC' || val === 'DV')); + expect(!!this.state[nodeType.toLowerCase()], 'DC/DV node not defined. Use other step to define it.').to.be.equal(true); expect(this.state.nodes.length, 'No started nodes').to.be.greaterThan(0); expect(this.state.bootstraps.length, 'No bootstrap nodes').to.be.greaterThan(0); expect(!!this.state.lastImport, 'Last import didn\'t happen. Use other step to do it.').to.be.equal(true); - const { dc } = this.state; + const myNode = this.state[nodeType.toLowerCase()]; - const response = await httpApiHelper.apiImportInfo(dc.state.node_rpc_url, this.state.lastImport.data_set_id); + const response = await httpApiHelper.apiImportInfo(myNode.state.node_rpc_url, this.state.lastImport.data_set_id); expect(response, 'response should contain root_hash, import, transaction and data_provider_wallet keys').to.have.keys([ 'root_hash', 'import', @@ -264,7 +305,7 @@ Then(/^the last root hash should be the same as one manually calculated$/, async const myFingerprint = await httpApiHelper.apiFingerprint(dc.state.node_rpc_url, this.state.lastImport.data_set_id); expect(myFingerprint).to.have.keys(['root_hash']); - expect(Utilities.isZeroHash(myFingerprint.root_hash), 'root hash value should not be zero hash').to.be.equal(false); + expect(utilities.isZeroHash(myFingerprint.root_hash), 'root hash value should not be zero hash').to.be.equal(false); const myApiImportInfo = await httpApiHelper.apiImportInfo(dc.state.node_rpc_url, this.state.lastImport.data_set_id); @@ -275,87 +316,6 @@ Then(/^the last root hash should be the same as one manually calculated$/, async expect(myFingerprint.root_hash, 'Fingerprint from API endpoint and manually calculated should match').to.be.equal(myMerkle.tree.getRoot()); }); -Then(/^imported data is compliant with 01_Green_to_pink_shipment.xml file$/, async function () { - expect(!!this.state.dc, 'DC node not defined. Use other step to define it.').to.be.equal(true); - expect(this.state.nodes.length, 'No started nodes').to.be.greaterThan(0); - expect(this.state.bootstraps.length, 'No bootstrap nodes').to.be.greaterThan(0); - expect(!!this.state.lastImport, 'Last import didn\'t happen. 
Use other step to do it.').to.be.equal(true); - - const { dc } = this.state; - let data; - const myApiImportInfo = await httpApiHelper.apiImportInfo(dc.state.node_rpc_url, this.state.lastImport.data_set_id); - - expect( - utilities.findVertexIdValue(myApiImportInfo.import.vertices, 'IDENTIFIER', 'urn:ot:object:actor:id:Company_Green', 'uid', 'urn:ot:object:actor:id:Company_Green:2018-01-01T01:00:00.000-04:00Z-04:00').length, - 'There should be at least one such vertex', - ).to.be.above(0); - data = { - parent_id: 'urn:epc:id:sgln:Building_Green', - }; - expect( - utilities.findVertexUid(myApiImportInfo.import.vertices, 'LOCATION', 'urn:ot:object:actor:id:Company_Green', 'urn:epc:id:sgln:Building_Green_V2', data).length, - 'There should be at least one such vertex', - ).to.be.above(0); - data = { - category: 'Company', - name: 'Green', - object_class_id: 'Actor', - wallet: '0xBbAaAd7BD40602B78C0649032D2532dEFa23A4C0', - }; - expect( - utilities.findVertexUid(myApiImportInfo.import.vertices, 'ACTOR', 'urn:ot:object:actor:id:Company_Green', 'urn:ot:object:actor:id:Company_Green', data).length, - 'There should be at least one such vertex', - ).to.be.above(0); - data = { - category: 'Beverage', - description: 'Wine Bottle', - object_class_id: 'Product', - }; - expect( - utilities.findVertexUid(myApiImportInfo.import.vertices, 'PRODUCT', 'urn:ot:object:actor:id:Company_Green', 'urn:ot:object:product:id:Product_1', data).length, - 'There should be at least one such vertex', - ).to.be.above(0); - data = { - expirationDate: '2020-31-12T00:01:54Z', - parent_id: 'urn:ot:object:product:id:Product_1', - productId: 'urn:ot:object:product:id:Product_1', - productionDate: '2017-31-12T00:01:54Z', - quantities: { - 'urn:ot:object:actor:id:Company_Green:2018-01-01T01:00:00.000-04:00Z-04:00': { - PCS: '5d3381241af6b16260f680059e9042', - }, - }, - }; - expect( - utilities.findVertexUid(myApiImportInfo.import.vertices, 'BATCH', 'urn:ot:object:actor:id:Company_Green', 'urn:epc:id:sgtin:Batch_1', data).length, - 'There should be at least one such vertex', - ).to.be.above(0); - expect( - utilities.findVertexIdValue(myApiImportInfo.import.vertices, 'IDENTIFIER', 'urn:ot:object:actor:id:Company_Green', 'uid', 'urn:epc:id:sgln:Building_Green').length, - 'There should be at least one such vertex', - ).to.be.above(0); -}); - -Given(/^DC initiates the replication$/, { timeout: 60000 }, async function () { - expect(!!this.state.dc, 'DC node not defined. Use other step to define it.').to.be.equal(true); - expect(!!this.state.lastImport, 'Nothing was imported. Use other step to do it.').to.be.equal(true); - expect(this.state.nodes.length, 'No started nodes').to.be.greaterThan(0); - expect(this.state.bootstraps.length, 'No bootstrap nodes').to.be.greaterThan(0); - - const { dc } = this.state; - const response = - await httpApiHelper.apiReplication( - dc.state.node_rpc_url, - this.state.lastImport.data_set_id, - ); - - if (!response.replication_id) { - throw Error(`Failed to replicate. Got reply: ${JSON.stringify(response)}`); - } - - this.state.lastReplication = response; -}); - Given(/^I wait for replication[s] to finish$/, { timeout: 1200000 }, function () { expect(!!this.state.dc, 'DC node not defined. Use other step to define it.').to.be.equal(true); expect(!!this.state.lastImport, 'Nothing was imported. Use other step to do it.').to.be.equal(true); @@ -367,12 +327,14 @@ Given(/^I wait for replication[s] to finish$/, { timeout: 1200000 }, function () // All nodes including DC emit offer-finalized. 
     this.state.nodes.forEach((node) => {
-        promises.push(new Promise((acc) => {
-            node.once('offer-finalized', (offerId) => {
-                // TODO: Change API to connect internal offer ID and external offer ID.
-                acc();
-            });
-        }));
+        if (node.isRunning) {
+            promises.push(new Promise((acc) => {
+                node.once('offer-finalized', (offerId) => {
+                    // TODO: Change API to connect internal offer ID and external offer ID.
+                    acc();
+                });
+            }));
+        }
     });
 
     return Promise.all(promises);
@@ -387,8 +349,11 @@ Then(/^the last import should be the same on all nodes that replicated data$/, a
 
     const { dc } = this.state;
 
-    // Expect everyone to have data
-    expect(dc.state.replications.length, 'Not every node replicated data.').to.equal(this.state.nodes.length - 1);
+    // Expect everyone running to have data
+    expect(
+        dc.state.replications.length,
+        'Not every node replicated data.',
+    ).to.equal(this.state.nodes.reduce((acc, node) => acc + node.isRunning, -1)); // Start from -1. DC is not counted.
 
     // Get offer ID for last import.
     const lastOfferId =
@@ -501,7 +466,7 @@ Given(/^I remember previous import's fingerprint value$/, async function () {
         this.state.lastImport.data_set_id,
     );
     expect(myFingerprint).to.have.keys(['root_hash']);
-    expect(Utilities.isZeroHash(myFingerprint.root_hash), 'root hash value should not be zero hash').to.be.equal(false);
+    expect(utilities.isZeroHash(myFingerprint.root_hash), 'root hash value should not be zero hash').to.be.equal(false);
 
     // TODO need better namings
     this.state.lastMinusOneImportFingerprint = myFingerprint;
@@ -521,7 +486,7 @@ Then(/^checking again first import's root hash should point to remembered value$
         this.state.lastMinusOneImport.data_set_id,
    );
     expect(firstImportFingerprint).to.have.keys(['root_hash']);
-    expect(Utilities.isZeroHash(firstImportFingerprint.root_hash), 'root hash value should not be zero hash').to.be.equal(false);
+    expect(utilities.isZeroHash(firstImportFingerprint.root_hash), 'root hash value should not be zero hash').to.be.equal(false);
 
     expect(firstImportFingerprint.root_hash)
         .to.be.equal(this.state.lastMinusOneImportFingerprint.root_hash);
@@ -531,41 +496,6 @@ Then(/^checking again first import's root hash should point to remembered value$
     ).to.be.equal(true);
 });
 
-Given(/^I query ([DC|DH|DV]+) node locally with path: "(\S+)", value: "(\S+)" and opcode: "(\S+)"$/, async function (targetNode, path, value, opcode) {
-    expect(targetNode, 'Node type can only be DC, DH or DV.').to.satisfy(val => (val === 'DC' || val === 'DH' || val === 'DV'));
-    expect(opcode, 'Opcode should only be EQ or IN.').to.satisfy(val => (val === 'EQ' || val === 'IN'));
-    expect(!!this.state[targetNode.toLowerCase()], 'Target node not defined. Use other step to define it.').to.be.equal(true);
-
-
-    const host = this.state[targetNode.toLowerCase()].state.node_rpc_url;
-
-    const jsonQuery = {
-        query:
-            [
-                {
-                    path,
-                    value,
-                    opcode,
-                },
-            ],
-    };
-    const response = await httpApiHelper.apiQueryLocal(host, jsonQuery);
-    this.state.apiQueryLocalResponse = response;
-});
-
-Given(/^I query ([DC|DH|DV]+) node locally for last imported data set id$/, async function (targetNode) {
-    expect(!!this.state.lastImport, 'Nothing was imported. Use other step to do it.').to.be.equal(true);
-    expect(!!this.state.lastImport.data_set_id, 'Last imports data set id seems not defined').to.be.equal(true);
-    expect(targetNode, 'Node type can only be DC, DH or DV.').to.satisfy(val => (val === 'DC' || val === 'DH' || val === 'DV'));
-    expect(!!this.state[targetNode.toLowerCase()], 'Target node not defined. Use other step to define it.').to.be.equal(true);
-
-    const host = this.state[targetNode.toLowerCase()].state.node_rpc_url;
-    const lastDataSetId = this.state.lastImport.data_set_id;
-
-    const response = await httpApiHelper.apiQueryLocalImportByDataSetId(host, lastDataSetId);
-    this.state.apiQueryLocalImportByDataSetIdResponse = response;
-});
-
 Then(/^response should contain only last imported data set id$/, function () {
     expect(!!this.state.apiQueryLocalResponse, 'apiQueryLocal should have given some result').to.be.equal(true);
@@ -609,6 +539,7 @@ Given(/^I additionally setup (\d+) node[s]*$/, { timeout: 60000 }, function (nod
             },
             local_network_only: true,
         },
+        appDataBaseDir: this.parameters.appDataBaseDir,
     });
     this.state.nodes.push(newNode);
     newNode.initialize();
@@ -635,30 +566,6 @@ Given(/^I start additional node[s]*$/, { timeout: 60000 }, function () {
     return Promise.all(additionalNodesStarts);
 });
 
-Given(/^([DV|DV2]+) publishes query consisting of path: "(\S+)", value: "(\S+)" and opcode: "(\S+)" to the network$/, { timeout: 90000 }, async function (whichDV, path, value, opcode) {
-    expect(!!this.state[whichDV.toLowerCase()], 'DV/DV2 node not defined. Use other step to define it.').to.be.equal(true);
-    expect(opcode, 'Opcode should only be EQ or IN.').to.satisfy(val => (val === 'EQ' || val === 'IN'));
-    const dv = this.state[whichDV.toLowerCase()];
-
-    // TODO find way to pass jsonQuery directly to step definition
-    const jsonQuery = {
-        query:
-            [
-                {
-                    path,
-                    value,
-                    opcode,
-                },
-            ],
-    };
-    const queryNetworkResponse =
-        await httpApiHelper.apiQueryNetwork(dv.state.node_rpc_url, jsonQuery);
-    expect(Object.keys(queryNetworkResponse), 'Reponse should have message and query_id').to.have.members(['message', 'query_id']);
-    expect(queryNetworkResponse.message, 'Message should inform about successful sending of the query').to.be.equal('Query sent successfully.');
-    this.state.lastQueryNetworkId = queryNetworkResponse.query_id;
-    return new Promise((accept, reject) => dv.once('dv-network-query-processed', () => accept()));
-});
-
 Then(/^all nodes with last import should answer to last network query by ([DV|DV2]+)$/, { timeout: 90000 }, async function (whichDV) {
     expect(!!this.state[whichDV.toLowerCase()], 'DV/DV2 node not defined. Use other step to define it.').to.be.equal(true);
     expect(this.state.lastQueryNetworkId, 'Query not published yet.').to.not.be.undefined;
@@ -711,78 +618,6 @@ Then(/^all nodes with last import should answer to last network query by ([DV|DV
     });
 });
 
-Given(/^the ([DV|DV2]+) purchases import from the last query from (a DH|the DC|a DV)$/, function (whichDV, fromWhom, done) {
-    expect(whichDV, 'Query can be made either by DV or DV2.').to.satisfy(val => (val === 'DV' || val === 'DV2'));
-    expect(!!this.state[whichDV.toLowerCase()], 'DV/DV2 node not defined. Use other step to define it.').to.be.equal(true);
-    expect(!!this.state.lastImport, 'Nothing was imported. Use other step to do it.').to.be.equal(true);
-    expect(this.state.lastQueryNetworkId, 'Query not published yet.').to.not.be.undefined;
-
-    const { dc } = this.state;
-    const dv = this.state[whichDV.toLowerCase()];
-    const queryId = this.state.lastQueryNetworkId;
-    const dataSetId = this.state.lastImport.data_set_id;
-    let sellerNode;
-
-    const confirmationsSoFar =
-        dv.nodeConfirmsForDataSetId(queryId, dataSetId);
-
-    expect(confirmationsSoFar).to.have.length.greaterThan(0);
-
-    if (fromWhom === 'a DH') {
-        // Find first DH that replicated last import.
-        sellerNode = this.state.nodes.find(node => (node !== dc && node !== dv));
-    } else if (fromWhom === 'the DC') {
-        sellerNode = dc;
-    } else if (fromWhom === 'a DV') {
-        if (whichDV === 'DV') {
-            console.log('DV cant buy from DV');
-            process.exit(-1);
-        }
-        sellerNode = this.state.dv;
-    }
-
-    expect(sellerNode, 'Didn\'t find seller node.').to.not.be.undefined;
-    const { replyId } =
-        dv.state.dataLocationQueriesConfirmations[queryId][sellerNode.state.identity];
-
-    expect(replyId).to.not.be.undefined;
-
-    // Wait for purchase to happened and then exit.
-    dv.once('dataset-purchase', (purchase) => {
-        if (purchase.queryId === queryId &&
-            purchase.replyId === replyId &&
-            purchase.dataSetId === dataSetId) {
-            this.logger.info(`${dv.state.identity} finished purchase for data-set ID ${dataSetId} from sellerNode ${sellerNode.state.identity}`);
-            done();
-        }
-    });
-
-    // Initiate actual purchase.
-    httpApiHelper.apiReadNetwork(dv.state.node_rpc_url, queryId, replyId, dataSetId)
-        .catch(error => done(error));
-});
-
-Given(/^I attempt to withdraw (\d+) tokens from DC profile[s]*$/, { timeout: 420000 }, async function (tokenCount) {
-    // TODO expect tokenCount < profileBalance
-    expect(!!this.state.dc, 'DC node not defined. Use other step to define it.').to.be.equal(true);
-
-    const { dc } = this.state;
-    const host = dc.state.node_rpc_url;
-
-    const promises = [];
-    promises.push(new Promise((accept, reject) => {
-        dc.once('withdraw-initiated', () => accept());
-    }));
-    promises.push(new Promise((accept, reject) => {
-        dc.once('withdraw-completed', () => accept());
-    }));
-    promises.push(new Promise((accept, reject) => {
-        dc.once('withdraw-command-completed', () => accept());
-    }));
-    await httpApiHelper.apiWithdraw(host, tokenCount);
-    return Promise.all(promises);
-});
-
 Then(/^DC wallet and DC profile balances should diff by (\d+) with rounding error of (\d+.\d{1,2})$/, function (tokenDiff, roundingError) {
     expect(!!this.state.dc, 'DC node not defined. Use other step to define it.').to.be.equal(true);
     const { dc } = this.state;
@@ -798,33 +633,6 @@ Then(/^DC wallet and DC profile balances should diff by (\d+) with rounding erro
     expect(Math.abs(dc.state.newWalletBalance - dc.state.oldWalletBalance) > lowerLimit, 'Wallet diff should be approx equal to withdrawal amount').to.be.true;
 });
 
-Given(/^I attempt to deposit (\d+) tokens from DC wallet[s]*$/, { timeout: 120000 }, async function (tokenCount) {
-    // TODO expect tokenCount < walletBalance
-    expect(!!this.state.dc, 'DC node not defined. Use other step to define it.').to.be.equal(true);
-    const { dc } = this.state;
-    const host = dc.state.node_rpc_url;
-
-    const promises = [];
-    promises.push(new Promise((accept, reject) => {
-        dc.once('deposit-approved', () => accept());
-    }));
-    promises.push(new Promise((accept, reject) => {
-        dc.once('deposit-command-completed', () => accept());
-    }));
-    await httpApiHelper.apiDeposit(host, tokenCount);
-    return Promise.all(promises);
-});
-
-Given(/^DC calls consensus endpoint for sender: "(\S+)"$/, async function (senderId) {
-    expect(!!this.state.dc, 'DC node not defined. Use other step to define it.').to.be.equal(true);
-    const { dc } = this.state;
-    const host = dc.state.node_rpc_url;
-
-    const consensusResponse = await httpApiHelper.apiConsensus(host, senderId);
-    expect(consensusResponse, 'Should have key called events').to.have.all.keys('events');
-    this.state.lastConsensusResponse = consensusResponse;
-});
-
 Then(/^last consensus response should have (\d+) event with (\d+) match[es]*$/, function (eventsCount, matchesCount) {
     expect(!!this.state.dc, 'DC node not defined. Use other step to define it.').to.be.equal(true);
     expect(this.state.lastConsensusResponse, 'lastConsensusResponse should be already defined').to.not.be.undefined;
@@ -857,6 +665,20 @@ Given(/^DC waits for replication window to close$/, { timeout: 180000 }, functio
     });
 });
 
+Given(/^DC waits for last offer to get written to blockchain$/, { timeout: 180000 }, function (done) {
+    expect(!!this.state.dc, 'DC node not defined. Use other step to define it.').to.be.equal(true);
+    expect(!!this.state.lastImport, 'Nothing was imported. Use other step to do it.').to.be.equal(true);
+    expect(!!this.state.lastReplication, 'Nothing was replicated. Use other step to do it.').to.be.equal(true);
+    expect(this.state.nodes.length, 'No started nodes').to.be.greaterThan(0);
+    expect(this.state.bootstraps.length, 'No bootstrap nodes').to.be.greaterThan(0);
+
+    const { dc } = this.state;
+
+    // Resolve the step once the DC reports the offer transaction written on the blockchain.
+    dc.once('offer-written-blockchain', () => {
+        done();
+    });
+});
+
 Given(/^API calls will be forbidden/, { timeout: 180000 }, function (done) {
     const { dc } = this.state;
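Note on the replication-count hunk above: this.state.nodes.reduce((acc, node) => acc + node.isRunning, -1) counts running nodes by relying on JavaScript coercing the boolean isRunning flag to a number during addition, and the -1 seed excludes the DC, which is itself present in this.state.nodes. A minimal, self-contained sketch of that counting idiom, using a made-up node list purely for illustration:

    // true coerces to 1 and false to 0 when added to a number, so the
    // reduce tallies running nodes; the -1 seed leaves the DC out.
    const nodes = [
        { isRunning: true },  // the DC itself, excluded by the -1 seed
        { isRunning: true },  // a running DH
        { isRunning: false }, // a stopped DH that holds no replication
        { isRunning: true },  // another running DH
    ];
    const expectedReplications = nodes.reduce((acc, node) => acc + node.isRunning, -1);
    console.log(expectedReplications); // 2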