diff --git a/.travis.yml b/.travis.yml
index b2f95b4ed0..ec9be54054 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -3,7 +3,9 @@ language: node_js
 node_js:
   - "9"
 env:
-  - NODE_ENV=development ARTIFACTS_DIR=$TRAVIS_BUILD_DIR/artifacts CUCUMBER_ARTIFACTS_DIR=$ARTIFACTS_DIR/cucumber
+  - NODE_ENV=development ARTIFACTS_DIR=$TRAVIS_BUILD_DIR/artifacts CUCUMBER_ARTIFACTS_DIR=$ARTIFACTS_DIR/cucumber BDD_SECTION=first
+  - NODE_ENV=development ARTIFACTS_DIR=$TRAVIS_BUILD_DIR/artifacts CUCUMBER_ARTIFACTS_DIR=$ARTIFACTS_DIR/cucumber BDD_SECTION=second
+
 sudo: enabled
 addons:
   apt:
@@ -23,21 +25,27 @@ before_script:
   - npm install -g truffle@5.0.0-beta.1 &> /dev/null
 script:
   - npm run lint
-  - if [ "$TRAVIS_EVENT_TYPE" != "push" ]; then
-      npm run test:bdd:dryrun
-      npm run test:bdd -- --world-parameters '{"appDataBaseDir":"$CUCUMBER_ARTIFACTS_DIR","keepFailedArtifacts":true}';
+  - if [ "$TRAVIS_EVENT_TYPE" == "pull_request" ]; then
+      npm run test:bdd:dryrun;
+    fi
+  - if [[ ( "$TRAVIS_EVENT_TYPE" == "pull_request" && "$BDD_SECTION" == "first" ) || ( "$TRAVIS_EVENT_TYPE" == "cron" && "$BDD_SECTION" == "first" ) ]]; then
+      npm run test:bdd:first -- --world-parameters '{"appDataBaseDir":"$CUCUMBER_ARTIFACTS_DIR","keepFailedArtifacts":true}';
+    fi
+  - if [[ ( "$TRAVIS_EVENT_TYPE" == "pull_request" && "$BDD_SECTION" == "second" ) || ( "$TRAVIS_EVENT_TYPE" == "cron" && "$BDD_SECTION" == "second" ) ]]; then
+      npm run test:bdd:second -- --world-parameters '{"appDataBaseDir":"$CUCUMBER_ARTIFACTS_DIR","keepFailedArtifacts":true}';
+    fi
+  - if [[ ( "$TRAVIS_EVENT_TYPE" == "pull_request" && "$BDD_SECTION" == "second" ) || ( "$TRAVIS_EVENT_TYPE" == "push" && "$BDD_SECTION" == "first" ) || ( "$TRAVIS_EVENT_TYPE" == "cron" && "$BDD_SECTION" == "first" ) ]]; then
+      npm test 2> $ARTIFACTS_DIR/mocha-logs.log;
     fi
-  # checks for arangodb based solution
-  - npm test 2> $ARTIFACTS_DIR/mocha-logs.log
-  - npm start &> $ARTIFACTS_DIR/app-start.log &
-  - sleep 10
-  - jobs
-  - if [ -n "$(jobs -p)" ]; then kill %1; fi
   # compile and check Smart Contracts
   - ganache-cli -i 5777 -p 7545 -l 10000000 -m "aspect ask story desert profit engage tuition leave fade giraffe exclude brief" &> $ARTIFACTS_DIR/ganache.log &
   - cd modules/Blockchain/Ethereum
-  - truffle test --network test > $ARTIFACTS_DIR/truffle-test.log
-  - rm -rf build && truffle migrate --reset --compile-all --network ganache > $ARTIFACTS_DIR/truffle-migrate.log
+  - if [[ ( "$TRAVIS_EVENT_TYPE" == "pull_request" && "$BDD_SECTION" == "second" ) || ( "$TRAVIS_EVENT_TYPE" == "push" && "$BDD_SECTION" == "second" ) || ( "$TRAVIS_EVENT_TYPE" == "cron" && "$BDD_SECTION" == "second" ) ]]; then
+      truffle test --network test > $ARTIFACTS_DIR/truffle-test.log;
+    fi
+  - if [[ ( "$TRAVIS_EVENT_TYPE" == "pull_request" && "$BDD_SECTION" == "second" ) || ( "$TRAVIS_EVENT_TYPE" == "push" && "$BDD_SECTION" == "second" ) || ( "$TRAVIS_EVENT_TYPE" == "cron" && "$BDD_SECTION" == "second" ) ]]; then
+      rm -rf build && truffle migrate --reset --compile-all --network ganache > $ARTIFACTS_DIR/truffle-migrate.log;
+    fi
   - cd $TRAVIS_BUILD_DIR
   - jobs
   - kill -9 %1
diff --git a/CHANGELOG.md b/CHANGELOG.md
deleted file mode 100644
index a9780f6a6a..0000000000
--- a/CHANGELOG.md
+++ /dev/null
@@ -1,253 +0,0 @@
-# Changelog
-
-## V1.1.0b release notes (20/07/2018)
-# V1.1.0b release notes
-## Release date: July 20th, 2018
-
-This is the first major version increment during the TestNet beta phase. It includes lots of fixes and significantly improved network stability and flow between nodes.
-
-## New features and fixes:
-- Network fixes
-- OT flow fixes (challenges, etc.)
-- Improved logging
-- Improved docker image
-- Auto-update fixes
-- Houston features
-- Increased replication
----
-
-## V1.0b “Apollo” release notes (29/06/2018)
-# V1.0b “Apollo” release notes
-## Release date: June 29th, 2018
-
-Apollo is the first beta version of the testnet protocol and implements all the features set for development. With the growing usage of the protocol and identified improvement proposals, we will be moving towards a mainnet launch in Q3.
-
-
-## New features and fixes:
-- Exposed API for local node operations
-- Upgraded and improved network read flow
-- New version of documentation at http://docs.origintrail.io
-- Several bugs fixed
----
-
-## V1.0b-RC “Lunar Orbiter” release notes (18/06/2018)
-# V1.0b-RC “Lunar Orbiter” release notes
-## Release date: June 18th, 2018
-
-### The Lunar Orbiter release
-Lunar Orbiter takes the alpha phase to the finish line by implementing two final important improvements: the latest version of the payment mechanism, and version two of the zero-knowledge privacy layer logic. The payment mechanism is now extended to support trustless, monetized data reading from the OriginTrail Decentralized Network (ODN). In this way, data creator (DC) and data holder (DH) nodes are able to charge data viewer (DV) nodes a fee for providing the requested data.
-
-## New features:
-- V2 of consensus check with privacy layer (zk)
-- V2 of payment mechanism with testnet tokens
-- Standardized namespaces in XML
-- Kademlia security improved
-- Separated import from replication
-- Updated Smart Contracts
-- Developed a large number of test XML files
-
-## Fixes:
-- Eclipse bugs resolved
-- Added license to repository
-- Improved tests and reduced CI execution time
-- Additional refactoring of the code
-- Increased test coverage
-- Numerous small fixes
----
-
-## V0.9.0a “Explorer” release notes (04/06/2018)
-# V0.9.0a “Explorer” release notes
-## Release date: June 4th, 2018
-
-### The Explorer release
-Explorer now supports more features on the privacy layer, which includes the zero-knowledge algorithm published a month ago in Zond. It brings the ability to handle private data within the system in such a way that the owner retains control of the information on their DC (data creator) node, while publishing cryptographic commitments in the system to the DH (data holder) nodes involved in replication.
-
-Our data importer now supports the "Web of Things" (WoT) standard developed by the World Wide Web Consortium (W3C). It is a first major step towards IoT support and one of the major requirements coming from the first use cases, our partners, and the community.
-
-## New features:
-- Improved privacy layer implementation
-- Web of Things model supported by our data importer
-- New version of Houston (GUI) with data graph visualisation and improved interface
-- Remote control API improvement
-- IoC implementation in the code
-
-## Fixes:
-- Importer error handling fixes
-- Better organisation of the code and refactoring of the singleton classes - IoC
-- Improved blockchain tests - truffle test suite
-- Additional refactoring of the code
-- Increased test coverage
----
-
-## V0.8.0a “Surveyor” release notes (04/06/2018)
-# V0.8.0a “Surveyor” release notes
-## Release date: May 21st, 2018
-
-### The Surveyor release introduces:
-- an improved version of the bidding mechanism and
-- the first version of the long-awaited consensus check, which utilizes the zero-knowledge privacy layer we have been working on in the previous releases.
-
-## New features:
-- More efficient bidding on agreements for nodes
-- Consensus check on top of the zero-knowledge privacy layer allows validating the observed supply chain
-- Remote control API improvement
-- Created new smart contract for Bidding
-- Improved initial configuration verification
-
-## Fixes:
-- Several configuration bugs
-- More verbose error reporting
-- Additional refactoring of the code
-- Increased test coverage and started integration tests
----
-
-## V0.7.0a “Zond” release notes (07/05/2018)
-# V0.7.0a “Zond” release notes
-## Release date: May 7th, 2018
-
-### The Zond release introduces:
-- A standardized and documented OriginTrail graph ontology structure and compatibility with the Neo4j graph database. This compatibility enhances flexibility in the data layer, as it allows for selecting more than one underlying database system. Neo4j is a graph database platform that powers many of today's mission-critical enterprise applications, including artificial intelligence, fraud detection and recommendations. It is used by companies such as Microsoft, Walmart, eBay, Airbnb, Volvo, Cisco, LinkedIn, UBS, Novo Nordisk, and many others.
-- The first version of the zero-knowledge privacy protocol for sharing sensitive data in an encrypted, but publicly verifiable form. This makes the OriginTrail protocol more attractive to companies who would like the competitive advantage of increased transparency and efficiency in their supply chains.
-
-## New features:
-- First iteration of the zero-knowledge validation algorithm
-- Neo4j compatibility
-- Implementation of new graph ontology
-- First version of UI
-- Created new smart contract for Bidding
-- Created initial configuration verification - balance on wallet etc.
-
-## Fixes:
-- GS1 Importer refactoring
-- Completely refactored node communication flow in the bidding mechanism
-- Fixed timed events and updated them to work on Smart contract events
-- Increased test coverage
----
-
-## V0.6.0a “Kosmos” release notes (23/04/2018)
-# V0.6.0a “Kosmos” release notes
-## Release date: April 23rd, 2018
-
-The Kosmos release brings three vital improvements to the system. They are:
-
-- Full GS1 standards validation: We have improved the GS1 import integration experience by adding full GS1 data validation. This helps speed up integrations.
-- The first implementation of the market bidding mechanism.
-- Fully implemented blockchain fingerprinting virtualization documentation, which explains how the blockchain layer of the protocol will become compatible with blockchains other than Ethereum.
-
-## New features:
-- Implemented the first version of the bidding mechanism covering the full flow - offer broadcast, sending bids, offer reveal and choose
-- Finished abstraction of the blockchain interface
-- Finished GS1 import validation with error reporting
-- Created new smart contract for Bidding
-
-## Fixes:
-- Improved unit test coverage to 80% of covered modules
-- Separated logic for DC and DH in the code
-- Implemented the Sequelize ORM instead of raw queries for the majority of database calls
-- Reintroduced dotenv for easier initial configuration
-- Improved network module - several bugs fixed in the Kademlia implementation
-- Polished and improved the refactoring of the code done in the previous release
-
-
-
----
-
-## V0.5.0a “Ranger” release notes (09/04/2018)
-# V0.5.0a “Ranger” release notes
-## Release date: April 9th, 2018
-
-This release introduces a generalized graph structure document that is able to cover a wide range of use cases. We made a major update of the graph logic based on usage insights and partner companies' suggestions.
-
-We also implemented a more advanced challenging mechanism (DC sending challenges to DH) and an improved payment mechanism that allows the DH to pick up the agreement fee anytime with a single transaction.
-
-Above all, we did a major refactoring of the code, making it much more scalable and production-ready. A new, improved version of the Kademlia protocol was implemented, including numerous protections to mitigate DoS, Sybil and Eclipse attacks. We introduced better organization of the database and switched full messaging from the API to Kademlia direct messages.
-
-## New features:
-- [Documented Graph structure](https://github.com/OriginTrail/ot-node/wiki/Graph-structure-in-OriginTrail-Data-Layer---version-1.0)
-- Improved payment mechanism
-- Full Kademlia implementation
-- DoS protection
-- Sybil and Eclipse attack protection
-- Automatic NAT traversal
-- Cryptographic Identities
-- RSA key generation and HTTPS transport for Kademlia
-- Reduced number of servers to only one (change from IPC + RPC to just the OT node)
-- Possibility to connect several nodes
-- Introduced SQLite instead of MongoDB for System storage, including a database-agnostic interface
-- Database migrations
-- Transaction Queue
-- Improved unit test coverage of the code
-
-## Fixes:
-- Transaction collisions fixed
-- Major refactor of the code
-- Most functions documented in the code (docblocks)
-- Better logging system
-- Many deprecated packages have been replaced
-
-### NOTE: This release brings many breaking changes, so the *installation instructions* will be updated in the following days!
-
-
----
-
-## V0.4.0a “Mechta” release notes (26/03/2018)
-# V0.4.0a “Mechta” release notes
-## Release date: March 26th, 2018
-
-The second release in March features the fully compliant GS1 standard importer, as well as the documented incentivisation system and taxonomy of different network entities, which can be found on our Wiki [here](https://github.com/OriginTrail/ot-node/wiki/OriginTrail-Incentive-model-v1).
-
-Apart from the GS1 standard importer as a major milestone, this release should be considered an intermediate release used for the purposes of conceptualising the full incentivisation model that will be implemented during the following releases.
-
-This release also brings a lot of improvements for our contributors, like a major cleanup of the source code, the most important methods covered with unit tests, and eslint standards completely defined with the code properly linted.
-
-## New features:
-- Fully GS1 standard compliant importer
-- [Documented incentive model](https://github.com/OriginTrail/ot-node/wiki/OriginTrail-Incentive-model-v1)
-- Significantly improved unit test coverage of the code
-- Database tests
-- Code is completely linted
-
-## Fixes:
-- Fixed transaction bugs
-- Fixed RPC start bug on OSX
-- Refactoring of utilities and several other classes
-- Integration of Eslint into TravisCI
-
-
----
-
-## V0.3.0a “Luna” release notes (12/03/2018)
-# V0.3.0a “Luna” release notes
-## Release date: March 12th, 2018
-
-The first release in March features the first test compensation system with alpha tokens on the Ethereum Rinkeby test network. Utilizing a custom-designed TAR (“Test-Answer-Receipt”) protocol, it allows for random checks on the availability of OT services and data provided by the DH (data holder) nodes, according to the predefined “deal” (service conditions, price, and longevity). The compensation is handled according to the results of the checks and allows the DH node to independently collect tokens from a Service escrow smart contract.
-
-The node installation instructions can be found [here](https://github.com/OriginTrail/ot-node/wiki/Integration-Instructions). Alpha tokens can be obtained by request from our team at support@origin-trail.com - send us your Rinkeby wallet addresses and our team will forward you the test tokens. For future versions, we plan on implementing a token faucet.
-
-This release should be considered an intermediate release used for the purposes of experimentation and for determining the appropriate token compensation mechanisms.
-
-## New features:
-
-- Initial payment mechanism using the Alpha test token on the __Rinkeby network__, utilizing an Escrow smart contract
-- Signed compensation receipts and payout method
-- Graph hashing using Merkle trees, improved by utilizing one fingerprint per import
-- Updated XML importer and XML structure according to business needs observed in pilots
-- Introduced JSON file importer for replication
-- Node vertex encryption utilizing RSA
-- Automated (proto) Proof-of-Service testing based on random checks and the TAR protocol
-- Usage of __MongoDB__ for storing session data
-- Using uPnP port forwarding for nodes behind NAT
-- Various interface improvements - both servers (IPC and RPC) can be started with the single command `npm start`, new versions of the node can be applied automatically by running `node update`, increased verbosity of terminal messages, logging into the file `log.log`, etc.
-
-
-## Notes:
-
-- The current prototype doesn’t support full replication yet. Currently, there is one DH node associated with each DC node, for testing purposes, while further improvements are coming with the [Ranger and Kosmos](https://origintrail.io/roadmap) releases.
-- Additionally, the next release ([Mechta](https://origintrail.io/roadmap)) will feature Kadence instead of Kad and Quasar because of their deprecation, as well as to address the NAT port forwarding issue
-
-## Known issues:
-
-- Currently, messages between DC (data creator) and DH (data holder) nodes are not signed, so Sybil attacks are possible. This will be addressed in the [Ranger](https://origintrail.io/roadmap) release
-- The DH node does not automatically verify that the escrow was created, which puts the DC in a favorable position. This will be addressed by the [Kosmos](https://origintrail.io/roadmap) release
-- The implemented uPnP NAT port forwarding works only if the node is behind a single router with a public IP address, not behind multiple chained routers.
-- Minimum RAM for the install process is 1GB; on servers with __512MB__ or less, a swap file is needed
diff --git a/importers/xml_examples/Basic/01_Green_to_pink_shipment_modified_event_timestamp.xml b/importers/xml_examples/Basic/01_Green_to_pink_shipment_modified_event_timestamp.xml
new file mode 100644
index 0000000000..4e7a2394be
--- /dev/null
+++ b/importers/xml_examples/Basic/01_Green_to_pink_shipment_modified_event_timestamp.xml
@@ -0,0 +1,136 @@
+
+
+
+ 1.0
+
+ urn:ot:object:actor:id:Company_Green
+
+ Abraham Smith
+ abraham_Smith@green.com
+
+
+
+ urn:ot:object:actor:id:Company_Pink
+
+ Betty Johnson
+ betty@pink.com
+
+
+
+ GS1
+ V1.3
+ 100001
+ Shipment
+ 2018-01-01T00:31:52Z
+
+
+
+ BusinessProcess
+ Shipment/version2-251
+ EDI-Shipment
+
+
+
+
+
+
+
+
+
+ Green
+ Company
+ 0xBbAaAd7BD40602B78C0649032D2532dEFa23A4C0
+
+
+ Pink
+ Company
+ 0xFfDDAd7BD40602B78C0649032D2532dEFa23A4C0
+
+
+
+
+
+
+
+ Building
+ Producer Warehouses
+ urn:ot:object:actor:id:Company_Green
+
+ urn:epc:id:sgln:Building_Green_V1
+ urn:epc:id:sgln:Building_Green_V2
+
+
+
+ Building
+ Pink distributor warehouse
+ urn:ot:object:actor:id:Company_Pink
+
+
+
+
+
+
+
+ Beverage
+ Wine Bottle
+
+
+
+
+
+
+
+ urn:ot:object:product:id:Product_1
+ 2017-31-12T00:01:54Z
+ 2020-31-12T00:01:54Z
+
+
+
+
+
+
+
+
+
+
+
+ 2018-02-01T01:22:01.000-04:00
+ -04:00
+
+ urn:epc:id:sgtin:Batch_1
+
+ OBSERVE
+ urn:epcglobal:cbv:bizstep:shipping
+ urn:epcglobal:cbv:disp:active
+
+ urn:epc:id:sgln:Building_Green_V2
+
+
+ urn:epc:id:sgln:Building_Green
+
+
+
+
+ urn:epc:id:sgtin:Batch_1
+ 10
+ PCS
+
+
+
+ GREENSHIP1
+ urn:ot:event:Transport
+ urn:ot:event:Ownership
+ Sales
+
+ urn:epc:id:sgln:Building_Green
+
+
+ urn:epc:id:sgln:Building_Pink
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/migrations/20180407123625-create-holding-data.js b/migrations/20180407123625-create-holding-data.js
index 2f479dcfa6..b7d349123a 100644
--- a/migrations/20180407123625-create-holding-data.js
+++ b/migrations/20180407123625-create-holding-data.js
@@ -35,6 +35,10 @@ module.exports = {
             allowNull: false,
             type: Sequelize.STRING,
         },
+        color: {
+            allowNull: false,
+            type: Sequelize.INTEGER,
+        },
     }),
     down: (queryInterface, Sequelize) => queryInterface.dropTable('holding_data'),
 };
diff --git a/migrations/20180407130255-create-replicated-data.js b/migrations/20180407130255-create-replicated-data.js
index 7093dd748c..4e8b93ad1c 100644
--- a/migrations/20180407130255-create-replicated-data.js
+++ b/migrations/20180407130255-create-replicated-data.js
@@ -25,7 +25,7 @@ module.exports = {
         },
         color: {
             allowNull: false,
-            type: Sequelize.STRING,
+            type: Sequelize.INTEGER,
         },
         litigation_public_key: {
             allowNull: false,
diff --git a/models/holding_data.js b/models/holding_data.js
index a86e9bb0b1..083bb5bfb2 100644
--- a/models/holding_data.js
+++ b/models/holding_data.js
@@ -8,6 +8,7 @@ module.exports = (sequelize, DataTypes) => {
         distribution_private_key: DataTypes.STRING,
         distribution_epk: DataTypes.STRING,
         transaction_hash: DataTypes.STRING(128),
+        color: DataTypes.INTEGER,
     }, {
         tableName: 'holding_data',
     });
diff --git a/models/replicated_data.js b/models/replicated_data.js
index 7ddf471e75..ab816a6a3f 100644
--- a/models/replicated_data.js
+++ b/models/replicated_data.js
@@ -5,7 +5,7 @@ module.exports = (sequelize, DataTypes) => {
     dh_wallet: DataTypes.STRING,
     dh_identity: DataTypes.STRING,
     offer_id: DataTypes.STRING,
-    color: DataTypes.STRING,
+    color: DataTypes.INTEGER,
     litigation_public_key: DataTypes.STRING,
     distribution_public_key: DataTypes.STRING,
     distribution_private_key: DataTypes.STRING,
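The migrations and models above retype `color` from a string to an integer. `castColorToNumber` itself lives in the replication service and is not shown in this patch, so the body below is only a hypothetical sketch of the mapping implied by the `(0=RED,1=GREEN,2=BLUE)` doc comments and the `colorNumber.toNumber()` call sites added later in the diff:

```js
const BN = require('bn.js');

// Hypothetical sketch: the color <-> number mapping assumed by the new
// integer `color` columns. The real castColorToNumber is defined in
// modules/service/replication-service.js and is not part of this patch.
function castColorToNumber(color) {
    switch (color) {
    case 'red': return new BN(0, 10);
    case 'green': return new BN(1, 10);
    case 'blue': return new BN(2, 10);
    default: throw new Error(`Failed to cast color ${color}`);
    }
}

// Usage mirroring dc-service.js below: persist the numeric form.
const colorNumber = castColorToNumber('green').toNumber(); // 1
```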
diff --git a/modules/DVService.js b/modules/DVService.js
index 5eeb470d57..fc395fafd6 100644
--- a/modules/DVService.js
+++ b/modules/DVService.js
@@ -88,8 +88,7 @@ class DVService {
             throw errorMessage;
         }
 
-        const merkle = await ImportUtilities.merkleStructure(vertices.filter(vertex =>
-            vertex.vertex_type !== 'CLASS'), edges);
+        const merkle = await ImportUtilities.merkleStructure(vertices, edges);
         const rootHash = merkle.tree.getRoot();
 
         if (escrow.distribution_root_hash !== rootHash) {
@@ -100,9 +99,8 @@ class DVService {
             throw errorMessage;
         }
 
-        let importResult;
         try {
-            importResult = await this.importer.importJSON({
+            await this.importer.importJSON({
                 vertices: message.encryptedData.vertices,
                 edges: message.encryptedData.edges,
                 import_id: importId,
diff --git a/modules/Database/Arangojs.js b/modules/Database/Arangojs.js
index 58ced41e73..99ffc5a87c 100644
--- a/modules/Database/Arangojs.js
+++ b/modules/Database/Arangojs.js
@@ -1,7 +1,7 @@
 const { Database } = require('arangojs');
 const request = require('superagent');
 const Utilities = require('../Utilities');
-const { denormalizeGraph, normalizeGraph } = require('./graph-converter');
+const { normalizeGraph } = require('./graph-converter');
 
 const IGNORE_DOUBLE_INSERT = true;
 
@@ -37,94 +37,6 @@ class ArangoJS {
         })));
     }
 
-    /**
-     * Find set of vertices with _key, vertex_type and identifiers values
-     * @param queryObject Query for getting vertices
-     * @returns {Promise}
-     */
-    async findVertices(queryObject) {
-        let queryString = '';
-        const params = {};
-        const { query } = queryObject[0];
-        if (Utilities.isEmptyObject(queryObject) === false) {
-            let count = 1;
-            for (const key in query) {
-                if (key.match(/^[.\w\d]+$/g) !== null) {
-                    if (key === 'uid') {
-                        queryString += `
-                        LET v_res${count} = (
-                            FOR v${count} IN ot_vertices
-                            FILTER v${count}.uid == "@param${count}"
-                            RETURN {"datasets": v${count}.datasets, "objects": [v${count}]}
-                        )`;
-                    } else {
-                        queryString += `
-                        LET v_res${count} = (
-                            FOR v${count} IN ot_vertices
-                            LET objects = (
-                                FOR w${count}, e IN 1..1
-                                OUTBOUND v${count}._id GRAPH "origintrail_graph"
-                                FILTER e.edge_type == "IDENTIFIES"
-                                    AND LENGTH(INTERSECTION(e.datasets, v${count}.datasets)) > 0
-                                RETURN w${count}
-                            )
-                            FILTER v${count}.vertex_type == "IDENTIFIER"
-                                AND v${count}.id_type == "${key}"
-                                AND v${count}.id_value == "@param${count}"
-                            RETURN {"datasets": v${count}.datasets, "objects": objects}
-                        )
-                        `;
-                    }
-                    const param = `param${count}`;
-
-                    count += 1;
-                    params[param] = query[key];
-                }
-            }
-
-            for (let i = 1; i <= count; i += 1) {
-                queryString += `
-                FILTER LENGTH(v_res${i}) > 0
-                `;
-            }
-
-            queryString += 'AND LENGTH(INTERSECTION(v_res1[0].datasets';
-
-
-            for (let i = 1; i <= count; i += 1) {
-                queryString += `
-                , v_res${i}[0].datasets`;
-            }
-
-            queryString += ')) > 0 RETURN {datasets: INTERSECTION(v_res1[0].datasets';
-
-            for (let i = 1; i <= count; i += 1) {
-                queryString += `
-                , v_res${i}[0].datasets`;
-            }
-
-            queryString += '), "objects": INTERSECTION(v_res1[0].objects';
-
-            for (let i = 1; i <= count; i += 1) {
-                queryString += `
-                , v_res${i}[0].objects`;
-            }
-
-            queryString += '), ';
-
-            const results = [];
-
-            for (let i = 1; i <= count; i += 1) {
-                results.push(`"v${i}": v_res${i}[0].objects`);
-            }
-
-            queryString += results.join(',');
-            queryString += '}';
-
-            return this.runQuery(queryString, params);
-        }
-    }
-
     /**
      * Find set of documents with _key, vertex_type and identifiers values
      * @param queryObject Query for getting documents
      * @returns {Promise}
      */
@@ -160,11 +72,16 @@ class ArangoJS {
     }
 
     /**
-     * Finds vertices by query defined in DataLocationRequestObject
-     * @param inputQuery
+     * Finds import IDs based on a data location query
+     *
+     * DataLocationQuery structure: [[path, value, opcode]*]
+     *
+     * @param {?number} encColor - Encrypted color (0=RED,1=GREEN,2=BLUE)
+     * @param {object} dataLocationQuery - Search query
+     * @return {Promise}
      */
-    async findImportIds(inputQuery, encrypted) {
-        const results = await this.dataLocationQuery(inputQuery, encrypted);
+    async findImportIds(dataLocationQuery, encColor = null) {
+        const results = await this.dataLocationQuery(dataLocationQuery, encColor);
         if (results.length > 0) {
             return results[0].datasets;
         }
@@ -175,7 +92,7 @@ class ArangoJS {
         const query = `FOR v IN ot_vertices
            FILTER v.vertex_type == 'EVENT'
                AND v.sender_id == @sender_id
-               AND v.encrypted != true
+               AND v.encrypted == null
            RETURN v`;
 
         const res = await this.runQuery(query, {
@@ -216,15 +133,11 @@ class ArangoJS {
     /**
      * Finds vertices by query defined in DataLocationRequestObject
-     * @param inputQuery
+     * @param {?number} encColor - Encrypted color (0=RED,1=GREEN,2=BLUE)
+     * @param inputQuery - Search query
      */
-    async dataLocationQuery(inputQuery, encrypted = false) {
+    async dataLocationQuery(inputQuery, encColor = null) {
         const params = {};
-        let encOp = '!=';
-
-        if (encrypted) {
-            encOp = '==';
-        }
 
         let count = 1;
         let queryString = '';
@@ -249,24 +162,29 @@ class ArangoJS {
                     OUTBOUND v${count}._id ot_edges
                     FILTER e.edge_type == "IDENTIFIES"
                         AND LENGTH(INTERSECTION(e.datasets, v${count}.datasets)) > 0
-                        AND v${count}.encrypted ${encOp} true
+                        AND v${count}.encrypted == ${encColor}
                     RETURN w${count})
                 `;
                 switch (opcode) {
                 case 'EQ':
                     filter += `FILTER v${count}.vertex_type == "IDENTIFIER"
-                        AND v${count}.id_type == "${id_type}"
-                        AND v${count}.id_value == "${id_value}"
-                        AND v${count}.encrypted ${encOp} true
+                        AND v${count}.id_type == @id_type${count}
+                        AND v${count}.id_value == @id_value${count}
+                        AND v${count}.encrypted == ${encColor}
                         `;
+                    params[`id_type${count}`] = id_type;
+                    params[`id_value${count}`] = id_value;
                     break;
                 case 'IN':
                     filter += `FILTER v${count}.vertex_type == "IDENTIFIER"
-                        AND v${count}.id_type == "${id_type}"
-                        AND "${id_value}" IN v${count}.id_value
-                        AND v${count}.encrypted ${encOp} true
+                        AND v${count}.id_type == @id_type${count}
+                        AND @id_value${count} IN v${count}.id_value
+                        AND v${count}.encrypted == ${encColor}
                        `;
+
+                    params[`id_type${count}`] = id_type;
+                    params[`id_value${count}`] = id_value;
                    break;
                 default:
                     throw new Error(`OPCODE ${opcode} is not defined`);
@@ -734,21 +652,18 @@ class ArangoJS {
         }
     }
 
-    async findVerticesByImportId(data_id, encrypted = false) {
-        let queryString = '';
-        if (encrypted) {
-            queryString = `FOR v IN ot_vertices
+    /**
+     * Finds vertices by dataset ID
+     * @param {string} data_id - Dataset ID
+     * @param {?number} encColor - Encrypted color (0=RED,1=GREEN,2=BLUE)
+     * @return {Promise<*>}
+     */
+    async findVerticesByImportId(data_id, encColor = null) {
+        const queryString = `FOR v IN ot_vertices
             FILTER v.datasets != null
             AND POSITION(v.datasets, @importId, false) != false
-            AND (v.encrypted == true)
+            AND (v.encrypted == ${encColor})
             SORT v._key RETURN v`;
-        } else {
-            queryString = `FOR v IN ot_vertices
-            FILTER v.datasets != null
-            AND POSITION(v.datasets, @importId, false) != false
-            AND (v.encrypted != true)
-            SORT v._key RETURN v`;
-        }
 
         const params = { importId: data_id };
         const vertices = await this.runQuery(queryString, params);
@@ -775,30 +690,23 @@ class ArangoJS {
         return this.runQuery(queryString, {});
     }
 
-    async findEdgesByImportId(data_id, encrypted = false) {
-        let queryString = '';
-
-        if (encrypted) {
-            queryString = 'FOR v IN ot_edges ' +
-                'FILTER v.datasets != null ' +
-                'AND POSITION(v.datasets, @importId, false) != false ' +
-                'AND v.encrypted == true ' +
-                'SORT v._key ' +
-                'RETURN v';
-        } else {
-            queryString = 'FOR v IN ot_edges ' +
+    /**
+     * Find edges by dataset ID
+     * @param {string} data_id - Dataset ID
+     * @param {?number} encColor - Encrypted color (0=RED,1=GREEN,2=BLUE)
+     * @return {Promise}
+     */
+    async findEdgesByImportId(data_id, encColor = null) {
+        const queryString = 'FOR v IN ot_edges ' +
             'FILTER v.datasets != null ' +
             'AND POSITION(v.datasets, @importId, false) != false ' +
-            'AND v.encrypted != true ' +
+            `AND v.encrypted == ${encColor} ` +
             'SORT v._key ' +
            'RETURN v';
-        }
 
         const params = { importId: data_id };
         const edges = await this.runQuery(queryString, params);
-        const normalizedEdges = normalizeGraph(data_id, [], edges).edges;
-
-        return normalizedEdges;
+        return normalizeGraph(data_id, [], edges).edges;
     }
 
     /**
@@ -816,7 +724,7 @@ class ArangoJS {
         //     'RETURN v';
 
         const queryString = `FOR v IN ot_vertices
-            FILTER v.vertex_type == 'EVENT' and v.encrypted != true
+            FILTER v.vertex_type == 'EVENT' and v.encrypted == null
            RETURN v`;
         const params = {};
         const result = await this.runQuery(queryString, params);
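The EQ/IN branches above stop splicing `id_type` and `id_value` into the AQL string and pass them as bind parameters instead. A minimal sketch of the difference, using the `arangojs` driver the module already imports (the standalone function and its arguments are illustrative; the collection and filter shape mirror the patch):

```js
const { Database } = require('arangojs');

// Values interpolated into the query text can break out of the quotes and
// change the query itself; bind parameters (@id_type, @id_value) are sent
// separately and are always treated as plain data by ArangoDB.
async function findIdentifier(db, idType, idValue) {
    const queryString = `FOR v IN ot_vertices
        FILTER v.vertex_type == 'IDENTIFIER'
            AND v.id_type == @id_type
            AND v.id_value == @id_value
        RETURN v`;
    const cursor = await db.query(queryString, { id_type: idType, id_value: idValue });
    return cursor.all();
}
```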
diff --git a/modules/Database/GraphStorage.js b/modules/Database/GraphStorage.js
index 7d2c49fe9d..64017083ac 100644
--- a/modules/Database/GraphStorage.js
+++ b/modules/Database/GraphStorage.js
@@ -8,11 +8,12 @@ class GraphStorage {
      * @param logger
      * @param selectedDatabase Selected graph database
      */
-    constructor(selectedDatabase, logger) {
+    constructor(selectedDatabase, logger, notifyError) {
         this.logger = logger;
         this.selectedDatabase = selectedDatabase;
         this._allowedClasses = ['Location', 'Actor', 'Product', 'Transport',
             'Transformation', 'Observation', 'Ownership'];
+        this.notifyError = notifyError;
     }
 
     /**
@@ -65,39 +66,21 @@ class GraphStorage {
         });
     }
 
-    /**
-     * Find set of vertices from Graph storage
-     * @param queryObject Query for getting vertices
-     * @returns {Promise}
-     */
-    findVertices(queryObject) {
-        return new Promise((resolve, reject) => {
-            if (!this.db) {
-                reject(Error('Not connected to graph database'));
-            } else {
-                this.db.findVertices(queryObject).then((result) => {
-                    resolve(result);
-                }).catch((err) => {
-                    reject(err);
-                });
-            }
-        });
-    }
-
     /**
      * Finds import IDs based on a data location query
      *
      * DataLocationQuery structure: [[path, value, opcode]*]
      *
-     * @param dataLocationQuery
+     * @param encColor - Encrypted color
+     * @param dataLocationQuery - Search query
      * @return {Promise}
      */
-    findImportIds(dataLocationQuery, encrypted) {
+    findImportIds(dataLocationQuery, encColor = null) {
         return new Promise((resolve, reject) => {
             if (!this.db) {
                 reject(Error('Not connected to graph database'));
             } else {
-                this.db.findImportIds(dataLocationQuery, encrypted).then((result) => {
+                this.db.findImportIds(dataLocationQuery, encColor).then((result) => {
                     resolve(result);
                 }).catch((err) => {
                     reject(err);
@@ -108,10 +91,11 @@ class GraphStorage {
 
     /**
      * Finds vertices by query defined in DataLocationRequestObject
+     * @param encColor
      * @param inputQuery
      */
-    async dataLocationQuery(inputQuery) {
-        return this.db.dataLocationQuery(inputQuery);
+    async dataLocationQuery(inputQuery, encColor = null) {
+        return this.db.dataLocationQuery(inputQuery, encColor);
     }
 
     /**
@@ -264,21 +248,28 @@ class GraphStorage {
      * Finds all object classes
      * @return {Promise<*>}
      */
-    findObjectClassVertices() {
-        return this.db.findObjectClassVertices();
+    async findObjectClassVertices() {
+        const classes = await this.db.findObjectClassVertices();
+        if (classes.length === 0) {
+            this.notifyError(new Error('Missing class vertices'));
+            await this.__initDatabase__();
+            return this.db.findObjectClassVertices();
+        }
+        return classes;
     }
 
     /**
      * Get list of vertices by import ID
-     * @param importId Import ID
+     * @param importId - Import ID
+     * @param encColor - Encrypted color
      * @return {Promise}
      */
-    findVerticesByImportId(importId, encrypted) {
+    findVerticesByImportId(importId, encColor = null) {
         return new Promise((resolve, reject) => {
             if (!this.db) {
                 reject(Error('Not connected to graph database'));
             } else {
-                this.db.findVerticesByImportId(importId, encrypted).then((result) => {
+                this.db.findVerticesByImportId(importId, encColor).then((result) => {
                     resolve(result);
                 }).catch((err) => {
                     reject(err);
@@ -289,15 +280,16 @@ class GraphStorage {
 
     /**
      * Gets edges by import ID from the underlying database
-     * @param import_id Import ID
+     * @param datasetId - Dataset ID
+     * @param encColor - Encrypted color
      * @returns {Promise}
      */
-    findEdgesByImportId(import_id, encrypted) {
+    findEdgesByImportId(datasetId, encColor) {
         return new Promise((resolve, reject) => {
             if (!this.db) {
                 reject(Error('Not connected to graph database'));
             } else {
-                this.db.findEdgesByImportId(import_id, encrypted).then((result) => {
+                this.db.findEdgesByImportId(datasetId, encColor).then((result) => {
                     resolve(result);
                 }).catch((err) => {
                     reject(err);
diff --git a/modules/EventEmitter.js b/modules/EventEmitter.js
index 06acc64690..148a400498 100644
--- a/modules/EventEmitter.js
+++ b/modules/EventEmitter.js
@@ -3,7 +3,6 @@ const events = require('events');
 const Challenge = require('./Challenge');
 const Utilities = require('./Utilities');
-const Graph = require('./Graph');
 const Models = require('../models');
 const ImportUtilities = require('./ImportUtilities');
 const ObjectValidator = require('./validator/object-validator');
@@ -97,27 +96,8 @@ class EventEmitter {
             dcService,
             dvController,
             notifyError,
-            transport,
         } = this.ctx;
 
-        this._on('api-node-info', async (data) => {
-            try {
-                const system = await transport.getNetworkInfo();
-                data.response.status(200);
-                data.response.send({
-                    system,
-                    config,
-                });
-            } catch (err) {
-                logger.error('Failed to get node info');
-                notifyError(err);
-                data.response.status(500);
-                data.response.send({
-                    message: err,
-                });
-            }
-        });
-
         this._on('api-network-query-responses', async (data) => {
             const { query_id } = data;
             logger.info(`Query for network response triggered with query ID ${query_id}`);
@@ -354,9 +334,6 @@ class EventEmitter {
         });
 
         this._on('api-choose-offer', async (data) => {
-            if (!appState.enoughFunds) {
-                return;
-            }
             const failFunction = (error) => {
                 logger.warn(error);
                 data.response.status(400);
@@ -404,7 +381,7 @@ class EventEmitter {
             const networkQuery = await Models.network_queries.find({ where: { id } });
             if (networkQuery.status === 'FINISHED') {
                 try {
-                    const vertices = await dhService.dataLocationQuery(id, true);
+                    const vertices = await dhService.dataLocationQuery(id);
 
                     response.status(200);
                     response.send({
@@ -872,7 +849,8 @@ class EventEmitter {
             logger.info(`Challenge arrived: Block ID ${message.payload.block_id}, Import ID ${message.payload.import_id}`);
             const challenge = message.payload;
 
-            let vertices = await this.graphStorage.findVerticesByImportId(challenge.import_id);
+            let vertices = await this.graphStorage
+                .findVerticesByImportId(challenge.import_id); // TODO add encColor
             ImportUtilities.unpackKeys(vertices, []);
             ImportUtilities.sort(vertices);
             // filter CLASS vertices
diff --git a/modules/GS1Importer.js b/modules/GS1Importer.js
index 4a10c4c978..ad93ee9224 100644
--- a/modules/GS1Importer.js
+++ b/modules/GS1Importer.js
@@ -17,6 +17,7 @@ class GS1Importer {
         this.helper = ctx.gs1Utilities;
         this.log = ctx.logger;
         this.config = ctx.config;
+        this.notifyError = ctx.notifyError;
     }
 
     async processXML(err, result) {
@@ -796,7 +797,7 @@ class GS1Importer {
         const dataInfo = await models.data_info.find({ where: { data_set_id: dataSetId } });
 
         if (dataInfo) {
-            throw new Error(`Data set ${dataSetId} has already been imported`);
+            throw new ImporterError(`Data set ${dataSetId} has already been imported`);
         }
         // eslint-disable-next-line
         const { vertices: newDenormalizedVertices, edges: newDenormalizedEdges } = denormalizeGraph(dataSetId, allVertices, allEdges);
@@ -834,7 +835,7 @@ class GS1Importer {
         });
 
         return {
-            vertices: normalizedVertices,
+            vertices: normalizedVertices.concat(objectClasses),
             edges: normalizedEdges,
             data_set_id: dataSetId,
             wallet: senderWallet,
diff --git a/modules/ImportUtilities.js b/modules/ImportUtilities.js
index f4840718d2..1e416b7066 100644
--- a/modules/ImportUtilities.js
+++ b/modules/ImportUtilities.js
@@ -17,14 +17,15 @@ class ImportUtilities {
      * Hides _key attributes
      * @param vertices
      * @param edges
+     * @param color
      */
-    static packKeys(vertices, edges) {
+    static packKeys(vertices, edges, color) {
         for (const vertex of vertices) {
             if (!vertex._dc_key) {
                 vertex._dc_key = vertex._key;
                 vertex._key = uuidv4();
             }
-            vertex.encrypted = true;
+            vertex.encrypted = color;
         }
         // map _from and _to
         const find = (key) => {
@@ -44,7 +45,7 @@ class ImportUtilities {
                 edge._to = to;
             }
 
-            edge.encrypted = true;
+            edge.encrypted = color;
         }
         for (const edge of edges) {
             if (!edge._dc_key) {
@@ -87,8 +88,9 @@ class ImportUtilities {
 
     /**
      * Normalizes import (use just necessary data)
-     * @param vertices Import vertices
-     * @param edges Import edges
+     * @param dataSetId - Dataset ID
+     * @param vertices - Import vertices
+     * @param edges - Import edges
      * @returns {{edges: *, vertices: *}}
      */
     static normalizeImport(dataSetId, vertices, edges) {
diff --git a/modules/Product.js b/modules/Product.js
index a106b775bc..43278ed633 100644
--- a/modules/Product.js
+++ b/modules/Product.js
@@ -59,7 +59,7 @@ class Product {
 
         const dlQuery = this.convertToDataLocationQuery(queryObject);
 
-        this.graphStorage.dataLocationQuery(dlQuery, false).then(async (response) => {
+        this.graphStorage.dataLocationQuery(dlQuery).then(async (response) => {
             if (!response[0] || response[0].length === 0 || response[0].objects.length === 0) {
                 resolve([]);
                 return;
diff --git a/modules/RemoteControl.js b/modules/RemoteControl.js
index 54519a784b..9f5657a0d2 100644
--- a/modules/RemoteControl.js
+++ b/modules/RemoteControl.js
@@ -95,7 +95,7 @@ class RemoteControl {
             this.socket.initialize(socket);
             this.transport.getNetworkInfo().then((res) => {
                 socket.emit('system', { info: res });
-                socket.emit('config', this.config);
+                socket.emit('config', this.config); // TODO think about stripping some config values
             }).then((res) => {
                 this.updateImports();
             });
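`packKeys` above now stamps each vertex and edge with the numeric replication color rather than a plain `encrypted = true` flag; this is what lets the AQL filters earlier in the patch select one specific encrypted copy with `v.encrypted == <color>`. A rough usage sketch (the graph fragment is illustrative, not from the repo's test data):

```js
const ImportUtilities = require('./modules/ImportUtilities');

// Illustrative fragment; 2 is the BLUE color in the assumed 0/1/2 mapping.
const vertices = [{ _key: 'a', vertex_type: 'EVENT' }];
const edges = [{ _key: 'e', _from: 'a', _to: 'a', edge_type: 'EVENT_CONNECTION' }];

ImportUtilities.packKeys(vertices, edges, 2);
// Every element now records the color it was encrypted for,
console.assert(vertices[0].encrypted === 2);
// while the plaintext copy of the same dataset is stored with
// encrypted == null, so the two copies never collide in queries.
```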
diff --git a/modules/ZK.js b/modules/ZK.js
index 56f9a26cd9..6ef4f462fd 100644
--- a/modules/ZK.js
+++ b/modules/ZK.js
@@ -6,9 +6,7 @@ class ZK {
     constructor(ctx) {
         this.zero = new BN(0);
         this.one = new BN(1);
-        this.p = new BN(941078291);
-        this.q = new BN(941072309);
-        this.n = this.p.mul(this.q);
+        this.n = new BN('14fef784d91e20718aee8ef1', 16);
         this.nSquare = this.n.mul(this.n);
         this.red = BN.red(this.n);
         this.redSquare = BN.red(this.nSquare);
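The hardcoded ~30-bit primes `p` and `q` are gone; `n` now ships as an opaque hex constant, so its factorization is no longer published in the source. A quick, runnable look at what changed, using bn.js (the same library the module uses); the numbers are taken directly from the diff:

```js
const BN = require('bn.js');

// Old modulus: the product of two small primes that were visible in the code.
const oldN = new BN('941078291', 10).mul(new BN('941072309', 10));
// New modulus: provided directly as hex, factors not present in the source.
const newN = new BN('14fef784d91e20718aee8ef1', 16);

console.log(oldN.bitLength()); // 60
console.log(newN.bitLength()); // 93
```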
diff --git a/modules/command/dc/dc-offer-finalize-command.js b/modules/command/dc/dc-offer-finalize-command.js
index 6e80a66d29..2142026e43 100644
--- a/modules/command/dc/dc-offer-finalize-command.js
+++ b/modules/command/dc/dc-offer-finalize-command.js
@@ -45,7 +45,7 @@ class DCOfferFinalizeCommand extends Command {
         const confirmations = [];
         for (const identity of nodeIdentifiers) {
             const replication = replications.find(r => identity.includes(r.dh_identity));
-            colors.push(this.replicationService.castColorToNumber(replication.color));
+            colors.push(replication.color);
             confirmations.push(replication.confirmation);
         }
 
diff --git a/modules/command/dh/dh-data-read-request-free-command.js b/modules/command/dh/dh-data-read-request-free-command.js
index b10a53b739..43661a0f5f 100644
--- a/modules/command/dh/dh-data-read-request-free-command.js
+++ b/modules/command/dh/dh-data-read-request-free-command.js
@@ -66,34 +66,44 @@ class DHDataReadRequestFreeCommand extends Command {
                 throw Error(`Failed to get data info for import ID ${importId}.`);
             }
 
-            const encrypted = dataInfo.origin === 'HOLDING';
-            const verticesPromise = this.graphStorage.findVerticesByImportId(importId, encrypted);
-            const edgesPromise = this.graphStorage.findEdgesByImportId(importId, encrypted);
+            let edges;
+            let vertices;
+            if (dataInfo.origin === 'HOLDING') { // DH has the data
+                // Get replication key and then encrypt data.
+                const holdingDataModels = await Models.holding_data.findAll({
+                    where: { data_set_id: importId },
+                });
+
+                let holdingDataModel = null;
+                if (holdingDataModels.length > 0) {
+                    [holdingDataModel] = holdingDataModels; // take the first one
+                }
+
+                const encColor = holdingDataModel !== null ? holdingDataModel.color : null;
+                const verticesPromise = this.graphStorage.findVerticesByImportId(importId, encColor);
+                const edgesPromise = this.graphStorage.findEdgesByImportId(importId, encColor);
+
+                [vertices, edges] = await Promise.all([verticesPromise, edgesPromise]);
+                ImportUtilities.unpackKeys(vertices, edges);
 
-            const values = await Promise.all([verticesPromise, edgesPromise]);
-            const vertices = values[0];
-            const edges = values[1];
-
-            ImportUtilities.unpackKeys(vertices, edges);
-            ImportUtilities.deleteInternal(edges);
-            ImportUtilities.deleteInternal(vertices);
-
-            // Get replication key and then encrypt data.
-            const holdingDataModel = await Models.holding_data.find({
-                where: { data_set_id: importId },
-            });
-
-            if (holdingDataModel) {
                 const holdingData = holdingDataModel.get({ plain: true });
                 const dataPublicKey = holdingData.litigation_public_key;
-                const replicationPrivateKey = holdingData.distribution_private_key;
 
                 Graph.decryptVertices(
                     vertices.filter(vertex => vertex.vertex_type !== 'CLASS'),
                     dataPublicKey,
                 );
+            } else { // DC or DV
+                const verticesPromise = this.graphStorage.findVerticesByImportId(importId);
+                const edgesPromise = this.graphStorage.findEdgesByImportId(importId);
+                [vertices, edges] = await Promise.all([verticesPromise, edgesPromise]);
             }
 
+            ImportUtilities.deleteInternal(edges);
+            ImportUtilities.deleteInternal(vertices);
+
             const transactionHash = await ImportUtilities
                 .getTransactionHash(dataInfo.data_set_id, dataInfo.origin);
diff --git a/modules/command/dh/dh-offer-handle-command.js b/modules/command/dh/dh-offer-handle-command.js
index 433d40d7ee..063630a477 100644
--- a/modules/command/dh/dh-offer-handle-command.js
+++ b/modules/command/dh/dh-offer-handle-command.js
@@ -84,6 +84,7 @@ class DHOfferHandleCommand extends Command {
             distributionEpk: response.distribution_epk,
             distributionSignature: response.distribution_signature,
             transactionHash: response.transaction_hash,
+            encColor: response.color,
         };
     }
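The read path above now derives the encryption color from the DH's own `holding_data` row instead of a boolean `encrypted` flag. Condensed into a standalone sketch (the `loadDataset` helper is hypothetical; the branch logic mirrors dh-data-read-request-free-command above):

```js
// Hypothetical helper condensing the branch above: a DH serves the copy it
// actually holds, keyed by its stored color; a DC or DV holds the plaintext
// copy, which is stored with encColor == null.
async function loadDataset(graphStorage, models, importId, origin) {
    let encColor = null;
    if (origin === 'HOLDING') {
        const rows = await models.holding_data.findAll({ where: { data_set_id: importId } });
        encColor = rows.length > 0 ? rows[0].color : null;
    }
    const [vertices, edges] = await Promise.all([
        graphStorage.findVerticesByImportId(importId, encColor),
        graphStorage.findEdgesByImportId(importId, encColor),
    ]);
    return { vertices, edges };
}
```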
diff --git a/modules/command/dh/dh-read-data-location-request-command.js b/modules/command/dh/dh-read-data-location-request-command.js
index 0885989917..1533434885 100644
--- a/modules/command/dh/dh-read-data-location-request-command.js
+++ b/modules/command/dh/dh-read-data-location-request-command.js
@@ -25,7 +25,7 @@ class DHReadDataLocationRequestCommand extends Command {
      */
     async execute(command) {
         const {
-            msgNodeId, msgWallet, msgQuery, msgId, encrypted,
+            msgNodeId, msgWallet, msgQuery, msgId,
         } = command.data;
 
         // Check if mine publish.
@@ -35,7 +35,7 @@ class DHReadDataLocationRequestCommand extends Command {
         }
 
         // Handle query here.
-        const graphImports = await this.graphStorage.findImportIds(msgQuery, encrypted);
+        const graphImports = await this.graphStorage.findImportIds(msgQuery);
         // Filter imports not stored in local DB.
         let imports = await Models.data_info.findAll({
             attributes: ['data_set_id'],
diff --git a/modules/command/dh/dh-replication-import-command.js b/modules/command/dh/dh-replication-import-command.js
index 24030e1afe..383eb932af 100644
--- a/modules/command/dh/dh-replication-import-command.js
+++ b/modules/command/dh/dh-replication-import-command.js
@@ -46,6 +46,7 @@ class DhReplicationImportCommand extends Command {
             distributionRootHash,
             distributionSignature,
             transactionHash,
+            encColor,
         } = command.data;
         const decryptedVertices =
             await ImportUtilities.immutableDecryptVertices(litigationVertices, litigationPublicKey);
@@ -98,17 +99,17 @@ class DhReplicationImportCommand extends Command {
 
         await this.importer.importJSON({
             dataSetId,
-            vertices: decryptedVertices,
+            vertices: litigationVertices,
             edges,
             wallet: dcWallet,
-        }, false);
+        }, true, encColor);
 
         let importResult = await this.importer.importJSON({
             dataSetId,
-            vertices: litigationVertices,
+            vertices: decryptedVertices,
             edges,
             wallet: dcWallet,
-        }, true);
+        }, false);
 
         if (importResult.error) {
             throw Error(importResult.error);
         }
@@ -136,6 +137,7 @@ class DhReplicationImportCommand extends Command {
             distribution_private_key: distributionPrivateKey,
             distribution_epk: distributionEpk,
             transaction_hash: transactionHash,
+            color: encColor,
         });
 
         this.logger.important(`[DH] Replication finished for offer ID ${offerId}`);
diff --git a/modules/command/dv/dv-data-read-response-free-command.js b/modules/command/dv/dv-data-read-response-free-command.js
index df37e06626..c670a2d0f9 100644
--- a/modules/command/dv/dv-data-read-response-free-command.js
+++ b/modules/command/dv/dv-data-read-response-free-command.js
@@ -88,8 +88,7 @@ class DVDataReadResponseFreeCommand extends Command {
         ImportUtilities.sort(vertices);
         ImportUtilities.sort(edges);
 
-        const merkle = await ImportUtilities.merkleStructure(vertices.filter(vertex =>
-            vertex.vertex_type !== 'CLASS'), edges);
+        const merkle = await ImportUtilities.merkleStructure(vertices, edges);
         const rootHash = merkle.tree.getRoot();
 
         if (fingerprint !== rootHash) {
@@ -107,12 +106,6 @@ class DVDataReadResponseFreeCommand extends Command {
                 dataSetId,
                 wallet: dcWallet,
             }, false);
-            await this.importer.importJSON({
-                vertices: message.data.vertices,
-                edges: message.data.edges,
-                dataSetId,
-                wallet: dcWallet,
-            }, true);
         } catch (error) {
             this.logger.warn(`Failed to import JSON. ${error}.`);
             this.notifyError(error);
diff --git a/modules/importer.js b/modules/importer.js
index 91c3015053..f71a0f6fd1 100644
--- a/modules/importer.js
+++ b/modules/importer.js
@@ -51,11 +51,19 @@ class Importer {
         });
     }
 
-    async importJSON(json_document, packKeys = false) {
+    /**
+     * Imports a dataset in JSON format
+     * @param jsonDocument - Dataset document
+     * @param packKeys - Whether to pack keys
+     * @param encColor - Encrypted color used when packing
+     * @return {Promise<*>}
+     */
+    async importJSON(jsonDocument, packKeys = false, encColor = null) {
         try {
             const result = await this._import('JSON', {
+                encColor,
                 packKeys,
-                json_document,
+                jsonDocument,
             });
             return {
                 response: await this.afterImport(result, packKeys),
@@ -77,25 +85,26 @@ class Importer {
         this.log.info('Entering importJSON');
         const {
             packKeys,
-            json_document,
+            encColor,
+            jsonDocument,
         } = data;
 
         let {
             vertices,
             edges,
-        } = json_document;
+        } = jsonDocument;
 
         const {
             dataSetId,
             wallet,
-        } = json_document;
+        } = jsonDocument;
 
         this.log.trace('Import vertices and edges');
 
         ImportUtilities.deleteInternal(edges);
         ImportUtilities.deleteInternal(vertices);
 
         if (packKeys) {
-            ImportUtilities.packKeys(vertices, edges);
+            ImportUtilities.packKeys(vertices, edges, encColor);
         }
 
         vertices = await Promise.all(vertices.map(async (vertex) => {
@@ -200,8 +209,7 @@ class Importer {
         edges = Graph.sortVertices(edges);
         vertices = Graph.sortVertices(vertices);
 
-        const merkle = await ImportUtilities.merkleStructure(vertices.filter(vertex =>
-            vertex.vertex_type !== 'CLASS'), edges);
+        const merkle = await ImportUtilities.merkleStructure(vertices, edges);
 
         this.log.info(`Root hash: ${merkle.tree.getRoot()}`);
         this.log.info(`Data set ID: ${data_set_id}`);
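With `importJSON` now carrying an explicit color, the replication import earlier in the patch stores two copies of the same dataset: the encrypted litigation copy tagged with the offer's color, and the decrypted copy tagged `null`. A condensed restatement of those two calls (taken directly from dh-replication-import-command above, not new behavior):

```js
// Pass 1: encrypted copy; keys are packed and every element is
// tagged with the replication color received from the DC.
await importer.importJSON({
    dataSetId,
    vertices: litigationVertices,
    edges,
    wallet: dcWallet,
}, true, encColor);

// Pass 2: decrypted copy, stored as the plaintext graph (color null).
await importer.importJSON({
    dataSetId,
    vertices: decryptedVertices,
    edges,
    wallet: dcWallet,
}, false);
```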
diff --git a/modules/network/kademlia/kademlia.js b/modules/network/kademlia/kademlia.js
index 2a6fccbbfb..10fd6f24dd 100644
--- a/modules/network/kademlia/kademlia.js
+++ b/modules/network/kademlia/kademlia.js
@@ -185,7 +185,7 @@ class Kademlia {
             const publicationId = uuidv4();
             const neighbors = [...this.node.router.getClosestContactsToKey(
                 options.routingKey || this.node.identity.toString('hex'),
-                kadence.constants.ALPHA * 3,
+                this.node.router.size,
             ).entries()];
 
             const errors = [];
@@ -652,13 +652,13 @@ class Kademlia {
                     if (err) {
                         reject(err);
                     } else {
-                        if (successfulPublishes.length === 0) {
+                        if (successfulPublishes === 0) {
                             // Publish failed.
                             reject(Error('Publish failed.'));
                             return;
                         }
                         this.log.debug(`Published successfully to ${successfulPublishes} peers.`);
-                        resolve();
+                        resolve(successfulPublishes);
                     }
                 },
             );
@@ -758,21 +758,9 @@ class Kademlia {
      * Returns basic network information
      */
     async getNetworkInfo() {
-        const peers = [];
-        const dump = this.node.router.getClosestContactsToKey(
-            this.node.identity,
-            kadence.constants.K * kadence.constants.B,
-        );
-
-        for (const peer of dump) {
-            peers.push(peer);
-        }
-
         return {
-            versions: pjson.version,
             identity: this.node.identity.toString('hex'),
             contact: this.node.contact,
-            peers,
         };
     }
 
diff --git a/modules/service/dc-service.js b/modules/service/dc-service.js
index b36503ab9d..6da349ec2d 100644
--- a/modules/service/dc-service.js
+++ b/modules/service/dc-service.js
@@ -256,6 +256,7 @@ class DCService {
 
         const colors = ['red', 'green', 'blue'];
         const color = colors[Utilities.getRandomInt(2)];
+        const colorNumber = this.replicationService.castColorToNumber(color);
 
         const replication = await this.replicationService.loadReplication(offer.id, color);
         await models.replicated_data.create({
@@ -271,7 +272,7 @@ class DCService {
             distribution_root_hash: replication.distributionRootHash,
             distribution_epk: replication.distributionEpk,
             status: 'STARTED',
-            color,
+            color: colorNumber.toNumber(),
         });
 
         const toSign = [
@@ -299,6 +300,7 @@ class DCService {
             distribution_signature: distributionSignature.signature,
             transaction_hash: offer.transaction_hash,
             distributionSignature,
+            color: colorNumber.toNumber(),
         };
 
         // send replication to DH
diff --git a/modules/service/dh-service.js b/modules/service/dh-service.js
index 575c79c13d..b7c00ff6b3 100644
--- a/modules/service/dh-service.js
+++ b/modules/service/dh-service.js
@@ -290,7 +290,6 @@ class DHService {
                     msgNodeId,
                     msgWallet,
                     msgQuery,
-                    encrypted: false,
                 },
             });
         }
@@ -685,7 +684,7 @@ class DHService {
         await this.blockchain.answerLitigation(importId, answer);
     }
 
-    async dataLocationQuery(queryId, encrypted) {
+    async dataLocationQuery(queryId) {
         const networkQuery = await Models.network_queries.find({ where: { id: queryId } });
         const validationError = ObjectValidator.validateSearchQueryObject(networkQuery);
         if (validationError) {
@@ -697,7 +696,7 @@ class DHService {
 
         // Fetch the results.
         const importIds =
-            await this.graphStorage.findImportIds(networkQuery.query, encrypted);
+            await this.graphStorage.findImportIds(networkQuery.query);
         const decryptKeys = {};
 
         // Get decode keys.
@@ -716,7 +715,7 @@ class DHService {
         }
 
         const encodedVertices =
-            await this.graphStorage.dataLocationQuery(networkQuery.query, encrypted);
+            await this.graphStorage.dataLocationQuery(networkQuery.query);
         const vertices = [];
 
         encodedVertices[0].objects.forEach((encodedVertex) => {
@@ -777,8 +776,8 @@ class DHService {
         const dataInfo = await Models.data_info.find({ where: { data_set_id: dataSetId } });
 
         if (dataInfo) {
-            const verticesPromise = this.graphStorage.findVerticesByImportId(dataSetId, false);
-            const edgesPromise = this.graphStorage.findEdgesByImportId(dataSetId, false);
+            const verticesPromise = this.graphStorage.findVerticesByImportId(dataSetId);
+            const edgesPromise = this.graphStorage.findEdgesByImportId(dataSetId);
 
             const values = await Promise.all([verticesPromise, edgesPromise]);
 
diff --git a/modules/service/rest-api-service.js b/modules/service/rest-api-service.js
new file mode 100644
index 0000000000..28c0cdaa5a
--- /dev/null
+++ b/modules/service/rest-api-service.js
@@ -0,0 +1,478 @@
+const fs = require('fs');
+const ip = require('ip');
+const restify = require('restify');
+const corsMiddleware = require('restify-cors-middleware');
+
+const Utilities = require('../Utilities');
+const pjson = require('../../package.json');
+const RestAPIValidator = require('../validator/rest-api-validator');
+
+class RestAPIService {
+    constructor(ctx) {
+        this.ctx = ctx;
+        this.config = ctx.config;
+        this.logger = ctx.logger;
+        this.apiUtilities = ctx.apiUtilities;
+    }
+
+    /**
+     * Start RPC server
+     */
+    async startRPC() {
+        const options = {
+            name: 'RPC server',
+            version: pjson.version,
+            formatters: {
+                'application/json': (req, res, body) => {
+                    res.set('content-type', 'application/json; charset=utf-8');
+                    if (!body) {
+                        if (res.getHeader('Content-Length') === undefined && res.contentLength === undefined) {
+                            res.setHeader('Content-Length', 0);
+                        }
+                        return null;
+                    }
+
+                    if (body instanceof Error) {
+                        // snoop for RestError or HttpError, but don't rely on instanceof
+                        if ((body.restCode || body.httpCode) && body.body) {
+                            // eslint-disable-next-line
+                            body = body.body;
+                        } else {
+                            body = {
+                                message: body.message,
+                            };
+                        }
+                    }
+
+                    if (Buffer.isBuffer(body)) {
+                        body = body.toString('base64');
+                    }
+
+                    let ident = 2;
+                    if ('prettify-json' in req.headers) {
+                        if (req.headers['prettify-json'] === 'false') {
+                            ident = 0;
+                        }
+                    }
+                    const data = Utilities.stringify(body, ident);
+
+                    if (res.getHeader('Content-Length') === undefined && res.contentLength === undefined) {
+                        res.setHeader('Content-Length', Buffer.byteLength(data));
+                    }
+                    return data;
+                },
+            },
+        };
+
+        if (this.config.node_rpc_use_ssl) {
+            Object.assign(options, {
+                key: fs.readFileSync(this.config.node_rpc_ssl_key_path),
+                certificate: fs.readFileSync(this.config.node_rpc_ssl_cert_path),
+                rejectUnauthorized: true,
+            });
+        }
+
+        const server = restify.createServer(options);
+
+        server.use(restify.plugins.acceptParser(server.acceptable));
+        server.use(restify.plugins.queryParser());
+        server.use(restify.plugins.bodyParser());
+        const cors = corsMiddleware({
+            preflightMaxAge: 5, // Optional
+            origins: ['*'],
+            allowHeaders: ['API-Token', 'prettify-json', 'raw-data'],
+            exposeHeaders: ['API-Token-Expiry'],
+        });
+
+        server.pre(cors.preflight);
+        server.use(cors.actual);
+        server.use((request, response, next) => {
+            const result = this.apiUtilities.authorize(request);
+            if (result) {
+                response.status(result.status);
+                response.send({
+                    message: result.message,
+                });
+                return;
+            }
+            return next();
+        });
+
+        // TODO: Temp solution to listen on all adapters in the local net.
+        let serverListenAddress = this.config.node_rpc_ip;
+        if (ip.isLoopback(serverListenAddress)) {
+            serverListenAddress = '0.0.0.0';
+        }
+
+        // promisified server.listen()
+        const startServer = () => new Promise((resolve, reject) => {
+            server.listen(this.config.node_rpc_port, serverListenAddress, (err) => {
+                if (err) {
+                    reject(err);
+                } else {
+                    resolve();
+                }
+            });
+        });
+
+        await startServer();
+        this.logger.notify(`API exposed at ${server.url}`);
+
+        if (this.config.is_bootstrap_node) {
+            this._exposeBootstrapAPIRoutes(server);
+        } else {
+            this._exposeAPIRoutes(server);
+        }
+    }
+
+    /**
+     * Expose bootstrap API Routes
+     */
+    _exposeBootstrapAPIRoutes(server) {
+        this._registerNodeInfoRoute(server, true);
+    }
+
+    /**
+     * API Routes
+     */
+    _exposeAPIRoutes(server) {
+        const {
+            importController, dcController, transport, emitter,
+        } = this.ctx;
+
+        this._registerNodeInfoRoute(server, false);
+
+        /**
+         * Data import route
+         * @param importfile - file or text data
+         * @param importtype - (GS1/WOT)
+         */
+        server.post('/api/import', async (req, res) => {
+            await importController.import(req, res);
+        });
+
+        /**
+         * Create offer route
+         */
+        server.post('/api/replication', async (req, res) => {
+            await dcController.createOffer(req, res);
+        });
+
+        server.get('/api/dump/rt', (req, res) => {
+            this.logger.api('Dumping routing table');
+            const message = transport.dumpContacts();
+
+            res.status(200);
+            res.send({
+                message,
+            });
+        });
+
+        server.get('/api/network/get-contact/:node_id', async (req, res) => {
+            const nodeId = req.params.node_id;
+            this.logger.api(`Get contact node ID ${nodeId}`);
+
+            const result = await transport.getContact(nodeId);
+            const body = {};
+
+            if (result) {
+                Object.assign(body, result);
+            }
+            res.status(200);
+            res.send(body);
+        });
+
+        server.get('/api/network/find/:node_id', async (req, res) => {
+            const nodeId = req.params.node_id;
+            this.logger.api(`Find node ID ${nodeId}`);
+
+            const result = await transport.findNode(nodeId);
+            const body = {};
+
+            if (result) {
+                Object.assign(body, result);
+            }
+            res.status(200);
+            res.send(body);
+        });
+
+        server.get('/api/replication/:replication_id', (req, res) => {
+            this.logger.api('GET: Replication status request received');
+
+            const replicationId = req.params.replication_id;
+            if (replicationId == null) {
+                this.logger.error('Invalid request. You need to provide replication ID');
+                res.status(400);
+                res.send({
+                    message: 'Replication ID is not provided',
+                });
+            } else {
+                const queryObject = {
+                    replicationId,
+                    response: res,
+                };
+                emitter.emit('api-offer-status', queryObject);
+            }
+        });
+
+        /**
+         * Get trail from database
+         * @param QueryObject - ex. {uid: abc:123}
+         */
+        server.get('/api/trail', (req, res, next) => {
+            this.logger.api('GET: Trail request received.');
+
+            const error = RestAPIValidator.validateNotEmptyQuery(req.query);
+            if (error) {
+                return next(error);
+            }
+            const queryObject = req.query;
+            emitter.emit('api-trail', {
+                query: queryObject,
+                response: res,
+            });
+        });
+
+        /**
+         * Get root hash for provided data query
+         * @param Query params: data_set_id
+         */
+        server.get('/api/fingerprint', (req, res) => {
+            this.logger.api('GET: Fingerprint request received.');
+
+            const queryObject = req.query;
+            emitter.emit('api-get_root_hash', {
+                query: queryObject,
+                response: res,
+            });
+        });
+
+        server.get('/api/query/network/:query_id', (req, res) => {
+            this.logger.api('GET: Query for status request received.');
+
+            if (!req.params.query_id) {
+                res.status(400);
+                res.send({
+                    message: 'Param query_id is required.',
+                });
+                return;
+            }
+            emitter.emit('api-network-query-status', {
+                id: req.params.query_id,
+                response: res,
+            });
+        });
+
+        server.get('/api/query/:query_id/responses', (req, res) => {
+            this.logger.api('GET: Local query responses request received.');
+
+            if (!req.params.query_id) {
+                res.status(400);
+                res.send({
+                    message: 'Param query_id is required.',
+                });
+                return;
+            }
+            emitter.emit('api-network-query-responses', {
+                query_id: req.params.query_id,
+                response: res,
+            });
+        });
+
+        server.post('/api/query/network', (req, res, next) => {
+            this.logger.api('POST: Network query request received.');
+
+            let error = RestAPIValidator.validateBodyRequired(req.body);
+            if (error) {
+                return next(error);
+            }
+
+            const { query } = req.body;
+            error = RestAPIValidator.validateSearchQuery(query);
+            if (error) {
+                return next(error);
+            }
+
+            emitter.emit('api-network-query', {
+                query,
+                response: res,
+            });
+        });
+
+        /**
+         * Get vertices by query
+         * @param queryObject
+         */
+        server.post('/api/query/local', (req, res, next) => {
+            this.logger.api('POST: Local query request received.');
+
+            let error = RestAPIValidator.validateBodyRequired(req.body);
+            if (error) {
+                return next(error);
+            }
+
+            const queryObject = req.body.query;
+            error = RestAPIValidator.validateSearchQuery(queryObject);
+            if (error) {
+                return next(error);
+            }
+
+            // TODO: Decrypt returned vertices
+            emitter.emit('api-query', {
+                query: queryObject,
+                response: res,
+            });
+        });
+
+        server.get('/api/query/local/import/:data_set_id', (req, res) => {
+            this.logger.api('GET: Local import request received.');
+
+            if (!req.params.data_set_id) {
+                res.status(400);
+                res.send({
+                    message: 'Param data_set_id is required.',
+                });
+                return;
+            }
+
+            emitter.emit('api-query-local-import', {
+                data_set_id: req.params.data_set_id,
+                request: req,
+                response: res,
+            });
+        });
+
+        server.post('/api/read/network', (req, res) => {
+            this.logger.api('POST: Network read request received.');
+
+            if (req.body == null || req.body.query_id == null || req.body.reply_id == null
+                || req.body.data_set_id == null) {
+                res.status(400);
+                res.send({ message: 'Bad request' });
+                return;
+            }
+            const { query_id, reply_id, data_set_id } = req.body;
+
+            emitter.emit('api-choose-offer', {
+                query_id,
+                reply_id,
+                data_set_id,
+                response: res,
+            });
+        });
+
res.send({ message: 'Bad request' }); + } + }); + + + server.post('/api/withdraw', (req, res) => { + this.logger.api('POST: Withdraw tokens request received.'); + + if (req.body !== null && typeof req.body.trac_amount === 'number' + && req.body.trac_amount > 0) { + const { trac_amount } = req.body; + emitter.emit('api-withdraw-tokens', { + trac_amount, + response: res, + }); + } else { + res.status(400); + res.send({ message: 'Bad request' }); + } + }); + + server.get('/api/import_info', async (req, res) => { + await importController.dataSetInfo(req, res); + }); + + server.get('/api/imports_info', (req, res) => { + this.logger.api('GET: List imports request received.'); + + emitter.emit('api-imports-info', { + response: res, + }); + }); + + server.get('/api/consensus/:sender_id', (req, res) => { + this.logger.api('GET: Consensus check events request received.'); + + if (req.params.sender_id == null) { + res.status(400); + res.send({ message: 'Bad request' }); + return; + } + + emitter.emit('api-consensus-events', { + sender_id: req.params.sender_id, + response: res, + }); + }); + + /** + * Temporary route used for HTTP network prototype + */ + server.post('/network/send', (req, res) => { + this.logger.api('P2P request received'); + + const { type } = req.body; + emitter.emit(type, req, res); + }); + } + + /** + * Register common info route + * @param server - Server instance + * @param isBootstrap - Is this a bootstrap node? + * @private + */ + _registerNodeInfoRoute(server, isBootstrap) { + const { + transport, + config, + } = this.ctx; + + server.get('/api/info', async (req, res) => { + this.logger.api('GET: Node information request received.'); + + try { + const network = await transport.getNetworkInfo(); + const basicConfig = { + version: pjson.version, + blockchain: config.blockchain.blockchain_title, + network, + is_bootstrap: isBootstrap, + }; + + if (!isBootstrap) { + Object.assign(basicConfig, { + node_wallet: config.node_wallet, + erc_725_identity: config.erc725Identity, + }); + } + + res.status(200); + res.send(basicConfig); + } catch (error) { + this.logger.error(`Failed to process /api/info route. 
${error}`); + res.status(500); + res.send({ + message: error.message, + }); + } + }); + } +} + +module.exports = RestAPIService; diff --git a/ot-node.js b/ot-node.js index a5d08bfd36..a8690545cd 100644 --- a/ot-node.js +++ b/ot-node.js @@ -13,7 +13,6 @@ const Utilities = require('./modules/Utilities'); const GraphStorage = require('./modules/Database/GraphStorage'); const Blockchain = require('./modules/Blockchain'); const BlockchainPluginService = require('./modules/Blockchain/plugin/blockchain-plugin-service'); -const restify = require('restify'); const fs = require('fs'); const path = require('path'); const models = require('./models'); @@ -24,14 +23,11 @@ const GS1Utilities = require('./modules/GS1Utilities'); const WOTImporter = require('./modules/WOTImporter'); const Challenger = require('./modules/Challenger'); const RemoteControl = require('./modules/RemoteControl'); -const corsMiddleware = require('restify-cors-middleware'); const bugsnag = require('bugsnag'); const rc = require('rc'); -const mkdirp = require('mkdirp'); const uuidv4 = require('uuid/v4'); const awilix = require('awilix'); const homedir = require('os').homedir(); -const ip = require('ip'); const argv = require('minimist')(process.argv.slice(2)); const Graph = require('./modules/Graph'); @@ -44,8 +40,8 @@ const ApprovalService = require('./modules/service/approval-service'); const ProfileService = require('./modules/service/profile-service'); const ReplicationService = require('./modules/service/replication-service'); const ImportController = require('./modules/controller/import-controller'); -const RestAPIValidator = require('./modules/validator/rest-api-validator'); const APIUtilities = require('./modules/utility/api-utilities'); +const RestAPIService = require('./modules/service/rest-api-service'); const pjson = require('./package.json'); const configjson = require('./config/config.json'); @@ -357,7 +353,7 @@ class OTNode { gs1Importer: awilix.asClass(GS1Importer).singleton(), gs1Utilities: awilix.asClass(GS1Utilities).singleton(), wotImporter: awilix.asClass(WOTImporter).singleton(), - graphStorage: awilix.asValue(new GraphStorage(config.database, log)), + graphStorage: awilix.asValue(new GraphStorage(config.database, log, notifyBugsnag)), remoteControl: awilix.asClass(RemoteControl).singleton(), challenger: awilix.asClass(Challenger).singleton(), logger: awilix.asValue(log), @@ -369,6 +365,7 @@ class OTNode { importController: awilix.asClass(ImportController).singleton(), minerService: awilix.asClass(MinerService).singleton(), replicationService: awilix.asClass(ReplicationService).singleton(), + restAPIService: awilix.asClass(RestAPIService).singleton(), }); const blockchain = container.resolve('blockchain'); await blockchain.initialize(); @@ -463,8 +460,15 @@ class OTNode { } // Initialise API - const apiUtilities = container.resolve('apiUtilities'); - await this.startRPC(apiUtilities); + const restAPIService = container.resolve('restAPIService'); + try { + await restAPIService.startRPC(); + } catch (err) { + log.error('Failed to start RPC server'); + console.log(err); + notifyBugsnag(err); + process.exit(1); + } if (config.remote_control_enabled) { log.info(`Remote control enabled and listening on port ${config.node_remote_control_port}`); @@ -510,6 +514,7 @@ class OTNode { notifyError: awilix.asFunction(() => notifyBugsnag).transient(), transport: awilix.asValue(Transport()), apiUtilities: awilix.asClass(APIUtilities).singleton(), + restAPIService: awilix.asClass(RestAPIService).singleton(), }); const transport = 
container.resolve('transport'); @@ -530,8 +535,15 @@ class OTNode { approvalService.handleApprovalEvent(eventData); }); - const apiUtilities = container.resolve('apiUtilities'); - await this.startRPC(apiUtilities); + const restAPIService = container.resolve('restAPIService'); + try { + await restAPIService.startRPC(); + } catch (err) { + log.error('Failed to start RPC server'); + console.log(err); + notifyBugsnag(err); + process.exit(1); + } } /** @@ -555,428 +567,6 @@ class OTNode { } }, 5000); } - - /** - * Start RPC server - * @param apiUtilities - API utilities instance - */ - async startRPC(apiUtilities) { - const options = { - name: 'RPC server', - version: pjson.version, - formatters: { - 'application/json': (req, res, body) => { - res.set('content-type', 'application/json; charset=utf-8'); - if (!body) { - if (res.getHeader('Content-Length') === undefined && res.contentLength === undefined) { - res.setHeader('Content-Length', 0); - } - return null; - } - - if (body instanceof Error) { - // snoop for RestError or HttpError, but don't rely on instanceof - if ((body.restCode || body.httpCode) && body.body) { - // eslint-disable-next-line - body = body.body; - } else { - body = { - message: body.message, - }; - } - } - - if (Buffer.isBuffer(body)) { - body = body.toString('base64'); - } - - let ident = 2; - if ('prettify-json' in req.headers) { - if (req.headers['prettify-json'] === 'false') { - ident = 0; - } - } - const data = Utilities.stringify(body, ident); - - if (res.getHeader('Content-Length') === undefined && res.contentLength === undefined) { - res.setHeader('Content-Length', Buffer.byteLength(data)); - } - return data; - }, - }, - }; - - if (config.node_rpc_use_ssl) { - Object.assign(options, { - key: fs.readFileSync(config.node_rpc_ssl_key_path), - certificate: fs.readFileSync(config.node_rpc_ssl_cert_path), - rejectUnauthorized: true, - }); - } - - const server = restify.createServer(options); - - server.use(restify.plugins.acceptParser(server.acceptable)); - server.use(restify.plugins.queryParser()); - server.use(restify.plugins.bodyParser()); - const cors = corsMiddleware({ - preflightMaxAge: 5, // Optional - origins: ['*'], - allowHeaders: ['API-Token', 'prettify-json', 'raw-data'], - exposeHeaders: ['API-Token-Expiry'], - }); - - server.pre(cors.preflight); - server.use(cors.actual); - server.use((request, response, next) => { - const result = apiUtilities.authorize(request); - if (result) { - response.status(result.status); - response.send({ - message: result.message, - }); - return; - } - return next(); - }); - - // TODO: Temp solution to listen all adapters in local net. 
- let serverListenAddress = config.node_rpc_ip; - if (ip.isLoopback(serverListenAddress)) { - serverListenAddress = '0.0.0.0'; - } - - // promisified server.listen() - const startServer = () => new Promise((resolve, reject) => { - server.listen(config.node_rpc_port, serverListenAddress, (err) => { - if (err) { - reject(err); - } else { - resolve(); - } - }); - }); - - try { - await startServer(server, serverListenAddress); - log.notify(`API exposed at ${server.url}`); - } catch (err) { - log.error('Failed to start RPC server'); - console.log(err); - notifyBugsnag(err); - process.exit(1); - } - - if (!config.is_bootstrap_node) { - // register API routes only if the node is not bootstrap - this.exposeAPIRoutes(server, context); - } - } - - /** - * API Routes - */ - exposeAPIRoutes(server, ctx) { - const { - emitter, importController, apiUtilities, dcController, - } = ctx; - - /** - * Data import route - * @param importfile - file or text data - * @param importtype - (GS1/WOT) - */ - server.post('/api/import', async (req, res) => { - await importController.import(req, res); - }); - - /** - * Create offer route - */ - server.post('/api/replication', async (req, res) => { - await dcController.createOffer(req, res); - }); - - server.get('/api/dump/rt', (req, res) => { - log.api('Dumping routing table'); - const message = context.transport.dumpContacts(); - - res.status(200); - res.send({ - message, - }); - }); - - server.get('/api/network/get-contact/:node_id', async (req, res) => { - const nodeId = req.params.node_id; - log.api(`Get contact node ID ${nodeId}`); - - const result = await context.transport.getContact(nodeId); - const body = {}; - - if (result) { - Object.assign(body, result); - } - res.status(200); - res.send(body); - }); - - server.get('/api/network/find/:node_id', async (req, res) => { - const nodeId = req.params.node_id; - log.api(`Find node ID ${nodeId}`); - - const result = await context.transport.findNode(nodeId); - const body = {}; - - if (result) { - Object.assign(body, result); - } - res.status(200); - res.send(body); - }); - - server.get('/api/replication/:replication_id', (req, res) => { - log.api('GET: Replication status request received'); - - const replicationId = req.params.replication_id; - if (replicationId == null) { - log.error('Invalid request. You need to provide replication ID'); - res.status = 400; - res.send({ - message: 'Replication ID is not provided', - }); - } else { - const queryObject = { - replicationId, - response: res, - }; - emitter.emit('api-offer-status', queryObject); - } - }); - - /** - * Get trail from database - * @param QueryObject - ex. 
{uid: abc:123} - */ - server.get('/api/trail', (req, res, next) => { - log.api('GET: Trail request received.'); - - const error = RestAPIValidator.validateNotEmptyQuery(req.query); - if (error) { - return next(error); - } - const queryObject = req.query; - emitter.emit('api-trail', { - query: queryObject, - response: res, - }); - }); - - /** Get root hash for provided data query - * @param Query params: data_set_id - */ - server.get('/api/fingerprint', (req, res) => { - log.api('GET: Fingerprint request received.'); - - const queryObject = req.query; - emitter.emit('api-get_root_hash', { - query: queryObject, - response: res, - }); - }); - - server.get('/api/query/network/:query_id', (req, res) => { - log.api('GET: Query for status request received.'); - - if (!req.params.query_id) { - res.status(400); - res.send({ - message: 'Param required.', - }); - return; - } - emitter.emit('api-network-query-status', { - id: req.params.query_id, - response: res, - }); - }); - - server.get('/api/query/:query_id/responses', (req, res) => { - log.api('GET: Local query responses request received.'); - - if (!req.params.query_id) { - res.status(400); - res.send({ - message: 'Param query_id is required.', - }); - return; - } - emitter.emit('api-network-query-responses', { - query_id: req.params.query_id, - response: res, - }); - }); - - server.post('/api/query/network', (req, res, next) => { - log.api('POST: Network query request received.'); - - let error = RestAPIValidator.validateBodyRequired(req.body); - if (error) { - return next(error); - } - - const { query } = req.body; - error = RestAPIValidator.validateSearchQuery(query); - if (error) { - return next(error); - } - - emitter.emit('api-network-query', { - query, - response: res, - }); - }); - - /** - * Get vertices by query - * @param queryObject - */ - server.post('/api/query/local', (req, res, next) => { - log.api('POST: Local query request received.'); - - let error = RestAPIValidator.validateBodyRequired(req.body); - if (error) { - return next(error); - } - - const queryObject = req.body.query; - error = RestAPIValidator.validateSearchQuery(queryObject); - if (error) { - return next(error); - } - - // TODO: Decrypt returned vertices - emitter.emit('api-query', { - query: queryObject, - response: res, - }); - }); - - server.get('/api/query/local/import/:data_set_id', (req, res) => { - log.api('GET: Local import request received.'); - - if (!req.params.data_set_id) { - res.status(400); - res.send({ - message: 'Param required.', - }); - return; - } - - emitter.emit('api-query-local-import', { - data_set_id: req.params.data_set_id, - request: req, - response: res, - }); - }); - - server.post('/api/read/network', (req, res) => { - log.api('POST: Network read request received.'); - - if (req.body == null || req.body.query_id == null || req.body.reply_id == null - || req.body.data_set_id == null) { - res.status(400); - res.send({ message: 'Bad request' }); - return; - } - const { query_id, reply_id, data_set_id } = req.body; - - emitter.emit('api-choose-offer', { - query_id, - reply_id, - data_set_id, - response: res, - }); - }); - - - server.post('/api/deposit', (req, res) => { - log.api('POST: Deposit tokens request received.'); - - if (req.body !== null && typeof req.body.trac_amount === 'number' - && req.body.trac_amount > 0) { - const { trac_amount } = req.body; - emitter.emit('api-deposit-tokens', { - trac_amount, - response: res, - }); - } else { - res.status(400); - res.send({ message: 'Bad request' }); - } - }); - - - 
server.post('/api/withdraw', (req, res) => { - log.api('POST: Withdraw tokens request received.'); - - if (req.body !== null && typeof req.body.trac_amount === 'number' - && req.body.trac_amount > 0) { - const { trac_amount } = req.body; - emitter.emit('api-withdraw-tokens', { - trac_amount, - response: res, - }); - } else { - res.status(400); - res.send({ message: 'Bad request' }); - } - }); - - server.get('/api/import_info', async (req, res) => { - await importController.dataSetInfo(req, res); - }); - - server.get('/api/imports_info', (req, res) => { - log.api('GET: List imports request received.'); - - emitter.emit('api-imports-info', { - response: res, - }); - }); - - server.get('/api/consensus/:sender_id', (req, res) => { - log.api('GET: Consensus check events request received.'); - - if (req.params.sender_id == null) { - res.status(400); - res.send({ message: 'Bad request' }); - } - - emitter.emit('api-consensus-events', { - sender_id: req.params.sender_id, - response: res, - }); - }); - - server.get('/api/info', (req, res) => { - log.api('GET: Node information request received.'); - - emitter.emit('api-node-info', { - response: res, - }); - }); - - /** - * Temporary route used for HTTP network prototype - */ - server.post('/network/send', (req, res) => { - log.api('P2P request received'); - - const { type } = req.body; - emitter.emit(type, req, res); - }); - } } diff --git a/package-lock.json b/package-lock.json index c1f2728e64..00248af890 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "origintrail_node", - "version": "2.0.27", + "version": "2.0.28", "lockfileVersion": 1, "requires": true, "dependencies": { @@ -2101,7 +2101,7 @@ "dependencies": { "async": { "version": "1.5.2", - "resolved": "http://registry.npmjs.org/async/-/async-1.5.2.tgz", + "resolved": "https://registry.npmjs.org/async/-/async-1.5.2.tgz", "integrity": "sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo=" } } @@ -2746,7 +2746,7 @@ }, "css-select": { "version": "1.2.0", - "resolved": "http://registry.npmjs.org/css-select/-/css-select-1.2.0.tgz", + "resolved": "https://registry.npmjs.org/css-select/-/css-select-1.2.0.tgz", "integrity": "sha1-KzoRBTnFNV8c2NMUYj6HCxIeyFg=", "requires": { "boolbase": "1.0.0", @@ -3261,7 +3261,7 @@ "dependencies": { "domelementtype": { "version": "1.1.3", - "resolved": "http://registry.npmjs.org/domelementtype/-/domelementtype-1.1.3.tgz", + "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-1.1.3.tgz", "integrity": "sha1-vSh3PiZCiBrsUVRJJCmcXNgiGFs=" } } @@ -12695,7 +12695,7 @@ }, "needle": { "version": "1.1.2", - "resolved": "http://registry.npmjs.org/needle/-/needle-1.1.2.tgz", + "resolved": "https://registry.npmjs.org/needle/-/needle-1.1.2.tgz", "integrity": "sha1-0oQaElv9dP77MMA0QQQ2kGHD4To=", "requires": { "debug": "2.6.9", @@ -12729,7 +12729,7 @@ "dependencies": { "async": { "version": "1.5.2", - "resolved": "http://registry.npmjs.org/async/-/async-1.5.2.tgz", + "resolved": "https://registry.npmjs.org/async/-/async-1.5.2.tgz", "integrity": "sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo=" } } diff --git a/package.json b/package.json index 874aaf5108..37ce34dd41 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "origintrail_node", - "version": "2.0.27", + "version": "2.0.28", "description": "OriginTrail node", "main": ".eslintrc.js", "config": { @@ -17,6 +17,8 @@ "test:protocol": "nyc mocha --exit $(find test/protocol -name '*.js')", "test:protocol:nocov": "mocha --exit $(find test/protocol -name '*.js')", "test:bdd": "cucumber-js 
--fail-fast --format progress --format-options '{\"colorsEnabled\": true}' test/bdd/ -r test/bdd/steps/", + "test:bdd:first": "cucumber-js --tags=@first --fail-fast --format progress --format-options '{\"colorsEnabled\": true}' test/bdd/ -r test/bdd/steps/", + "test:bdd:second": "cucumber-js --tags=@second --fail-fast --format progress --format-options '{\"colorsEnabled\": true}' test/bdd/ -r test/bdd/steps/", "test:bdd:dryrun": "cucumber-js --dry-run test/bdd/ -r test/bdd/steps/", "test:bdd:cov": " nyc cucumber-js --fail-fast --format progress --format-options '{\"colorsEnabled\": true}' test/bdd/ -r test/bdd/steps/", "test:bdd:verbose": "cucumber-js --fail-fast --format event-protocol --format-options '{\"colorsEnabled\": true}' test/bdd/ -r test/bdd/steps/", diff --git a/test/api/zkGetTrail.test.js b/test/api/zkGetTrail.test.js index 9be95eca0b..cae5ca5238 100644 --- a/test/api/zkGetTrail.test.js +++ b/test/api/zkGetTrail.test.js @@ -62,7 +62,7 @@ describe('Check ZK by quering /api/trail for EVENT vertices', () => { injectionMode: awilix.InjectionMode.PROXY, }); - graphStorage = new GraphStorage(config.database, logger); + graphStorage = new GraphStorage(config.database, logger, () => {}); container.register({ logger: awilix.asValue(logger), gs1Importer: awilix.asClass(GS1Importer), @@ -72,6 +72,7 @@ describe('Check ZK by quering /api/trail for EVENT vertices', () => { wotImporter: awilix.asClass(WOTImporter), product: awilix.asClass(Product), config: awilix.asValue(config), + notifyError: awilix.asValue(() => {}), }); await graphStorage.connect(); gs1 = container.resolve('gs1Importer'); diff --git a/test/bdd/features/datalayer.feature b/test/bdd/features/datalayer.feature index d316f553e6..4ee5935fa2 100644 --- a/test/bdd/features/datalayer.feature +++ b/test/bdd/features/datalayer.feature @@ -3,6 +3,7 @@ Feature: Data layer related features Given the blockchain is set up And 1 bootstrap is running + @second Scenario: Check that second gs1 import does not mess up first import's hash value Given I setup 4 nodes And I start the nodes @@ -16,6 +17,7 @@ Feature: Data layer related features And DC waits for last offer to get written to blockchain Then checking again first import's root hash should point to remembered value + @second Scenario: Smoke check data-layer basic endpoints Given I setup 2 nodes And I start the nodes @@ -26,6 +28,7 @@ Feature: Data layer related features Given I query DC node locally for last imported data set id Then response hash should match last imported data set id + @second Scenario: Basic dataset integrity with it's xml Given I setup 1 node And I start the node @@ -33,6 +36,7 @@ Feature: Data layer related features And DC imports "importers/xml_examples/Basic/01_Green_to_pink_shipment.xml" as GS1 Then imported data is compliant with 01_Green_to_pink_shipment.xml file + @second Scenario: Dataset immutability I Given I setup 1 node And I start the node @@ -43,4 +47,53 @@ Feature: Data layer related features And DC imports "importers/xml_examples/Retail/01_Green_to_pink_shipment.xml" as GS1 Given DC initiates the replication for last imported dataset And DC waits for last offer to get written to blockchain - Then DC manually calculated datasets data and root hashes matches ones from blockchain + Then DC's 2 dataset hashes should match blockchain values + + @second + Scenario: Dataset immutability II + Given I setup 1 node + And I start the node + And I use 1st node as DC + And DC imports "importers/xml_examples/Basic/01_Green_to_pink_shipment.xml" as GS1 + Given 
DC initiates the replication for last imported dataset + And DC waits for last offer to get written to blockchain + And DC imports "importers/xml_examples/Basic/01_Green_to_pink_shipment_modified_event_timestamp.xml" as GS1 + Given DC initiates the replication for last imported dataset + And DC waits for last offer to get written to blockchain + Then DC's 2 dataset hashes should match blockchain values + + @second + Scenario: Imported XML's private data should be hashed + Given I setup 1 node + And I start the node + And I use 1st node as DC + And DC imports "test/modules/test_xml/GraphExample_1.xml" as GS1 + Given I query DC node locally for last imported data set id + Then DC's local query response should contain hashed private attributes + Given DC initiates the replication for last imported dataset + And DC waits for replication window to close + Given I additionally setup 1 node + And I start additional nodes + And I use 2nd node as DV + Given DV publishes query consisting of path: "identifiers.id", value: "urn:epc:id:sgtin:Batch_1" and opcode: "EQ" to the network + Then all nodes with last import should answer to last network query by DV + Given the DV purchases import from the last query from the DC + Given I query DV node locally for last imported data set id + Then DV's local query response should contain hashed private attributes + + @second + Scenario: Remote event connection on DH + Given I setup 5 nodes + And I start the nodes + And I use 1st node as DC + And DC imports "importers/xml_examples/Retail/01_Green_to_pink_shipment.xml" as GS1 + Given DC initiates the replication for last imported dataset + And I wait for replications to finish + And DC imports "importers/xml_examples/Retail/02_Green_to_Pink_receipt.xml" as GS1 + Given DC initiates the replication for last imported dataset + And I wait for replications to finish + And I use 2nd node as DH + Given DH calls consensus endpoint for sender: "urn:ot:object:actor:id:Company_Green" + Then last consensus response should have 1 event with 1 match + Given DH calls consensus endpoint for sender: "urn:ot:object:actor:id:Company_Pink" + Then last consensus response should have 1 event with 1 match \ No newline at end of file diff --git a/test/bdd/features/endpoints.feature b/test/bdd/features/endpoints.feature new file mode 100644 index 0000000000..89f1d28344 --- /dev/null +++ b/test/bdd/features/endpoints.feature @@ -0,0 +1,50 @@ +Feature: API endpoints features + Background: Setup local blockchain and bootstraps + Given the blockchain is set up + And 1 bootstrap is running + + @first + Scenario: Smoke check /api/withdraw endpoint + Given I setup 1 node + And I start the node + And I use 1st node as DC + Given I attempt to withdraw 5 tokens from DC profile + Then DC wallet and DC profile balances should diff by 5 with rounding error of 0.1 + + @first + Scenario: Smoke check /api/deposit endpoint + Given I setup 1 node + And I start the node + And I use 1st node as DC + Given I attempt to deposit 50 tokens from DC wallet + Then DC wallet and DC profile balances should diff by 50 with rounding error of 0.1 + + @first + Scenario: Smoke check /api/consensus endpoint + Given I setup 1 node + And I start the node + And I use 1st node as DC + And DC imports "importers/xml_examples/Retail/01_Green_to_pink_shipment.xml" as GS1 + And DC imports "importers/xml_examples/Retail/02_Green_to_Pink_receipt.xml" as GS1 + Given DC calls consensus endpoint for sender: "urn:ot:object:actor:id:Company_Green" + Then last consensus response should have 
1 event with 1 match + Given DC calls consensus endpoint for sender: "urn:ot:object:actor:id:Company_Pink" + Then last consensus response should have 1 event with 1 match + + @first + Scenario: API calls should be forbidden + Given I setup 1 node + And I override configuration for all nodes + | network.remoteWhitelist | 100.100.100.100 | 200.200.200.200 | + And I start the node + And I use 1st node as DC + Then API calls will be forbidden + + @first + Scenario: API calls should not be authorized + Given I setup 1 node + And I override configuration for all nodes + | auth_token_enabled | true | + And I start the node + And I use 1st node as DC + Then API calls will not be authorized \ No newline at end of file diff --git a/test/bdd/features/erc725profile.feature b/test/bdd/features/erc725profile.feature index 00b497e13e..bf963c6fe5 100644 --- a/test/bdd/features/erc725profile.feature +++ b/test/bdd/features/erc725profile.feature @@ -3,12 +3,14 @@ Feature: ERC725 Profile features Given the blockchain is set up And 1 bootstrap is running + @second Scenario: Expect node to create profile Given I setup 1 node And I start the node Then the 1st node should have a valid ERC725 identity And the 1st node should have a valid profile + @second Scenario: Expect node to create profile and stake only once Given I setup 1 node And I start the node @@ -21,6 +23,7 @@ Feature: ERC725 Profile features And I start the node Then the 1st node should start normally + @second Scenario: Provide own ERC725 identity and expect node to create profile Given I setup 1 node When I manually create ERC725 identity for 1st node diff --git a/test/bdd/features/importer.feature b/test/bdd/features/importer.feature index fa35e0794b..90066bd144 100644 --- a/test/bdd/features/importer.feature +++ b/test/bdd/features/importer.feature @@ -3,6 +3,7 @@ Feature: Test basic importer features Given the blockchain is set up And 1 bootstrap is running + @second Scenario: Check that second WOT import does not mess up first import's hash value (same data set) Given I setup 1 node And I start the node @@ -15,6 +16,7 @@ Feature: Test basic importer features Then DC's last import's hash should be the same as one manually calculated Then checking again first import's root hash should point to remembered value + @second Scenario: Check that WOT import is connecting to the same batch from GS1 import Given I setup 1 node And I start the node diff --git a/test/bdd/features/network.feature b/test/bdd/features/network.feature index 440e84e572..3f94a86ee2 100644 --- a/test/bdd/features/network.feature +++ b/test/bdd/features/network.feature @@ -3,11 +3,13 @@ Feature: Test basic network features Given the blockchain is set up And 1 bootstrap is running + @first Scenario: Start network with 5 nodes and check do they see each other Given I setup 5 nodes And I start the nodes Then all nodes should be aware of each other + @first Scenario: Test replication DC -> DH Given the replication difficulty is 0 And I setup 5 nodes @@ -20,6 +22,7 @@ Feature: Test basic network features Then the last root hash should be the same as one manually calculated Then the last import should be the same on all nodes that replicated data + @first Scenario: DC->DH->DV replication + DV network read + DV purchase Given the replication difficulty is 0 And I setup 5 nodes @@ -39,32 +42,7 @@ Feature: Test basic network features Then the last import should be the same on DC and DV nodes Then DV's last purchase's hash should be the same as one manually calculated - - Scenario: Smoke 
check /api/withdraw endpoint - Given I setup 1 node - And I start the node - And I use 1st node as DC - Given I attempt to withdraw 5 tokens from DC profile - Then DC wallet and DC profile balances should diff by 5 with rounding error of 0.1 - - Scenario: Smoke check /api/deposit endpoint - Given I setup 1 node - And I start the node - And I use 1st node as DC - Given I attempt to deposit 50 tokens from DC wallet - Then DC wallet and DC profile balances should diff by 50 with rounding error of 0.1 - - Scenario: Smoke check /api/consensus endpoint - Given I setup 1 node - And I start the node - And I use 1st node as DC - And DC imports "importers/xml_examples/Retail/01_Green_to_pink_shipment.xml" as GS1 - And DC imports "importers/xml_examples/Retail/02_Green_to_Pink_receipt.xml" as GS1 - Given DC calls consensus endpoint for sender: "urn:ot:object:actor:id:Company_Green" - Then last consensus response should have 1 event with 1 match - Given DC calls consensus endpoint for sender: "urn:ot:object:actor:id:Company_Pink" - Then last consensus response should have 1 event with 1 match - + @first Scenario: DV purchases data directly from DC, no DHes Given the replication difficulty is 0 And I setup 1 node @@ -82,6 +60,7 @@ Feature: Test basic network features Given the DV purchases import from the last query from the DC Then the last import should be the same on DC and DV nodes + @first Scenario: 2nd DV purchases data from 1st DV, no DHes Given the replication difficulty is 0 And I setup 1 node @@ -107,6 +86,7 @@ Feature: Test basic network features Then the last import should be the same on DC and DV nodes Then the last import should be the same on DC and DV2 nodes + @first Scenario: DV should be able to publish network query regardless of the funds # Start node and let it create its own profile. It needs some ETH and TRAC for that. Given I setup 1 node @@ -120,6 +100,7 @@ Feature: Test basic network features When DV publishes query consisting of path: "identifiers.id", value: "urn:epc:id:sgtin:Batch_1" and opcode: "EQ" to the network Then everything should be ok + @first Scenario: API calls should be forbidden Given I setup 1 node And I override configuration for all nodes @@ -128,10 +109,15 @@ Feature: Test basic network features And I use 1st node as DC Then API calls will be forbidden + @first Scenario: API calls should not be authorized Given I setup 1 node And I override configuration for all nodes | auth_token_enabled | true | And I start the node And I use 1st node as DC - Then API calls will not be authorized \ No newline at end of file + Then API calls will not be authorized + + @first + Scenario: Bootstraps should have /api/info route enabled + Then 1st bootstrap should reply on info route \ No newline at end of file diff --git a/test/bdd/features/protocol-issues.feature b/test/bdd/features/protocol-issues.feature index 417373a933..7db5c678ee 100644 --- a/test/bdd/features/protocol-issues.feature +++ b/test/bdd/features/protocol-issues.feature @@ -4,6 +4,7 @@ Feature: Protocol related issues. Given the blockchain is set up And 1 bootstrap is running + @second Scenario: Expect publish to work with ghost nodes. # Use 7 nodes in total - Kadence.ALPHA(3) times two plus one DC. 
Given I setup 7 nodes diff --git a/test/bdd/steps/datalayer.js b/test/bdd/steps/datalayer.js index e4dd1fc52a..41cbb4d208 100644 --- a/test/bdd/steps/datalayer.js +++ b/test/bdd/steps/datalayer.js @@ -57,7 +57,7 @@ Then(/^imported data is compliant with 01_Green_to_pink_shipment.xml file$/, asy productionDate: '2017-31-12T00:01:54Z', quantities: { 'urn:ot:object:actor:id:Company_Green:2018-01-01T01:00:00.000-04:00Z-04:00': { - PCS: '5d3381241af6b16260f680059e9042', + PCS: '11079ead57df77828224b3692c14118b993cb8199cfb5b8', }, }, }; @@ -71,11 +71,13 @@ ).to.be.above(0); }); -Then(/^DC manually calculated datasets data and root hashes matches ones from blockchain$/, async function () { +Then(/^DC's (\d+) dataset hashes should match blockchain values$/, async function (datasetsCount) { expect(!!this.state.dc, 'DC node not defined. Use other step to define it.').to.be.equal(true); + expect(datasetsCount >= 1, 'datasetsCount should be a positive integer').to.be.true; const { dc } = this.state; const myApiImportsInfo = await httpApiHelper.apiImportsInfo(dc.state.node_rpc_url); + expect(myApiImportsInfo.length, 'We should have precisely this many datasets').to.be.equal(datasetsCount); for (const i in Array.from({ length: myApiImportsInfo.length })) { const myDataSetId = myApiImportsInfo[i].data_set_id; @@ -90,10 +92,34 @@ expect(calculatedImportHash, 'Calculated hashes are different').to.be.equal(myDataSetId); // vertices and edges are already sorted from the response - const myMerkle = await ImportUtilities.merkleStructure(myEdgesVertices.vertices.filter(vertex => - vertex.vertex_type !== 'CLASS'), myEdgesVertices.edges); + const myMerkle = await ImportUtilities.merkleStructure(myEdgesVertices.vertices, myEdgesVertices.edges); expect(myFingerprint.root_hash, 'Fingerprint from API endpoint and manually calculated should match').to.be.equal(myMerkle.tree.getRoot()); } }); +Then(/^([DC|DV]+)'s local query response should contain hashed private attributes$/, async function (nodeType) { + expect(nodeType, 'Node type can only be DC or DV.').to.satisfy(val => (val === 'DC' || val === 'DV')); + expect(!!this.state[nodeType.toLowerCase()], 'DC/DV node not defined. 
Use other step to define it.').to.be.equal(true); + + expect(!!this.state.apiQueryLocalImportByDataSetIdResponse, 'Query response of last local imported data set id not defined').to.be.equal(true); + + expect(this.state.apiQueryLocalImportByDataSetIdResponse, 'Response should contain two keys').to.have.keys(['edges', 'vertices']); + + this.state.apiQueryLocalImportByDataSetIdResponse.vertices.forEach((vertex) => { + if (vertex.data) { + if (vertex.data.private) { + let sumOfHashesLengths = 0; + let randomHashLength; + Object.keys(vertex.data.private).forEach((key) => { + expect((vertex.data.private[key]).startsWith('0x'), 'Private value should start with 0x').to.be.true; + expect(utilities.isZeroHash(vertex.data.private[key]), 'Private value should not be empty hash').to.be.false; + sumOfHashesLengths += (vertex.data.private[key]).length; + randomHashLength = (vertex.data.private[key]).length; + }); + expect(sumOfHashesLengths % randomHashLength, 'All hashes should be of same length').to.equal(0); + } + } + }); +}); + diff --git a/test/bdd/steps/endpoints.js b/test/bdd/steps/endpoints.js index 9c434fc40e..d8205c7243 100644 --- a/test/bdd/steps/endpoints.js +++ b/test/bdd/steps/endpoints.js @@ -191,10 +191,10 @@ Given(/^I attempt to deposit (\d+) tokens from DC wallet[s]*$/, { timeout: 12000 return Promise.all(promises); }); -Given(/^DC calls consensus endpoint for sender: "(\S+)"$/, async function (senderId) { - expect(!!this.state.dc, 'DC node not defined. Use other step to define it.').to.be.equal(true); - const { dc } = this.state; - const host = dc.state.node_rpc_url; +Given(/^([DC|DH|DV]+) calls consensus endpoint for sender: "(\S+)"$/, async function (nodeType, senderId) { + expect(nodeType, 'Node type can only be DC, DH, DV.').to.be.oneOf(['DC', 'DH', 'DV']); + + const host = this.state[nodeType.toLowerCase()].state.node_rpc_url; const consensusResponse = await httpApiHelper.apiConsensus(host, senderId); expect(consensusResponse, 'Should have key called events').to.have.all.keys('events'); diff --git a/test/bdd/steps/lib/http-api-helper.js b/test/bdd/steps/lib/http-api-helper.js index afc5563101..b9a6cca979 100644 --- a/test/bdd/steps/lib/http-api-helper.js +++ b/test/bdd/steps/lib/http-api-helper.js @@ -503,6 +503,37 @@ async function apiTrail(nodeRpcUrl, query) { }); } +/** + * @typedef {Object} InfoResponse + * @property {Object} Node information + */ + +/** + * Fetch /api/info/ information + * + * @param {string} nodeRpcUrl URL in following format http://host:port + * @return {Promise.<InfoResponse>} + */ +async function apiNodeInfo(nodeRpcUrl) { + return new Promise((accept, reject) => { + request( + { + method: 'GET', + headers: { 'Content-Type': 'application/json' }, + uri: `${nodeRpcUrl}/api/info`, + json: true, + }, + (err, res, body) => { + if (err) { + reject(err); + return; + } + accept(body); + }, + ); + }); +} + module.exports = { apiImport, apiImportContent, @@ -519,4 +550,5 @@ module.exports = { apiDeposit, apiConsensus, apiTrail, + apiNodeInfo, }; diff --git a/test/bdd/steps/network.js b/test/bdd/steps/network.js index 25082ff6ee..1eb4ca37e9 100644 --- a/test/bdd/steps/network.js +++ b/test/bdd/steps/network.js @@ -83,6 +83,7 @@ Given(/^(\d+) bootstrap is running$/, { timeout: 80000 }, function (nodeCount, d bootstraps: ['https://localhost:5278/#ff62cb1f692431d901833d55b93c7d991b4087f1'], remoteWhitelist: ['localhost', '127.0.0.1'], }, + }, appDataBaseDir: this.parameters.appDataBaseDir, }); @@ -310,8 +311,7 @@ Then(/^the last root hash should be the same as one manually 
calculated$/, async const myApiImportInfo = await httpApiHelper.apiImportInfo(dc.state.node_rpc_url, this.state.lastImport.data_set_id); // vertices and edges are already sorted from the response - const myMerkle = await ImportUtilities.merkleStructure(myApiImportInfo.import.vertices.filter(vertex => - vertex.vertex_type !== 'CLASS'), myApiImportInfo.import.edges); + const myMerkle = await ImportUtilities.merkleStructure(myApiImportInfo.import.vertices, myApiImportInfo.import.edges); expect(myFingerprint.root_hash, 'Fingerprint from API endpoint and manually calculated should match').to.be.equal(myMerkle.tree.getRoot()); }); @@ -405,8 +405,6 @@ Then(/^the last import should be the same on all nodes that replicated data$/, a this.state.lastImport.data_set_id, ); expect(dhImportInfo.transaction, 'DH transaction hash should be defined').to.not.be.undefined; - // TODO: fix different root hashes error. - dhImportInfo.root_hash = dcImportInfo.root_hash; if (deepEqual(dcImportInfo, dhImportInfo)) { accept(); } else { @@ -444,8 +442,6 @@ Then(/^the last import should be the same on DC and ([DV|DV2]+) nodes$/, async f const dvImportInfo = await httpApiHelper.apiImportInfo(dv.state.node_rpc_url, this.state.lastImport.data_set_id); - // TODO: fix different root hashes error. - dvImportInfo.root_hash = dcImportInfo.root_hash; if (!deepEqual(dcImportInfo, dvImportInfo)) { throw Error(`Objects not equal: ${JSON.stringify(dcImportInfo)} and ${JSON.stringify(dvImportInfo)}`); } @@ -634,7 +630,6 @@ Then(/^DC wallet and DC profile balances should diff by (\d+) with rounding erro }); Then(/^last consensus response should have (\d+) event with (\d+) match[es]*$/, function (eventsCount, matchesCount) { - expect(!!this.state.dc, 'DC node not defined. Use other step to define it.').to.be.equal(true); expect(this.state.lastConsensusResponse, 'lastConsensusResponse should be already defined').to.not.be.undefined; expect(this.state.lastConsensusResponse, 'Should have key called events').to.have.all.keys('events'); @@ -738,3 +733,16 @@ Given(/^I override configuration for all nodes*$/, { timeout: 120000 }, function } done(); }); + +Given(/^(\d+)[st|nd|rd|th]+ bootstrap should reply on info route$/, { timeout: 3000000 }, async function (nodeIndex) { + expect(this.state.bootstraps.length).to.be.greaterThan(0); + expect(nodeIndex, 'Invalid index.').to.be.within(0, this.state.bootstraps.length); + + const bootstrap = this.state.bootstraps[nodeIndex - 1]; + const response = await httpApiHelper.apiNodeInfo(bootstrap.state.node_rpc_url); + + expect(response, 'response should contain version, blockchain, network and is_bootstrap keys').to.have.keys([ + 'version', 'blockchain', + 'network', 'is_bootstrap', + ]); +}); diff --git a/test/modules/gs1-importer.test.js b/test/modules/gs1-importer.test.js index 9492082dad..c775a0282a 100644 --- a/test/modules/gs1-importer.test.js +++ b/test/modules/gs1-importer.test.js @@ -134,7 +134,7 @@ describe('GS1 Importer tests', () => { // eslint-disable-next-line no-loop-func async () => { const result = await gs1.parseGS1(await Utilities.fileContents(test.args[0])); - const { response } = await importer.importJSON(result, true); + const { response } = await importer.importJSON(result, true, 1); const { vertices, edges } = response; for (const doc of edges.concat(vertices)) { diff --git a/test/modules/test_xml/GraphExample_1.xml b/test/modules/test_xml/GraphExample_1.xml index f60786ab0f..e6290dcfa3 100644 --- a/test/modules/test_xml/GraphExample_1.xml +++ 
b/test/modules/test_xml/GraphExample_1.xml @@ -54,8 +54,8 @@ value1 value2 - value2 - value2 + value3 + value4 @@ -72,8 +72,8 @@ value1 value2 - value2 - value2 + value3 + value4 @@ -96,8 +96,8 @@ value1 value2 - value2 - value2 + value3 + value4 @@ -115,8 +115,8 @@ value1 value2 - value2 - value2 + value3 + value4
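
The central pattern this patch introduces in rest-api-service.js — wrapping the callback-style server.listen() in a Promise so startRPC() can be awaited and a startup failure handled in one place — can be exercised in isolation. A minimal sketch, using Node's built-in http module in place of restify (an assumption, so it runs without the project's dependencies; the port is illustrative); note that with a plain Node server, listen errors such as EADDRINUSE arrive as 'error' events rather than as callback arguments:

    const http = require('http');

    const server = http.createServer((req, res) => res.end('ok'));

    // Promisified listen(), mirroring startServer() in the patch.
    const startServer = (port, host) => new Promise((resolve, reject) => {
        server.once('error', reject); // e.g. EADDRINUSE
        server.listen(port, host, () => resolve());
    });

    (async () => {
        try {
            await startServer(8900, '0.0.0.0');
            console.log('API exposed at http://0.0.0.0:8900');
        } catch (err) {
            console.error('Failed to start RPC server', err);
            process.exit(1);
        }
    })();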
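Because /api/info is now registered for bootstrap nodes as well as regular ones, the BDD suite can probe a bootstrap directly through the new apiNodeInfo() helper. A usage sketch, assuming a node is already listening locally (the URL and port are illustrative, and the require path assumes the script runs from the repository root):

    const httpApiHelper = require('./test/bdd/steps/lib/http-api-helper');

    (async () => {
        const info = await httpApiHelper.apiNodeInfo('http://localhost:8900');
        // Bootstraps reply with version, blockchain, network and is_bootstrap;
        // regular nodes additionally include node_wallet and erc_725_identity.
        console.log(info.version, info.is_bootstrap);
    })();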