diff --git a/importers/sample_files/DC1_01-sample_product_packing.xml b/importers/sample_files/DC1_01-sample_product_packing.xml
new file mode 100644
index 0000000000..8e245af587
--- /dev/null
+++ b/importers/sample_files/DC1_01-sample_product_packing.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<epcis:EPCISDocument xmlns:epcis="urn:epcglobal:epcis:xsd:1" schemaVersion="1.2">
+    <EPCISBody>
+        <EventList>
+            <ObjectEvent id="packing_product">
+                <eventTime>2019-09-18T10:30:00.000-06:00</eventTime>
+                <eventTimeZoneOffset>-06:00</eventTimeZoneOffset>
+                <epcList>
+                    <epc>urn:epc:id:sgtin:111111111</epc>
+                </epcList>
+                <action>ADD</action>
+                <bizStep>urn:epcglobal:cbv:bizstep:packing</bizStep>
+                <disposition>urn:epcglobal:cbv:disp:active</disposition>
+                <readPoint>
+                    <id>urn:epc:id:sgln:0000000.11111.0</id>
+                </readPoint>
+                <bizLocation>
+                    <id>urn:epc:id:sgln:0000000.11111.0</id>
+                </bizLocation>
+            </ObjectEvent>
+        </EventList>
+    </EPCISBody>
+</epcis:EPCISDocument>
\ No newline at end of file
diff --git a/importers/sample_files/DC1_02-sample_product_shipping.xml b/importers/sample_files/DC1_02-sample_product_shipping.xml
new file mode 100644
index 0000000000..9ea33a5603
--- /dev/null
+++ b/importers/sample_files/DC1_02-sample_product_shipping.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<epcis:EPCISDocument xmlns:epcis="urn:epcglobal:epcis:xsd:1" schemaVersion="1.2">
+    <EPCISBody>
+        <EventList>
+            <ObjectEvent id="shipping_product">
+                <eventTime>2019-10-12T12:45:00.000-06:00</eventTime>
+                <eventTimeZoneOffset>-06:00</eventTimeZoneOffset>
+                <epcList>
+                    <epc>urn:epc:id:sgtin:111111111</epc>
+                </epcList>
+                <action>OBSERVE</action>
+                <bizStep>urn:epcglobal:cbv:bizstep:shipping</bizStep>
+                <disposition>urn:epcglobal:cbv:disp:in_progress</disposition>
+                <readPoint>
+                    <id>urn:epc:id:sgln:0000000.22222.0</id>
+                </readPoint>
+                <bizLocation>
+                    <id>urn:epc:id:sgln:0000000.22222.0</id>
+                </bizLocation>
+                <extension>
+                    <!-- connector reference (element name assumed; value preserved from the original sample) -->
+                    <OTConnectionID>sampleFarmAtDate12102019:0x475092045ff115c5eea7679d7edae8c97a64967c</OTConnectionID>
+                </extension>
+            </ObjectEvent>
+        </EventList>
+    </EPCISBody>
+</epcis:EPCISDocument>
\ No newline at end of file
diff --git a/importers/sample_files/DC2_01-sample_product_receiving.xml b/importers/sample_files/DC2_01-sample_product_receiving.xml
new file mode 100644
index 0000000000..808e9d2dbd
--- /dev/null
+++ b/importers/sample_files/DC2_01-sample_product_receiving.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<epcis:EPCISDocument xmlns:epcis="urn:epcglobal:epcis:xsd:1" schemaVersion="1.2">
+    <EPCISBody>
+        <EventList>
+            <ObjectEvent id="receiving_product">
+                <eventTime>2019-10-12T15:00:00.000-06:00</eventTime>
+                <eventTimeZoneOffset>-06:00</eventTimeZoneOffset>
+                <epcList>
+                    <epc>urn:epc:id:sgtin:111111111</epc>
+                </epcList>
+                <action>OBSERVE</action>
+                <bizStep>urn:epcglobal:cbv:bizstep:receiving</bizStep>
+                <disposition>urn:epcglobal:cbv:disp:in_progress</disposition>
+                <readPoint>
+                    <id>urn:epc:id:sgln:0000000.33333.0</id>
+                </readPoint>
+                <bizLocation>
+                    <id>urn:epc:id:sgln:0000000.33333.0</id>
+                </bizLocation>
+                <extension>
+                    <!-- connector reference (element name assumed; value preserved from the original sample) -->
+                    <OTConnectionID>sampleFarmAtDate12102019:0xfc41a6e359ee40996fe08f2d2e8728d2bc204442</OTConnectionID>
+                </extension>
+            </ObjectEvent>
+        </EventList>
+    </EPCISBody>
+</epcis:EPCISDocument>
\ No newline at end of file
diff --git a/importers/sample_files/DC2_02-sample_transformation_event.xml b/importers/sample_files/DC2_02-sample_transformation_event.xml
new file mode 100644
index 0000000000..6822cd5911
--- /dev/null
+++ b/importers/sample_files/DC2_02-sample_transformation_event.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<epcis:EPCISDocument xmlns:epcis="urn:epcglobal:epcis:xsd:1" schemaVersion="1.2">
+    <EPCISBody>
+        <EventList>
+            <extension>
+                <TransformationEvent id="transforming">
+                    <eventTime>2019-09-18T10:30:00.000-06:00</eventTime>
+                    <eventTimeZoneOffset>-06:00</eventTimeZoneOffset>
+                    <inputEPCList>
+                        <epc>urn:epc:id:sgtin:111111111</epc>
+                        <epc>urn:epc:id:sgtin:222222222</epc>
+                    </inputEPCList>
+                    <outputEPCList>
+                        <epc>urn:epc:id:sgtin:999999999</epc>
+                        <epc>urn:epc:id:sgtin:888888888</epc>
+                    </outputEPCList>
+                    <bizStep>urn:epcglobal:cbv:bizstep:transforming</bizStep>
+                    <disposition>urn:epcglobal:cbv:disp:in_progress</disposition>
+                    <readPoint>
+                        <id>urn:epc:id:sgln:0000000.44444.0</id>
+                    </readPoint>
+                    <bizLocation>
+                        <id>urn:epc:id:sgln:0000000.44444.0</id>
+                    </bizLocation>
+                </TransformationEvent>
+            </extension>
+        </EventList>
+    </EPCISBody>
+</epcis:EPCISDocument>
\ No newline at end of file
diff --git a/importers/sample_files/DC2_03-sample_batch_shipping.xml b/importers/sample_files/DC2_03-sample_batch_shipping.xml
new file mode 100644
index 0000000000..b685acbc71
--- /dev/null
+++ b/importers/sample_files/DC2_03-sample_batch_shipping.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<epcis:EPCISDocument xmlns:epcis="urn:epcglobal:epcis:xsd:1" schemaVersion="1.2">
+    <EPCISBody>
+        <EventList>
+            <ObjectEvent id="shipping_batch">
+                <eventTime>2019-10-14T12:00:00.000-06:00</eventTime>
+                <eventTimeZoneOffset>-06:00</eventTimeZoneOffset>
+                <epcList>
+                    <epc>urn:epc:id:sgtin:999999999</epc>
+                    <epc>urn:epc:id:sgtin:777777777</epc>
+                </epcList>
+                <action>OBSERVE</action>
+                <bizStep>urn:epcglobal:cbv:bizstep:shipping</bizStep>
+                <disposition>urn:epcglobal:cbv:disp:in_progress</disposition>
+                <readPoint>
+                    <id>urn:epc:id:sgln:0000000.55555.0</id>
+                </readPoint>
+                <bizLocation>
+                    <id>urn:epc:id:sgln:0000000.55555.0</id>
+                </bizLocation>
+                <extension>
+                    <!-- connector reference (element name assumed; value preserved from the original sample) -->
+                    <OTConnectionID>sampleFarmAtDate14102019:0x75bd15b2f5a5f2ead02717973e1fdf1d4c8f914a</OTConnectionID>
+                </extension>
+            </ObjectEvent>
+        </EventList>
+    </EPCISBody>
+</epcis:EPCISDocument>
diff --git a/importers/sample_files/DC3_01-sample_batch_receiving.xml b/importers/sample_files/DC3_01-sample_batch_receiving.xml
new file mode 100644
index 0000000000..8a94d64c74
--- /dev/null
+++ b/importers/sample_files/DC3_01-sample_batch_receiving.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<epcis:EPCISDocument xmlns:epcis="urn:epcglobal:epcis:xsd:1" schemaVersion="1.2">
+    <EPCISBody>
+        <EventList>
+            <ObjectEvent id="receiving_batch">
+                <eventTime>2019-10-19T08:00:00.000-06:00</eventTime>
+                <eventTimeZoneOffset>-06:00</eventTimeZoneOffset>
+                <epcList>
+                    <epc>urn:epc:id:sgtin:999999999</epc>
+                </epcList>
+                <action>OBSERVE</action>
+                <bizStep>urn:epcglobal:cbv:bizstep:receiving</bizStep>
+                <disposition>urn:epcglobal:cbv:disp:in_progress</disposition>
+                <readPoint>
+                    <id>urn:epc:id:sgln:0000000.66666.0</id>
+                </readPoint>
+                <bizLocation>
+                    <id>urn:epc:id:sgln:0000000.66666.0</id>
+                </bizLocation>
+                <extension>
+                    <!-- connector reference (element name assumed; value preserved from the original sample) -->
+                    <OTConnectionID>sampleFarmAtDate14102019:0x475092045ff115c5eea7679d7edae8c97a64967c</OTConnectionID>
+                </extension>
+            </ObjectEvent>
+        </EventList>
+    </EPCISBody>
+</epcis:EPCISDocument>
\ No newline at end of file
diff --git a/importers/sample_files/DC3_02-sample_batch_selling.xml b/importers/sample_files/DC3_02-sample_batch_selling.xml
new file mode 100644
index 0000000000..d7be57301d
--- /dev/null
+++ b/importers/sample_files/DC3_02-sample_batch_selling.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<epcis:EPCISDocument xmlns:epcis="urn:epcglobal:epcis:xsd:1" schemaVersion="1.2">
+    <EPCISBody>
+        <EventList>
+            <ObjectEvent id="selling_batch">
+                <eventTime>2019-10-19T10:00:00.000-06:00</eventTime>
+                <eventTimeZoneOffset>-06:00</eventTimeZoneOffset>
+                <epcList>
+                    <epc>urn:epc:id:sgtin:999999999</epc>
+                    <epc>urn:epc:id:sgtin:123123123</epc>
+                </epcList>
+                <action>OBSERVE</action>
+                <bizStep>urn:epcglobal:cbv:bizstep:retail_selling</bizStep>
+                <disposition>urn:epcglobal:cbv:disp:retail_sold</disposition>
+                <readPoint>
+                    <id>urn:epc:id:sgln:0000000.77777.0</id>
+                </readPoint>
+                <bizLocation>
+                    <id>urn:epc:id:sgln:0000000.77777.0</id>
+                </bizLocation>
+            </ObjectEvent>
+        </EventList>
+    </EPCISBody>
+</epcis:EPCISDocument>
\ No newline at end of file
diff --git a/modules/Database/Arangojs.js b/modules/Database/Arangojs.js
index 398efd6f07..3b7252bdd1 100644
--- a/modules/Database/Arangojs.js
+++ b/modules/Database/Arangojs.js
@@ -143,12 +143,31 @@ class ArangoJS {
LET trailObjects = (
FILTER startObjects[0] != null
- FOR v, e, p IN 0..@depth ANY startObjects[0] ot_edges
+ FOR v, e, p IN 0..@depth ANY startObjects[0] ot_edges`;
+ if (Array.isArray(connectionTypes) && connectionTypes.length > 0) {
+ queryString += `
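+                        // Stop expanding a path once its last two edges repeat the same relationType,
+                        // unless the edge just before them is a CONNECTOR_FOR hop.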
+ PRUNE (LENGTH(p.edges) == 2 && p.edges[-1].relationType == p.edges[-2].relationType) || (LENGTH(p.edges) > 2 && p.edges[-1].relationType == p.edges[-2].relationType && p.edges[-3].relationType != 'CONNECTOR_FOR')
+ OPTIONS {
+ bfs: true,
+ uniqueVertices: 'global',
+ uniqueEdges: 'path'
+ }
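+                        // PRUNE still emits the vertex it stopped at, so the same condition is
+                        // re-applied as a FILTER to drop those paths from the result.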
+ FILTER (
+ ((LENGTH(p.edges) < 2) == true) ||
+ ((p.edges[-1].relationType != p.edges[-2].relationType) == true) ||
+ ((p.edges[-3].relationType == 'CONNECTOR_FOR') == true)
+ ) == true
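+                        // Keep only paths whose edges all use the requested connection types.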
+ FILTER p.edges[*].relationType ALL in @connectionTypes`;
+ queryParams.connectionTypes = connectionTypes;
+ } else {
+ queryString += `
OPTIONS {
bfs: true,
- uniqueVertices: 'global',
- uniqueEdges: 'path'
- }
+ uniqueVertices: 'global',
+ uniqueEdges: 'path'
+ }`;
+ }
+ queryString += `
RETURN DISTINCT v
)
@@ -156,83 +175,19 @@ class ArangoJS {
FILTER trailObject != null
LET objectsRelated = (
FOR v, e in 1..1 OUTBOUND trailObject ot_edges
- FILTER e.edgeType IN ['IdentifierRelation','dataRelation','otRelation']
+ FILTER e.edgeType IN ['IdentifierRelation','dataRelation','otRelation']
AND e.datasets != null
AND v.datasets != null
AND LENGTH(INTERSECTION(e.datasets, v.datasets, trailObject.datasets)) > 0
- RETURN {
+ RETURN {
"vertex": v,
"edge": e
- })
- RETURN {
- "rootObject": trailObject,
- "relatedObjects": objectsRelated
- }`;
- if (Array.isArray(connectionTypes) && connectionTypes.length > 0) {
- queryString = ` // Get identifier
- LET identifierObjects = TO_ARRAY(DOCUMENT('ot_vertices', @identifierKeys))
-
- // Fetch the start entity for trail
- LET startObjects = UNIQUE(FLATTEN(
- FOR identifierObject IN identifierObjects
- FILTER identifierObject != null
- LET identifiedObject = (
- FOR v, e IN 1..1 OUTBOUND identifierObject ot_edges
- FILTER e.edgeType == 'IdentifierRelation'
- RETURN v
- )
- RETURN identifiedObject
- ))
-
- LET trailObjects = (
- FILTER startObjects[0] != null
- FOR v, e, p IN 0..@depth ANY startObjects[0] ot_edges
- OPTIONS {
- bfs: true,
- uniqueVertices: 'global',
- uniqueEdges: 'path'
- }
-
- FILTER LENGTH(p.edges) < 2 ? true : (p.edges[-2].relationType != p.edges[-1].relationType)
- FILTER p.edges[*].relationType ALL in @connectionTypes
- RETURN DISTINCT { vertex: v, path: p.edges }
- )
-
- LET pairs = (
- FOR object in trailObjects
- FILTER LENGTH(object.path) > 2
- let imaDuplikate = (
- FOR relation in object.path
- FILTER POSITION(object.path, relation, true) > 0
- LET pozicija = ( RETURN POSITION(object.path, relation, true))
- RETURN NTH(object.path, pozicija - 1).relationType == relation.relationType
+ }
)
- FILTER POSITION(imaDuplikate, true)
- RETURN object
- )
-
- let trailObjects2 = (for object in trailObjects
- filter object not in pairs
- return object.vertex
- )
- FOR trailObject in trailObjects2
- FILTER trailObject != null
- LET objectsRelated = (
- FOR v, e in 1..1 OUTBOUND trailObject ot_edges
- FILTER e.edgeType IN ['IdentifierRelation','dataRelation','otRelation']
- AND e.datasets != null
- AND v.datasets != null
- AND LENGTH(INTERSECTION(e.datasets, v.datasets, trailObject.datasets)) > 0
- RETURN {
- "vertex": v,
- "edge": e
- })
- RETURN {
- "rootObject": trailObject,
- "relatedObjects": objectsRelated
- }`;
- queryParams.connectionTypes = connectionTypes;
- }
+ RETURN {
+ "rootObject": trailObject,
+ "relatedObjects": objectsRelated
+ }`;
const result = await this.runQuery(queryString, queryParams);
return result;
diff --git a/modules/command/dh/dh-litigation-answer-command.js b/modules/command/dh/dh-litigation-answer-command.js
index ef111c5261..15cd760ad7 100644
--- a/modules/command/dh/dh-litigation-answer-command.js
+++ b/modules/command/dh/dh-litigation-answer-command.js
@@ -28,7 +28,6 @@ class DHLitigationAnswerCommand extends Command {
offerId,
objectIndex,
blockIndex,
- dataSetId,
} = command.data;
const holdingData = await models.holding_data.findOne({
@@ -53,11 +52,11 @@ class DHLitigationAnswerCommand extends Command {
            const litigationTimestamp = parseInt(timestamp, 10) * 1000; // seconds -> milliseconds
if (status === '1') {
- if (litigationTimestamp + (litigation_interval_in_minutes * 60000) >= Date.now()) {
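+                // Answer only while the litigation window (interval in minutes -> milliseconds) is still open.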
+ if (litigationTimestamp + (litigation_interval_in_minutes * 60 * 1000) >= Date.now()) {
const color = this.replicationService.castNumberToColor(holdingData.color);
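+                    // The dataset ID is now read from the holding record; the command payload no longer carries it.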
const otObject = await this.importService.getImportedOtObject(
- dataSetId,
+ holdingData.data_set_id,
objectIndex,
offerId,
color,
diff --git a/modules/command/dh/dh-litigation-initiated-command.js b/modules/command/dh/dh-litigation-initiated-command.js
index 9d054f9a0a..39baf6052d 100644
--- a/modules/command/dh/dh-litigation-initiated-command.js
+++ b/modules/command/dh/dh-litigation-initiated-command.js
@@ -1,6 +1,7 @@
const Command = require('../command');
const Utilities = require('../../Utilities');
const Models = require('../../../models/index');
+const constants = require('../../constants');
/**
* Repeatable command that checks whether litigation is successfully initiated
@@ -10,7 +11,6 @@ class DHLitigationInitiatedCommand extends Command {
super(ctx);
this.config = ctx.config;
this.logger = ctx.logger;
- this.dhService = ctx.dhService;
}
/**
@@ -42,11 +42,21 @@ class DHLitigationInitiatedCommand extends Command {
requestedBlockIndex,
} = JSON.parse(event.data);
- await this.dhService.handleLitigation(
- offerId,
- requestedObjectIndex,
- requestedBlockIndex,
- );
+ this.logger.warn(`Litigation initiated for offer ${offerId}, object index ${requestedObjectIndex} and block index ${requestedBlockIndex}.`);
+
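+                    // Schedule the litigation answer command directly; dhService.handleLitigation has been removed.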
+ return {
+ commands: [
+ {
+ name: 'dhLitigationAnswerCommand',
+ data: {
+ offerId,
+ objectIndex: requestedObjectIndex,
+ blockIndex: requestedBlockIndex,
+ },
+ retries: constants.ANSWER_LITIGATION_COMMAND_RETRIES,
+ },
+ ],
+ };
}
}
} catch (e) {
diff --git a/modules/service/dh-service.js b/modules/service/dh-service.js
index f52b3a40e3..8614b9520a 100644
--- a/modules/service/dh-service.js
+++ b/modules/service/dh-service.js
@@ -12,7 +12,6 @@ const Graph = require('../Graph');
const Encryption = require('../Encryption');
const ImportUtilities = require('../ImportUtilities');
const ObjectValidator = require('../validator/object-validator');
-const constants = require('../constants');
class DHService {
constructor(ctx) {
@@ -384,36 +383,6 @@ class DHService {
});
}
- /**
- * Handle started litigated
- * @param offerId - Offer ID
- * @param objectIndex - Index of the selected object from the sorted OT dataset
- * @param blockIndex - Index of the selected block from the sorted object
- * @return {Promise}
- */
- async handleLitigation(offerId, objectIndex, blockIndex) {
- this.logger.warn(`Litigation initiated for offer ${offerId}, object index ${objectIndex} and block index ${blockIndex}.`);
-
- const bid = await Models.bids.findOne({
- where: { offer_id: offerId },
- });
- if (bid == null) {
- this.logger.info(`I am not a holder for offer ${offerId}. Ignoring litigation.`);
- return;
- }
-
- await this.commandExecutor.add({
- name: 'dhLitigationAnswerCommand',
- data: {
- offerId,
- objectIndex,
- blockIndex,
- dataSetId: bid.data_set_id,
- },
- retries: constants.ANSWER_LITIGATION_COMMAND_RETRIES,
- });
- }
-
/**
* Handle one read request (checks whether node satisfies query)
* @param msgId - Message ID
diff --git a/modules/service/replication-service.js b/modules/service/replication-service.js
index d80b783b13..f2be3c0151 100644
--- a/modules/service/replication-service.js
+++ b/modules/service/replication-service.js
@@ -52,7 +52,7 @@ class ReplicationService {
for (let i = 0; i < 3; i += 1) {
const color = this.castNumberToColor(i);
- const litigationKeyPair = Encryption.generateKeyPair(512);
+ const litigationKeyPair = Encryption.generateKeyPair(2048);
const distributionKeyPair = Encryption.generateKeyPair(512);
// TODO Optimize encryption to reduce memory usage
diff --git a/modules/service/rest-api-v2.js b/modules/service/rest-api-v2.js
index 871e532632..2a20d96f6e 100644
--- a/modules/service/rest-api-v2.js
+++ b/modules/service/rest-api-v2.js
@@ -822,7 +822,7 @@ class RestAPIServiceV2 {
const object_to_export =
{
- dataset_id: requested_dataset,
+ dataset_id,
};
const inserted_object = await Models.handler_ids.create({
diff --git a/package-lock.json b/package-lock.json
index 47973849bd..70e784a6e1 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,6 +1,6 @@
{
"name": "origintrail_node",
- "version": "4.0.3",
+ "version": "4.0.5",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
diff --git a/package.json b/package.json
index 4aa054ab32..15c2eadab6 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "origintrail_node",
- "version": "4.0.3",
+ "version": "4.0.5",
"description": "OriginTrail node",
"main": ".eslintrc.js",
"config": {
diff --git a/test/bdd/features/importer.feature b/test/bdd/features/importer.feature
index ede44dec2e..ecc63b2234 100644
--- a/test/bdd/features/importer.feature
+++ b/test/bdd/features/importer.feature
@@ -34,30 +34,8 @@ Feature: Test basic importer features
And DC waits for import to finish
And DC imports "importers/xml_examples/Retail/02_Green_to_Pink_receipt.xml" as GS1-EPCIS
And DC waits for import to finish
- Then the traversal from id "urn:epc:id:sgtin:Batch_1" with connection types "EPC" should contain 3 objects
-
- @fourth
- Scenario: Check that trail returns the expected objects
- Given I setup 1 node
- And I start the nodes
- And I use 1st node as DC
- And DC imports "importers/xml_examples/Retail/01_Green_to_pink_shipment.xml" as GS1-EPCIS
- And DC waits for import to finish
- And DC imports "importers/xml_examples/Retail/02_Green_to_Pink_receipt.xml" as GS1-EPCIS
- And DC waits for import to finish
- And DC imports "importers/xml_examples/Retail/03_Pink_to_Orange_shipment.xml" as GS1-EPCIS
- And DC waits for import to finish
- And DC imports "importers/xml_examples/Retail/04_Pink_to_Orange_receipt.xml" as GS1-EPCIS
- And DC waits for import to finish
- And DC imports "importers/xml_examples/Retail/05_Pink_to_Red_shipment.xml" as GS1-EPCIS
- And DC waits for import to finish
- And DC imports "importers/xml_examples/Retail/06_Pink_to_Red_receipt.xml" as GS1-EPCIS
- And DC waits for import to finish
- Then the traversal from id "urn:epc:id:sgtin:Batch_1" with connection types "EPC,BIZ_LOCATION" should contain 5 objects
- And the last traversal should contain 2 objects with type "otObject.properties.vocabularyType" and value "urn:ot:object:location"
- And the last traversal should contain 1 objects with type "otObject.properties.urn:ot:object:product:batch:productId" and value "urn:ot:object:product:id:Product_1"
- And the last traversal should contain 2 objects with type "otObject.properties.objectType" and value "ObjectEvent"
- And the last traversal should contain 1 objects with type "otObject.@id" and value "urn:epc:id:sgtin:Batch_1"
+ And I call traversal from "id" "urn:epc:id:sgtin:Batch_1" with connection types "EPC"
+ Then the last traversal should contain 3 objects in total
@first
Scenario: Check that exported WOT dataset is the same as the one imported
@@ -92,7 +70,8 @@ Feature: Test basic importer features
And DC waits for import to finish
And DC imports "importers/use_cases/connectors/02_Green_to_Pink_receipt.xml" as GS1-EPCIS
And DC waits for import to finish
- Then the traversal from id "connectionId" with connection types "CONNECTION_DOWNSTREAM" should contain 2 objects
+ And I call traversal from "id" "connectionId" with connection types "CONNECTION_DOWNSTREAM"
+ Then the last traversal should contain 2 objects in total
@third
Scenario: Return all data related to a specific identifier
@@ -103,7 +82,8 @@ Feature: Test basic importer features
And DC waits for import to finish
And DC initiates the replication for last imported dataset
And I wait for replications to finish
- Then the traversal from id "100678" with connection types "EPC" should contain 5 objects
+      And I call traversal from "id" "100678" with connection types "EPC"
+ Then the last traversal should contain 5 objects in total
And I calculate and validate the proof of the last traversal
@fourth
diff --git a/test/bdd/features/trail.feature b/test/bdd/features/trail.feature
new file mode 100644
index 0000000000..c9a0642124
--- /dev/null
+++ b/test/bdd/features/trail.feature
@@ -0,0 +1,62 @@
+
+
+Feature: Trail features
+ Background: Setup local blockchain and bootstraps
+ Given the blockchain is set up
+ And 1 bootstrap is running
+
+@second
+Scenario: Check that trail returns the expected objects
+ Given I setup 4 nodes
+ And I start the 1st node
+ And I start the 2nd node
+ And I start the 3rd node
+ And I start the 4th node
+ And I use 1st node as DC
+ And DC imports "importers/sample_files/DC1_01-sample_product_packing.xml" as GS1-EPCIS
+ And DC waits for import to finish
+ And DC initiates the replication for last imported dataset
+ And I wait for replications to finish
+ And DC imports "importers/sample_files/DC1_02-sample_product_shipping.xml" as GS1-EPCIS
+ And DC waits for import to finish
+ And DC initiates the replication for last imported dataset
+ And I wait for replications to finish
+ And I use 2nd node as DC
+ And DC imports "importers/sample_files/DC2_01-sample_product_receiving.xml" as GS1-EPCIS
+ And DC waits for import to finish
+ And DC initiates the replication for last imported dataset
+ And I wait for replications to finish
+ And DC imports "importers/sample_files/DC2_02-sample_transformation_event.xml" as GS1-EPCIS
+ And DC waits for import to finish
+ And DC initiates the replication for last imported dataset
+ And I wait for replications to finish
+ And DC imports "importers/sample_files/DC2_03-sample_batch_shipping.xml" as GS1-EPCIS
+ And DC waits for import to finish
+ And DC initiates the replication for last imported dataset
+ And I wait for replications to finish
+ And I use 3rd node as DC
+ And DC imports "importers/sample_files/DC3_01-sample_batch_receiving.xml" as GS1-EPCIS
+ And DC waits for import to finish
+ And DC initiates the replication for last imported dataset
+ And I wait for replications to finish
+ And DC imports "importers/sample_files/DC3_02-sample_batch_selling.xml" as GS1-EPCIS
+ And DC waits for import to finish
+ And DC initiates the replication for last imported dataset
+ And I wait for replications to finish
+ # Use the 4th node as DC because the trail is only called from a DC node for now -> expand this functionality in the future
+ And I use 4th node as DC
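+ # Use a broad set of connection types so the trail follows parent/child, transformation and connector relations across all three datasets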
+ And I call traversal from "sgtin" "urn:epc:id:sgtin:111111111" with connection types "EPC,PARENT_EPC,CHILD_EPC,INPUT_EPC,OUTPUT_EPC,CONNECTOR_FOR,CONNECTION_DOWNSTREAM"
+ Then the last traversal should contain 2 objects with type "otObject.@id" and value "urn:epc:id:sgtin:111111111"
+ And the last traversal should contain 2 objects with type "otObject.@id" and value "urn:epc:id:sgtin:999999999"
+ And the last traversal should contain 1 objects with type "otObject.@id" and value "urn:epc:id:sgtin:888888888"
+ And the last traversal should contain 4 objects with type "otObject.@type" and value "otConnector"
+ And the last traversal should contain 1 objects with type "otObject.properties.___metadata._attributes.id" and value "packing_product"
+ And the last traversal should contain 1 objects with type "otObject.properties.___metadata._attributes.id" and value "shipping_product"
+ And the last traversal should contain 1 objects with type "otObject.properties.___metadata._attributes.id" and value "receiving_product"
+ And the last traversal should contain 1 objects with type "otObject.properties.___metadata._attributes.id" and value "transforming"
+ And the last traversal should contain 1 objects with type "otObject.properties.___metadata._attributes.id" and value "shipping_batch"
+ And the last traversal should contain 1 objects with type "otObject.properties.___metadata._attributes.id" and value "receiving_batch"
+ And the last traversal should contain 1 objects with type "otObject.properties.___metadata._attributes.id" and value "selling_batch"
+ And the last traversal should contain 6 objects with type "otObject.properties.objectType" and value "ObjectEvent"
+ And the last traversal should contain 16 objects in total
+
diff --git a/test/bdd/steps/blockchain.js b/test/bdd/steps/blockchain.js
index 26428c99bd..389112d151 100644
--- a/test/bdd/steps/blockchain.js
+++ b/test/bdd/steps/blockchain.js
@@ -18,6 +18,7 @@ Given(/^the blockchain is set up$/, { timeout: 60000 }, function (done) {
});
Given(/^the replication difficulty is (\d+)$/, async function (difficulty) {
+ this.logger.log(`The replication difficulty is ${difficulty}`);
expect(
this.state.localBlockchain && this.state.localBlockchain.isInitialized,
'localBlockchain not initialized',
diff --git a/test/bdd/steps/datalayer.js b/test/bdd/steps/datalayer.js
index 05dfcde6ea..795be86c22 100644
--- a/test/bdd/steps/datalayer.js
+++ b/test/bdd/steps/datalayer.js
@@ -1,7 +1,7 @@
/* eslint-disable no-unused-expressions, max-len, no-await-in-loop */
const {
- Then,
+ Then, Given,
} = require('cucumber');
const { expect } = require('chai');
@@ -137,30 +137,23 @@ Then(/^([DC|DV]+)'s local query response should contain hashed private attribute
});
});
-Then(
- /^the traversal from id "(\S+)" with connection types "(\S+)" should contain (\d+) objects/,
+Given(
+ /^I call traversal from "(\S+)" "(\S+)" with connection types "(\S+)"/,
{ timeout: 120000 },
- async function (id, connectionTypes, expectedNumberOfObjects) {
+ async function (id_type, id_value, connectionTypes) {
expect(!!this.state.dc, 'DC node not defined. Use other step to define it.').to.be.equal(true);
const { dc } = this.state;
const host = dc.state.node_rpc_url;
const trailParams = {
- identifier_types: ['id'],
- identifier_values: [id],
+ identifier_types: [id_type],
+ identifier_values: [id_value],
connection_types: connectionTypes.split(','),
- depth: 10,
+ depth: 50,
};
const trail = await httpApiHelper.apiTrail(host, trailParams);
- expect(trail, 'should not be null').to.not.be.undefined;
- expect(trail, 'should be an Array').to.be.an.instanceof(Array);
- expect(
- trail.length,
- `Traversal result should contain ${expectedNumberOfObjects} object(s)`,
- ).to.be.equal(expectedNumberOfObjects);
-
this.state.lastTrail = trail;
},
);
@@ -282,6 +275,19 @@ Then(
},
);
+Then(
+ /^the last traversal should contain (\d+) objects in total/,
+ async function (expectedNumberOfObjects) {
+ expect(!!this.state.lastTrail, 'Last traversal not defined. Use other step to define it.').to.be.equal(true);
+ const { lastTrail } = this.state;
+
+ expect(
+ lastTrail.length,
+ `Traversal should contain ${expectedNumberOfObjects} objects`,
+ ).to.be.equal(expectedNumberOfObjects);
+ },
+);
+
Then(
'Corrupted node should not have last replication dataset',
async function () {
diff --git a/test/bdd/steps/litigation.js b/test/bdd/steps/litigation.js
index 14fa60ccf1..480619d464 100644
--- a/test/bdd/steps/litigation.js
+++ b/test/bdd/steps/litigation.js
@@ -67,6 +67,7 @@ Given(/^I start (\d+)[st|nd|rd|th]+ stopped holder*$/, { timeout: 300000 }, func
});
Then(/^(\d+)[st|nd|rd|th]+ holder to litigate should answer litigation$/, { timeout: 300000 }, async function (nodeIndex) {
+ this.logger.log(`${nodeIndex} holder to litigate should answer litigation`);
expect(this.state.bootstraps.length).to.be.greaterThan(0);
expect(this.state.nodes.length).to.be.greaterThan(0);
@@ -192,6 +193,7 @@ Then(/^I wait for replacement to be completed$/, { timeout: 300000 }, function (
});
Given(/^I wait for challenges to start$/, { timeout: 300000 }, async function () {
+ this.logger.log('I wait for challenges to start');
expect(this.state.bootstraps.length).to.be.greaterThan(0);
expect(this.state.nodes.length).to.be.greaterThan(0);
diff --git a/test/bdd/steps/network.js b/test/bdd/steps/network.js
index 49ce831170..834e5cb5e5 100644
--- a/test/bdd/steps/network.js
+++ b/test/bdd/steps/network.js
@@ -144,7 +144,7 @@ Given(/^I setup (\d+) node[s]*$/, { timeout: 120000 }, function (nodeCount, done
rpc_server_url: 'http://localhost:7545/', // TODO use from instance
},
local_network_only: true,
- dc_choose_time: 120000, // 2 minute
+ dc_choose_time: 90000, // 90 seconds
initial_deposit_amount: '10000000000000000000000',
commandExecutorVerboseLoggingEnabled: true,
};
@@ -176,6 +176,7 @@ Given(/^DC waits for holding time*$/, { timeout: 120000 }, async function () {
});
Given(/^I start the node[s]*$/, { timeout: 3000000 }, function (done) {
+ this.logger.log('I start the nodes');
expect(this.state.bootstraps.length).to.be.greaterThan(0);
expect(this.state.nodes.length).to.be.greaterThan(0);
@@ -436,6 +437,7 @@ Then(/^the last exported dataset data should be the same as "([^"]*)"$/, async f
});
Then(/^the last root hash should be the same as one manually calculated$/, async function () {
+    this.logger.log('The last root hash should be the same as one manually calculated');
expect(!!this.state.dc, 'DC node not defined. Use other step to define it.').to.be.equal(true);
expect(this.state.nodes.length, 'No started nodes').to.be.greaterThan(0);
expect(this.state.bootstraps.length, 'No bootstrap nodes').to.be.greaterThan(0);
@@ -575,6 +577,7 @@ Given(/^I wait for (\d+)[st|nd|rd|th]+ node to verify replication$/, { timeout:
});
Then(/^the last import should be the same on all nodes that replicated data$/, async function () {
+    this.logger.log('The last import should be the same on all nodes that replicated data');
expect(!!this.state.dc, 'DC node not defined. Use other step to define it.').to.be.equal(true);
expect(!!this.state.lastImport, 'Nothing was imported. Use other step to do it.').to.be.equal(true);
expect(!!this.state.lastReplicationHandler, 'Nothing was replicated. Use other step to do it.').to.be.equal(true);
diff --git a/testnet/register-node.js b/testnet/register-node.js
index f308f24506..c66bcfbd26 100644
--- a/testnet/register-node.js
+++ b/testnet/register-node.js
@@ -102,11 +102,21 @@ function checkForUpdate() {
execSync(`cp -af ${appMigrationDirPath}/. ${configDir}`);
    // Potential risk of race condition here. Copying and linking have to be an atomic operation.
-
+ const previousVersionPath = fs.realpathSync('/ot-node/current');
// Just replace current link.
execSync(`ln -fns ${updateInfo.path} /ot-node/current`);
- // TODO: Remove old version dir.
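+    // Remove old semver-named version directories, keeping the new version and the previously linked one.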
+ const fileList = fs.readdirSync('/ot-node');
+ fileList.forEach((fileName) => {
+ const filePath = `/ot-node/${fileName}`;
+ if (fs.lstatSync(filePath).isDirectory()
+ && filePath !== updateInfo.path
+ && filePath !== previousVersionPath
+ && /^\d+\.\d+\.\d+$/.test(fileName)) {
+            execSync(`rm -rf ${filePath}`); // fs.rmdirSync(filePath) would throw ENOTEMPTY on a non-empty directory
+ logger.trace(`Successfully removed old version directory: ${filePath}`);
+ }
+ });
logger.important(`OT Node updated to ${updateInfo.version}. Resetting...`);
process.exit(2);