Skip to content

Commit

Permalink
Version 2.0.33
Browse files Browse the repository at this point in the history
  • Loading branch information
kipliklotrika committed Dec 12, 2018
2 parents 754b504 + 4332861 commit db570c0
Show file tree
Hide file tree
Showing 14 changed files with 183 additions and 46 deletions.
8 changes: 4 additions & 4 deletions config/config.json
Original file line number Diff line number Diff line change
Expand Up @@ -195,7 +195,7 @@
"dh_max_holding_time_in_minutes": 1440,
"dh_min_token_price": "1",
"dh_min_litigation_interval_in_minutes": 5,
"deposit_on_demand": true,
"deposit_on_demand": false,
"dc_choose_time": 600000,
"requireApproval": false
},
Expand Down Expand Up @@ -295,7 +295,7 @@
"dh_max_holding_time_in_minutes": 10080,
"dh_min_token_price": "10000000000000000000",
"dh_min_litigation_interval_in_minutes": 5,
"deposit_on_demand": true,
"deposit_on_demand": false,
"dc_choose_time": 600000,
"requireApproval": false
},
Expand Down Expand Up @@ -398,7 +398,7 @@
"dh_max_holding_time_in_minutes": 10080,
"dh_min_token_price": "10000000000000000000",
"dh_min_litigation_interval_in_minutes": 5,
"deposit_on_demand": true,
"deposit_on_demand": false,
"dc_choose_time": 600000,
"requireApproval": false
},
Expand Down Expand Up @@ -501,7 +501,7 @@
"dh_max_holding_time_in_minutes": 10080,
"dh_min_token_price": "20000000000000000000",
"dh_min_litigation_interval_in_minutes": 5,
"deposit_on_demand": true,
"deposit_on_demand": false,
"dc_choose_time": 600000,
"requireApproval": true
}
Expand Down
9 changes: 7 additions & 2 deletions modules/Utilities.js
Original file line number Diff line number Diff line change
Expand Up @@ -253,9 +253,10 @@ class Utilities {
* @param web3 Instance of Web3
* @param wallet Address of the wallet.
* @param tokenContractAddress Contract address.
 * @param humanReadable Whether to format the result as a human-readable floating-point TRAC value (e.g. 0.3) instead of the raw wei amount.
 * @returns {Promise<string|Object>}
*/
static async getTracTokenBalance(web3, wallet, tokenContractAddress) {
static async getTracTokenBalance(web3, wallet, tokenContractAddress, humanReadable = true) {
const walletDenormalized = this.denormalizeHex(wallet);
// '0x70a08231' is the contract 'balanceOf()' ERC20 token function in hex.
const contractData = (`0x70a08231000000000000000000000000${walletDenormalized}`);
Expand All @@ -264,7 +265,11 @@ class Utilities {
data: contractData,
});
const tokensInWei = web3.utils.toBN(result).toString();
return web3.utils.fromWei(tokensInWei, 'ether');
if (humanReadable) {
return web3.utils.fromWei(tokensInWei, 'ether');
}

return tokensInWei;
}

/**
Expand Down
18 changes: 15 additions & 3 deletions modules/command/command-executor.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@ const Command = require('./command');

const sleep = require('sleep-async')().Promise;

const MAX_DELAY_IN_MILLS = 14400 * 60 * 1000; // 10 days

/**
* Command statuses
* @type {{failed: string, expired: string, started: string, pending: string, completed: string}}
Expand Down Expand Up @@ -106,7 +108,7 @@ class CommandExecutor {
const waitMs = (command.ready_at + command.delay) - now;
if (waitMs > 0) {
this.logger.trace(`Command ${command.name} with ID ${command.id} should be delayed`);
await this.add(command, waitMs, false);
await this.add(command, Math.min(waitMs, MAX_DELAY_IN_MILLS), false);
return;
}

Expand Down Expand Up @@ -192,6 +194,16 @@ class CommandExecutor {
* @param insert
*/
async add(command, delay = 0, insert = true) {
const now = Date.now();

if (delay != null && delay > MAX_DELAY_IN_MILLS) {
if (command.ready_at == null) {
command.ready_at = now;
}
command.ready_at += delay;
delay = MAX_DELAY_IN_MILLS;
}

if (insert) {
command = await this._insert(command);
}
Expand Down Expand Up @@ -243,9 +255,9 @@ class CommandExecutor {
command.sequence = command.sequence.slice(1);
}
if (!command.ready_at) {
command.ready_at = Date.now();
command.ready_at = Date.now(); // take current time
}
if (!command.delay) {
if (command.delay == null) {
command.delay = 0;
}
if (!command.transactional) {
Expand Down
2 changes: 1 addition & 1 deletion modules/command/common/cleaner-command.js
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ class CleanerCommand extends Command {
name: 'cleanerCommand',
data: {
},
period: 60 * 60 * 1000,
period: 4 * 24 * 60 * 60 * 1000,
transactional: false,
};
Object.assign(command, map);
Expand Down
40 changes: 40 additions & 0 deletions modules/service/rest-api-service.js
Original file line number Diff line number Diff line change
Expand Up @@ -138,10 +138,50 @@ class RestAPIService {
_exposeAPIRoutes(server) {
const {
importController, dcController, transport, emitter,
blockchain, web3, config,
} = this.ctx;

this._registerNodeInfoRoute(server, false);

server.get('/api/balance', async (req, res) => {
this.logger.api('Get balance.');

try {
const humanReadable = req.query.humanReadable === 'true';

const walletEthBalance = await web3.eth.getBalance(config.node_wallet);
const walletTokenBalance = await Utilities.getTracTokenBalance(
web3,
config.node_wallet,
blockchain.getTokenContractAddress(),
false,
);
const profile = await blockchain.getProfile(config.erc725Identity);
const profileMinimalStake = await blockchain.getProfileMinimumStake();

const body = {
wallet: {
address: config.node_wallet,
ethBalance: humanReadable ? web3.utils.fromWei(walletEthBalance, 'ether') : walletEthBalance,
tokenBalance: humanReadable ? web3.utils.fromWei(walletTokenBalance, 'ether') : walletTokenBalance,
},
profile: {
staked: humanReadable ? web3.utils.fromWei(profile.stake, 'ether') : profile.stake,
reserved: humanReadable ? web3.utils.fromWei(profile.stakeReserved, 'ether') : profile.stakeReserved,
minimalStake: humanReadable ? web3.utils.fromWei(profileMinimalStake, 'ether') : profileMinimalStake,
},
};

res.status(200);
res.send(body);
} catch (error) {
this.logger.error(`Failed to get balance. ${error.message}.`);
res.status(503);
res.send({});
}
});


/**
* Data import route
* @param importfile - file or text data
Expand Down
2 changes: 1 addition & 1 deletion package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "origintrail_node",
"version": "2.0.32",
"version": "2.0.33",
"description": "OriginTrail node",
"main": ".eslintrc.js",
"config": {
Expand Down
24 changes: 23 additions & 1 deletion postman/OriginTrail.postman_collection.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"info": {
"_postman_id": "eb8e304f-5b3e-4272-b420-14ab73000f2f",
"_postman_id": "07bdc6bb-9620-41a9-b6b0-904949a48586",
"name": "OriginTrail",
"schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json"
},
Expand Down Expand Up @@ -485,6 +485,28 @@
},
"response": []
},
{
"name": "api/info",
"request": {
"method": "GET",
"header": [],
"body": {
"mode": "raw",
"raw": ""
},
"url": {
"raw": "{{baseUrl}}/api/info",
"host": [
"{{baseUrl}}"
],
"path": [
"api",
"info"
]
}
},
"response": []
},
{
"name": "/api/consensus/{{sender_id}}",
"request": {
Expand Down
44 changes: 36 additions & 8 deletions test/bdd/features/datalayer.feature
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,8 @@ Feature: Data layer related features
And I start the nodes
And I use 1st node as DC
And DC imports "importers/xml_examples/Basic/01_Green_to_pink_shipment.xml" as GS1
Given I query DC node locally with path: "identifiers.id", value: "urn:epc:id:sgtin:Batch_1" and opcode: "EQ"
Given I create json query with path: "identifiers.id", value: "urn:epc:id:sgtin:Batch_1" and opcode: "EQ"
And DC node makes local query with previous json query
Then response should contain only last imported data set id
Given I query DC node locally for last imported data set id
Then response hash should match last imported data set id
Expand All @@ -37,17 +38,20 @@ Feature: Data layer related features
Then imported data is compliant with 01_Green_to_pink_shipment.xml file

@second
Scenario: Dataset immutability I
Given I setup 1 node
Scenario: Dataset immutability DC and DH side
Given I setup 5 node
And I start the node
And I use 1st node as DC
And DC imports "importers/xml_examples/Basic/01_Green_to_pink_shipment.xml" as GS1
Given DC initiates the replication for last imported dataset
And DC waits for last offer to get written to blockchain
And I wait for replications to finish
And DC imports "importers/xml_examples/Retail/01_Green_to_pink_shipment.xml" as GS1
Given DC initiates the replication for last imported dataset
And DC waits for last offer to get written to blockchain
And I wait for replications to finish
Then DC's 2 dataset hashes should match blockchain values
And I use 2nd node as DH
Then DH's 2 dataset hashes should match blockchain values


@second
Scenario: Dataset immutability II
Expand Down Expand Up @@ -77,12 +81,12 @@ Feature: Data layer related features
And I use 2nd node as DV
Given DV publishes query consisting of path: "identifiers.id", value: "urn:epc:id:sgtin:Batch_1" and opcode: "EQ" to the network
Then all nodes with last import should answer to last network query by DV
Given the DV purchases import from the last query from the DC
Given the DV purchases last import from the last query from the DC
Given I query DV node locally for last imported data set id
Then DV's local query response should contain hashed private attributes

@second
Scenario: Remote event connection on DH
Scenario: Remote event connection on DH and DV
Given I setup 5 nodes
And I start the nodes
And I use 1st node as DC
Expand All @@ -96,4 +100,28 @@ Feature: Data layer related features
Given DH calls consensus endpoint for sender: "urn:ot:object:actor:id:Company_Green"
Then last consensus response should have 1 event with 1 match
Given DH calls consensus endpoint for sender: "urn:ot:object:actor:id:Company_Pink"
Then last consensus response should have 1 event with 1 match
Then last consensus response should have 1 event with 1 match
Given I additionally setup 1 node
And I start additional nodes
And I use 6th node as DV
Given DV publishes query consisting of path: "identifiers.id", value: "urn:epc:id:sgtin:Batch_1" and opcode: "EQ" to the network
Then all nodes with last import should answer to last network query by DV
And the DV purchases last import from the last query from a DH
Given DV publishes query consisting of path: "uid", value: "urn:epc:id:sgln:Building_Green_V1" and opcode: "EQ" to the network
Then all nodes with second last import should answer to last network query by DV
And the DV purchases second last import from the last query from a DH
And DV calls consensus endpoint for sender: "urn:ot:object:actor:id:Company_Pink"
Then last consensus response should have 1 event with 1 match
And DV calls consensus endpoint for sender: "urn:ot:object:actor:id:Company_Green"
Then last consensus response should have 1 event with 1 match

@second
Scenario: Data location with multiple identifiers
Given I setup 1 node
And I start the node
And I use 1st node as DC
And DC imports "test/modules/test_xml/MultipleIdentifiers.xml" as GS1
Given I create json query with path: "identifiers.uid", value: "urn:ot:object:product:id:P1" and opcode: "EQ"
And I append json query with path: "identifiers.ean13", value: "1234567890123" and opcode: "EQ"
Given DC node makes local query with previous json query
Then response should contain only last imported data set id
8 changes: 4 additions & 4 deletions test/bdd/features/network.feature
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ Feature: Test basic network features
And I use 6th node as DV
Given DV publishes query consisting of path: "identifiers.id", value: "urn:epc:id:sgtin:Batch_1" and opcode: "EQ" to the network
Then all nodes with last import should answer to last network query by DV
Given the DV purchases import from the last query from a DH
Given the DV purchases last import from the last query from a DH
Then the last import should be the same on DC and DV nodes
Then DV's last purchase's hash should be the same as one manually calculated

Expand All @@ -57,7 +57,7 @@ Feature: Test basic network features
And I use 2nd node as DV
Given DV publishes query consisting of path: "identifiers.id", value: "urn:epc:id:sgtin:Batch_1" and opcode: "EQ" to the network
Then all nodes with last import should answer to last network query by DV
Given the DV purchases import from the last query from the DC
Given the DV purchases last import from the last query from the DC
Then the last import should be the same on DC and DV nodes

@first
Expand All @@ -75,14 +75,14 @@ Feature: Test basic network features
And I use 2nd node as DV
Given DV publishes query consisting of path: "identifiers.id", value: "urn:epc:id:sgtin:Batch_1" and opcode: "EQ" to the network
Then all nodes with last import should answer to last network query by DV
Given the DV purchases import from the last query from the DC
Given the DV purchases last import from the last query from the DC
Then the last import should be the same on DC and DV nodes
Given I additionally setup 1 node
And I start additional nodes
And I use 3rd node as DV2
Given DV2 publishes query consisting of path: "identifiers.id", value: "urn:epc:id:sgtin:Batch_1" and opcode: "EQ" to the network
Then all nodes with last import should answer to last network query by DV2
Given the DV2 purchases import from the last query from a DV
Given the DV2 purchases last import from the last query from a DV
Then the last import should be the same on DC and DV nodes
Then the last import should be the same on DC and DV2 nodes

Expand Down
2 changes: 1 addition & 1 deletion test/bdd/features/protocol-issues.feature
Original file line number Diff line number Diff line change
Expand Up @@ -23,5 +23,5 @@ Feature: Protocol related issues.
And I start the 7th node
And I use 7th node as DV
Given DV publishes query consisting of path: "identifiers.id", value: "urn:epc:id:sgtin:Batch_1" and opcode: "EQ" to the network
And the DV purchases import from the last query from the DC
And the DV purchases last import from the last query from the DC
Then the last import should be the same on DC and DV nodes
13 changes: 7 additions & 6 deletions test/bdd/steps/datalayer.js
Original file line number Diff line number Diff line change
Expand Up @@ -71,21 +71,22 @@ Then(/^imported data is compliant with 01_Green_to_pink_shipment.xml file$/, asy
).to.be.above(0);
});

Then(/^DC's (\d+) dataset hashes should match blockchain values$/, async function (datasetsCount) {
expect(!!this.state.dc, 'DC node not defined. Use other step to define it.').to.be.equal(true);
Then(/^(DC|DH)'s (\d+) dataset hashes should match blockchain values$/, async function (nodeType, datasetsCount) {
expect(nodeType, 'Node type can only be DC or DH').to.be.oneOf(['DC', 'DH']);
expect(!!this.state[nodeType.toLowerCase()], 'DC/DH node not defined. Use other step to define it.').to.be.equal(true);
expect(datasetsCount >= 1, 'datasetsCount should be positive integer').to.be.true;

const { dc } = this.state;
const myApiImportsInfo = await httpApiHelper.apiImportsInfo(dc.state.node_rpc_url);
const myNode = this.state[nodeType.toLowerCase()];
const myApiImportsInfo = await httpApiHelper.apiImportsInfo(myNode.state.node_rpc_url);
expect(myApiImportsInfo.length, 'We should have preciselly this many datasets').to.be.equal(datasetsCount);

for (const i in Array.from({ length: myApiImportsInfo.length })) {
const myDataSetId = myApiImportsInfo[i].data_set_id;
const myFingerprint = await httpApiHelper.apiFingerprint(dc.state.node_rpc_url, myDataSetId);
const myFingerprint = await httpApiHelper.apiFingerprint(myNode.state.node_rpc_url, myDataSetId);
expect(utilities.isZeroHash(myFingerprint.root_hash), 'root hash value should not be zero hash').to.be.equal(false);


const myEdgesVertices = await httpApiHelper.apiQueryLocalImportByDataSetId(dc.state.node_rpc_url, myDataSetId);
const myEdgesVertices = await httpApiHelper.apiQueryLocalImportByDataSetId(myNode.state.node_rpc_url, myDataSetId);
expect(myEdgesVertices, 'Should have corresponding keys').to.have.keys(['edges', 'vertices']);

const calculatedImportHash = utilities.calculateImportHash(myEdgesVertices);
Expand Down
Loading

0 comments on commit db570c0

Please sign in to comment.