Skip to content

Commit

Permalink
Merge pull request #1144 from OriginTrail/prerelease/mainnet
Browse files Browse the repository at this point in the history
OriginTrail Release v4.0.3
  • Loading branch information
Kuki145 authored Jan 27, 2020
2 parents e9a9ed2 + 5d71505 commit 4a49eb0
Show file tree
Hide file tree
Showing 31 changed files with 604 additions and 447 deletions.
21 changes: 21 additions & 0 deletions .github/release-drafter.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
# Configuration for the release-drafter action: builds a draft GitHub Release
# whose notes are assembled from merged pull requests.
name-template: 'v$NEXT_PATCH_VERSION 🌈'
tag-template: 'v$NEXT_PATCH_VERSION'
# PRs are grouped into sections of the draft by their labels.
categories:
  - title: '🚀 Features'
    labels:
      - 'feature'
      - 'enhancement'
  - title: '🐛 Bug Fixes'
    labels:
      - 'fix'
      - 'bugfix'
      - 'bug'
  - title: '🧰 Maintenance'
    labels:
      - 'chore'
      - 'internal process'
# How each merged PR is rendered as a line in the draft body.
change-template: '- $TITLE @$AUTHOR (#$NUMBER)'
# Overall body of the draft release ($CHANGES expands to the grouped lines).
template: |
  ## Changes
  $CHANGES
17 changes: 17 additions & 0 deletions .github/workflows/release-drafter.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# Keeps a draft GitHub Release up to date as pull requests are merged.
name: Release Drafter

on:
  push:
    # branches to consider in the event; optional, defaults to all
    branches:
      - develop

jobs:
  update_release_draft:
    runs-on: ubuntu-latest
    steps:
      # Drafts your next Release notes as Pull Requests are merged into "develop"
      - uses: release-drafter/release-drafter@v5
        # (Optional) specify config name to use, relative to .github/. Default: release-drafter.yml
        # with:
        #   config-name: my-config.yml
        env:
          # Required: release-drafter uses this token to read merged PRs and
          # create/update the draft release via the GitHub API.
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
34 changes: 34 additions & 0 deletions modules/ImportUtilities.js
Original file line number Diff line number Diff line change
Expand Up @@ -574,6 +574,40 @@ class ImportUtilities {

return header;
}

/**
* Extract Dataset creator identifier value from OT-JSON or graph header
* @static
* @param datasetHeader Header of the dataset in which the dataCreator field exists
* @returns String - Dataset creator identifier value (Currently ERC725 Identity)
*/
static getDataCreator(datasetHeader) {
return datasetHeader.dataCreator.identifiers[0].identifierValue;
}

/**
* Process successfull import
* @static
* @param unpack Unpack keys
* @param objects Graph vertices and edges
* @return {Promise<>}
*/
static unpackKeysAndSortVertices(objects, unpack = false) {
let {
vertices, edges,
} = objects;
if (unpack) {
ImportUtilities.unpackKeys(vertices, edges);
}

edges = Graph.sortVertices(edges);
vertices = Graph.sortVertices(vertices);

return {
vertices,
edges,
};
}
}

module.exports = ImportUtilities;
2 changes: 0 additions & 2 deletions modules/command/dc/dc-challenges-command.js
Original file line number Diff line number Diff line change
Expand Up @@ -38,15 +38,13 @@ class DCChallengesCommand extends Command {
offer_id: challenge.offer_id,
},
});

if (challenged.status !== 'HOLDING') {
return;
}

challenge.status = 'IN_PROGRESS';
await challenge.save({ fields: ['status'] });


challenged.status = 'CHALLENGING';
await challenged.save({ fields: ['status'] });

Expand Down
1 change: 1 addition & 0 deletions modules/command/dc/dc-convert-to-graph-command.js
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ class DcConvertToGraphCommand extends Command {
data: {
error: { message: error.message },
handler_id: command.data.handler_id,
documentPath: command.data.documentPath,
},
});
}
Expand Down
17 changes: 13 additions & 4 deletions modules/command/dc/dc-convert-to-ot-json-command.js
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
const path = require('path');
const fs = require('fs');
const Command = require('../command');
const ImportUtilities = require('../../ImportUtilities');

Expand All @@ -16,11 +18,17 @@ class DcConvertToOtJsonCommand extends Command {
* @param command
*/
async execute(command) {
const { standard_id } = command.data;
const { standard_id, documentPath, handler_id } = command.data;
try {
if (standard_id === 'ot-json') {
command.data.document = JSON.parse(command.data.document);
if (!command.data.document.signature) { command.data.document = ImportUtilities.prepareDataset(command.data.document['@graph'], this.config, this.web3); }
let document = JSON.parse(fs.readFileSync(documentPath, { encoding: 'utf-8' }));

if (!document.signature) {
document = ImportUtilities.prepareDataset(document['@graph'], this.config, this.web3);
}

fs.writeFileSync(documentPath, JSON.stringify(document));

return this.continueSequence(command.data, command.sequence);
}
await this.importWorkerController.startOtjsonConverterWorker(command, standard_id);
Expand All @@ -31,7 +39,8 @@ class DcConvertToOtJsonCommand extends Command {
transactional: false,
data: {
error: { message: error.message },
handler_id: command.data.handler_id,
handler_id,
documentPath,
},
});
}
Expand Down
87 changes: 26 additions & 61 deletions modules/command/dc/dc-finalize-import-command.js
Original file line number Diff line number Diff line change
@@ -1,10 +1,7 @@
const fs = require('fs');
const Command = require('../command');
const Models = require('../../../models');
const bytes = require('utf8-length');
const Utilities = require('../../Utilities');
const { sha3_256 } = require('js-sha3');
const ImportUtilities = require('../../ImportUtilities');
const Graph = require('../../Graph');

class DcFinalizeImport extends Command {
constructor(ctx) {
Expand All @@ -20,38 +17,38 @@ class DcFinalizeImport extends Command {
* @param command
*/
async execute(command) {
const { afterImportData, error } = command.data;
if (error) {
await this._processError(error, command.data.handler_id);
return Command.empty();
}
const response = await this._unpackKeysAndSortVertices(afterImportData);

const {
handler_id, otjson_size_in_bytes, total_documents, purchased,
} = afterImportData;
const {
error,
handler_id,
data_set_id,
data_provider_wallet,
purchased,
documentPath,
root_hash,
wallet, // TODO: Sender's wallet is ignored for now.
vertices,
edges,
} = response;
data_hash,
otjson_size_in_bytes,
total_documents,
} = command.data;

await Utilities.deleteDirectory(documentPath);

if (error) {
await this._processError(error, handler_id, documentPath);
return Command.empty();
}

try {
const importTimestamp = new Date();
const graphObject = {};
Object.assign(graphObject, { vertices, edges });
const dataHash = Utilities.normalizeHex(sha3_256(`${graphObject}`));
const import_timestamp = new Date();
this.remoteControl.importRequestData();
await Models.data_info.create({
data_set_id,
root_hash,
data_provider_wallet: this.config.node_wallet,
import_timestamp: importTimestamp,
data_provider_wallet: data_provider_wallet || this.config.node_wallet,
import_timestamp,
total_documents,
origin: purchased ? 'PURCHASED' : 'IMPORTED',
otjson_size_in_bytes,
data_hash: dataHash,
data_hash,
}).catch(async (error) => {
this.logger.error(error);
this.notifyError(error);
Expand All @@ -76,12 +73,10 @@ class DcFinalizeImport extends Command {
status: 'COMPLETED',
data: JSON.stringify({
dataset_id: data_set_id,
import_time: importTimestamp.valueOf(),
import_time: import_timestamp.valueOf(),
otjson_size_in_bytes,
root_hash,
data_hash: dataHash,
total_graph_entities: vertices.length
+ edges.length,
data_hash,
}),
},
{
Expand All @@ -90,6 +85,7 @@ class DcFinalizeImport extends Command {
},
},
);

this.logger.info('Import complete');
this.logger.info(`Root hash: ${root_hash}`);
this.logger.info(`Data set ID: ${data_set_id}`);
Expand Down Expand Up @@ -130,7 +126,7 @@ class DcFinalizeImport extends Command {
return command;
}

async _processError(error, handlerId) {
async _processError(error, handlerId, documentPath) {
this.logger.error(error.message);
await Models.handler_ids.update(
{
Expand All @@ -151,37 +147,6 @@ class DcFinalizeImport extends Command {
this.notifyError(error);
}
}

/**
* Process successfull import
* @param unpack Unpack keys
* @param result Import result
* @return {Promise<>}
*/
_unpackKeysAndSortVertices(result, unpack = false) {
this.remoteControl.importRequestData();
const {
data_set_id, wallet, root_hash,
} = result;
let {
vertices, edges,
} = result;
if (unpack) {
ImportUtilities.unpackKeys(vertices, edges);
}

edges = Graph.sortVertices(edges);
vertices = Graph.sortVertices(vertices);

return {
data_set_id,
root_hash,
total_documents: edges.length + vertices.length,
vertices,
edges,
wallet,
};
}
}

module.exports = DcFinalizeImport;
43 changes: 10 additions & 33 deletions modules/command/dc/dc-litigation-completed-command.js
Original file line number Diff line number Diff line change
Expand Up @@ -66,40 +66,12 @@ class DCLitigationCompletedCommand extends Command {
});
this.logger.info(`Challenges removed for DH with identity ${dhIdentity} and offer ${offerId}.`);

const offer = await models.offers.findOne({
where: {
offer_id: offerId,
},
});

offer.global_status = 'REPLACEMENT_STARTED';
await offer.save({ fields: ['global_status'] });
this.remoteControl.offerUpdate({
offer_id: offerId,
});

await models.reputation_data.create({
dh_identity: dhIdentity,
offer_id: offerId,
reputation_delta: '-1',
timestamp: Date.now(),
});

return {
commands: [
{
data: {
offerId,
dhIdentity,
},
name: 'dcLitigationReplacementStartedCommand',
delay: 0,
period: 5000,
deadline_at: Date.now() + (5 * 60 * 1000),
transactional: false,
},
],
};
}

const offer = await models.offers.findOne({
Expand All @@ -108,12 +80,17 @@ class DCLitigationCompletedCommand extends Command {
},
});

offer.global_status = 'ACTIVE';
await offer.save({ fields: ['global_status'] });
this.remoteControl.offerUpdate({
offer_id: offerId,
const holdingCount = await models.replicated_data.count({
where: { offer_id: offerId, status: 'HOLDING' },
});
this.logger.important(`DH ${dhIdentity} has successfully answered litigation.`);

if (holdingCount === 0) {
offer.global_status = 'FAILED';
await offer.save({ fields: ['global_status'] });
this.remoteControl.offerUpdate({
offer_id: offerId,
});
}
return Command.empty();
}
}
Expand Down
18 changes: 12 additions & 6 deletions modules/command/dc/dc-litigation-initiate-command.js
Original file line number Diff line number Diff line change
Expand Up @@ -50,17 +50,23 @@ class DCLitigationInitiateCommand extends Command {
return Command.empty();
}

if (offer.global_status !== 'ACTIVE') {
const replicatedData = await models.replicated_data.findOne({
where: { offer_id: offerId, dh_identity: dhIdentity },
});

if (replicatedData.status === 'PENALIZED') {
this.logger.trace(`Holder with id: ${dhIdentity} for offer ${offerId} was already penalized`);
return Command.empty();
}

if (replicatedData.status !== 'CHALLENGING') {
// litigation or replacement is in progress
this.logger.trace(`Litigation already in progress... It needs to be completed in order to litigate ${dhIdentity} for offer ${offerId}`);
return Command.repeat(); // wait for offer to be active
}

offer.global_status = 'LITIGATION_INITIATED';
await offer.save(({ fields: ['global_status'] }));
this.remoteControl.offerUpdate({
offer_id: offerId,
});
replicatedData.status = 'LITIGATION_STARTED';
await replicatedData.save({ fields: ['status'] });

const dcIdentity = utilities.normalizeHex(this.config.erc725Identity);
const otJson = await this.importService.getImport(offer.data_set_id);
Expand Down
6 changes: 0 additions & 6 deletions modules/command/dc/dc-litigation-initiated-command.js
Original file line number Diff line number Diff line change
Expand Up @@ -48,12 +48,6 @@ class DcLitigationInitiatedCommand extends Command {

this.logger.important(`Litigation initiated for DH ${dhIdentity} and offer ${offerId}.`);

const replicatedData = await Models.replicated_data.findOne({
where: { offer_id: offerId, dh_identity: dhIdentity },
});
replicatedData.status = 'LITIGATION_STARTED';
await replicatedData.save({ fields: ['status'] });

const offer = await Models.offers.findOne({
where: { offer_id: offerId },
});
Expand Down
Loading

0 comments on commit 4a49eb0

Please sign in to comment.