diff --git a/config/config.json b/config/config.json
index ef0aad86e5..bb9dcf22df 100644
--- a/config/config.json
+++ b/config/config.json
@@ -508,9 +508,8 @@
         {
             "blockchain_title": "Polygon",
             "network_id": "polygon:mainnet",
-            "rpc_server_url": "https://rpc-mainnet.matic.network",
             "chain_id": 137,
-            "hub_contract_address": "",
+            "hub_contract_address": "0x86dB1592CD083dD7c758196dB55f317fed6A0a22",
             "identity_filepath": "polygon_erc725_identity.json",
             "gas_limit": "2000000",
             "gas_price": "1000000000",
diff --git a/modules/command/common/dataset-pruning-command.js b/modules/command/common/dataset-pruning-command.js
index 589da53a59..32e388b49c 100644
--- a/modules/command/common/dataset-pruning-command.js
+++ b/modules/command/common/dataset-pruning-command.js
@@ -20,7 +20,7 @@ class DatasetPruningCommand extends Command {
             this.logger.debug('Dataset pruning command ignored.');
             return Command.empty();
         }
-
+        this.logger.trace('Dataset pruning command started. This command will work in background and will try to remove expired and low estimated value datasets.');
         const datasets = await this.datasetPruningService.fetchDatasetData();
         const repackedDatasets = this.datasetPruningService.repackDatasets(datasets);
@@ -34,7 +34,9 @@
                 .replicated_pruning_delay_in_minutes,
         );
         const forked = fork('modules/worker/dataset-pruning-worker.js');
-
+        if (idsForPruning.datasetsToBeDeleted.length !== 0) {
+            this.logger.trace(`Removing ${idsForPruning.datasetsToBeDeleted.length} expired datasets.`);
+        }
         forked.send(JSON.stringify({
             selectedDatabase: this.config.database,
             idsForPruning,
@@ -43,7 +45,7 @@

         forked.on('message', async (response) => {
             if (response.error) {
-                this.logger.error(`Error while pruning datasets. Error message: ${response.error.message}`);
+                this.logger.error(`Error while pruning datasets. Error message: ${response.error.message}. Pruning command will be executed again in ${constants.DATASET_PRUNING_COMMAND_TIME_MILLS / (1000 * 60 * 60)} hours`);
                 forked.kill();
                 await this.addPruningCommandToExecutor();
                 return;
@@ -76,6 +78,7 @@
                 .getLowEstimatedValueIdsForPruning(repackedDatasets);

             if (idsForPruning.datasetsToBeDeleted.length !== 0) {
+                this.logger.trace(`Removing ${idsForPruning.datasetsToBeDeleted.length} low estimated value datasets.`);
                 forked.send(JSON.stringify({
                     selectedDatabase: this.config.database,
                     idsForPruning,
diff --git a/modules/service/dataset-pruning-service.js b/modules/service/dataset-pruning-service.js
index 1471b966cf..0f8fda82b9 100644
--- a/modules/service/dataset-pruning-service.js
+++ b/modules/service/dataset-pruning-service.js
@@ -244,7 +244,6 @@ class DatasetPruningService {
                    `Minimum size of Graph DB is 20% of total disk size. Current Graph DB folder size is: ${arangoDbEngineFolderSize}kb`);
                return false;
            }
-            this.logger.debug('Bulk pruning of low estimated datasets will be executed.');
            return true;
        } catch (error) {
            this.logger.error('Error while trying to determine should low estimated datasets be pruned. Error: ', error.message);
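Note, not part of the patch above: the pruning command hands the actual deletion to a forked worker process and talks to it over Node's child_process message channel, which is where the new trace and error logs are attached. A minimal, self-contained sketch of that parent-side flow follows; the payload contents and the success handling are simplified assumptions, and the worker path assumes the process is started from the repository root as in the patched code.

// sketch.js, an illustration only, not the actual dataset-pruning-command.js implementation
const { fork } = require('child_process');

// same worker module the command forks; relative path resolves against process.cwd()
const forked = fork('modules/worker/dataset-pruning-worker.js');

// the command serializes its payload before handing it to the worker
forked.send(JSON.stringify({
    selectedDatabase: { /* database connection settings, assumed shape */ },
    idsForPruning: { datasetsToBeDeleted: [] },
}));

// the worker replies with either an error or a success message;
// on error the real command kills the worker and re-schedules itself
forked.on('message', (response) => {
    if (response.error) {
        console.error(`Error while pruning datasets. Error message: ${response.error.message}`);
        forked.kill();
        return;
    }
    console.log('Dataset pruning completed.');
});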