Merge pull request #1934 from OriginTrail/v6/prerelease/testnet
OriginTrail 6.0.0-beta.1.35 Testnet Release
kotlarmilos authored Apr 20, 2022
2 parents e5abb32 + 4845e09 commit d846179
Showing 18 changed files with 2,140 additions and 156 deletions.
3 changes: 2 additions & 1 deletion Alpine.Dockerfile
@@ -9,8 +9,9 @@ RUN tar xzf ./remote_syslog_linux_amd64.tar.gz && cd remote_syslog && cp ./remot

 COPY config/papertrail.yml /etc/log_files.yml
 
-#Install nodemon & forever
+#Install nodemon, git & forever
 RUN npm install forever -g
+RUN apk add git
 
 WORKDIR /ot-node
 
43 changes: 1 addition & 42 deletions external/blazegraph-service.js
@@ -109,48 +109,7 @@ class BlazegraphService {
 ?s ?p ?o
 }
 }`;
-let nquads = await this.construct(query);
-
-if (nquads.length) {
-nquads = nquads.toString();
-nquads = nquads.split('\n');
-nquads = nquads.filter((x) => x !== '');
-nquads = await this.transformBlankNodes(nquads);
-} else {
-nquads = null;
-}
-return nquads;
-}
-
-async transformBlankNodes(nquads) {
-// Find minimum blank node value to assign it to _:c14n0
-let minimumBlankNodeValue = -1;
-for (const nquad of nquads) {
-if (nquad.includes('_:t')) {
-const blankNodes = nquad.split(' ').filter((s) => s.includes('_:t'));
-for (const bn of blankNodes) {
-const bnValue = Number(bn.substring(3));
-if (minimumBlankNodeValue === -1 || minimumBlankNodeValue > bnValue) {
-minimumBlankNodeValue = bnValue;
-}
-}
-}
-}
-
-// Transform blank nodes, example: _:t145 -> _:c14n3
-let bnName;
-for (const nquadIndex in nquads) {
-const nquad = nquads[nquadIndex];
-if (nquad.includes('_:t')) {
-const blankNodes = nquad.split(' ').filter((s) => s.includes('_:t'));
-for (const bn of blankNodes) {
-const bnValue = Number(bn.substring(3));
-bnName = `_:c14n${bnValue - minimumBlankNodeValue}`;
-nquads[nquadIndex] = nquads[nquadIndex].replace(bn, bnName);
-}
-}
-}
-
+const nquads = await this.construct(query);
 return nquads;
 }
 
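Both this change and the matching one in external/graphdb-service.js further down leave resolve returning the raw CONSTRUCT response rather than a pre-split, filtered array of N-Quad lines. A minimal caller-side sketch of how the old array form could still be recovered if needed; the helper name and the tripleStore variable are hypothetical, only the resolve/construct behaviour comes from this diff:

// Hypothetical helper: turn the raw N-Quads payload returned by resolve()
// back into an array of non-empty lines, as the service itself used to do.
async function resolveAsLines(tripleStore, uri) {
    const raw = await tripleStore.resolve(uri);
    if (!raw || !raw.length) {
        return null;
    }
    return raw
        .toString()
        .split('\n')
        .filter((line) => line !== '');
}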
202 changes: 202 additions & 0 deletions external/fuseki-service.js
@@ -0,0 +1,202 @@
const axios = require('axios');
const qs = require('qs');
const constants = require('../modules/constants');

class FusekiService {
constructor(config) {
this.config = config;
}

async initialize(logger) {
this.logger = logger;
this.config.axios = {
method: 'post',
url: `${this.config.url}/${this.config.repositoryName}`,
};
this.logger.info('Fuseki module initialized successfully');
}

async insert(triples, rootHash) {
const askQuery = `ASK WHERE { GRAPH <${rootHash}> { ?s ?p ?o } }`;
const exists = await this.ask(askQuery);
if (!exists) {
this.config.axios = {
method: 'put',
url: `${this.config.url}/${this.config.repositoryName}/data?graph=${rootHash}`,
headers: {
'Content-Type': 'application/n-quads',
},
data: triples,
};

await axios(this.config.axios).then(() => true)
.catch((error) => {
this.logger.error({
msg: `Failed to write into Fuseki: ${error} - ${error.stack}`,
Event_name: constants.ERROR_TYPE.TRIPLE_STORE_INSERT_ERROR,
});
return false;
});
}
// TODO: else -> Should log if we already have data
}

async execute(query) {
return new Promise(async (accept, reject) => {
const data = qs.stringify({
query,
});
this.config.axios = {
method: 'post',
url: `${this.config.url}/${this.config.repositoryName}/sparql`,
headers: {
Accept: 'application/sparql-results+json',
'Content-Type': 'application/x-www-form-urlencoded',
},
data,
};
axios(this.config.axios).then((response) => {
accept(response.data);
}).catch((e) => reject(e));
});
}

async construct(query) {
return new Promise(async (accept, reject) => {
const data = qs.stringify({
query,
});
this.config.axios = {
method: 'post',
url: `${this.config.url}/${this.config.repositoryName}/sparql`,
headers: {
Accept: 'application/n-quads',
'Content-Type': 'application/x-www-form-urlencoded',
},
data,
};
axios(this.config.axios).then((response) => {
accept(response.data);
}).catch((e) => reject(e));
});
}

async ask(query) {
return new Promise(async (accept, reject) => {
const data = qs.stringify({
query,
});
this.config.axios = {
method: 'post',
url: `${this.config.url}/${this.config.repositoryName}/sparql`,
headers: {
Accept: 'application/json',
'Content-Type': 'application/x-www-form-urlencoded',
},
data,
};
axios(this.config.axios).then((response) => {
accept(response.data.boolean);
}).catch((e) => reject(e));
});
}

async resolve(uri) {
const query = `PREFIX schema: <http://schema.org/>
CONSTRUCT { ?s ?p ?o }
WHERE {
GRAPH <${constants.DID_PREFIX}:${uri}> {
?s ?p ?o
}
}`;
const nquads = await this.construct(query);
return nquads;
}

async assertionsByAsset(uri) {
const query = `PREFIX schema: <http://schema.org/>
SELECT ?assertionId ?issuer ?timestamp
WHERE {
?assertionId schema:hasUALs "${uri}" ;
schema:hasTimestamp ?timestamp ;
schema:hasIssuer ?issuer .
}
ORDER BY DESC(?timestamp)`;
const result = await this.execute(query);

return result.results.bindings;
}

async findAssertions(nquads) {
const query = `SELECT ?g
WHERE {
GRAPH ?g {
${nquads}
}
}`;
let graph = await this.execute(query);
graph = graph.results.bindings.map((x) => x.g.value.replace(`${constants.DID_PREFIX}:`, ''));
if (graph.length && graph[0] === 'http://www.bigdata.com/rdf#nullGraph') {
return [];
}
return graph;
}

async findAssertionsByKeyword(query, options, localQuery) {
const sparqlQuery = `PREFIX schema: <http://schema.org/>
SELECT distinct ?assertionId
WHERE {
?assertionId schema:hasKeywords ?keyword .
${!localQuery ? ' ?assertionId schema:hasVisibility "public" .' : ''}
${options.prefix ? `FILTER contains(lcase(?keyword),'${query}')` : `FILTER (lcase(?keyword) = '${query}')`}
}
${options.limit ? `LIMIT ${options.limit}` : ''}`;
const result = await this.execute(sparqlQuery);
return result.results.bindings;
}

async findAssetsByKeyword(query, options, localQuery) {
const sparqlQuery = `PREFIX schema: <http://schema.org/>
SELECT ?assertionId ?assetId
WHERE {
?assertionId schema:hasTimestamp ?latestTimestamp ;
${!localQuery ? 'schema:hasVisibility "public" ;' : ''}
schema:hasUALs ?assetId .
{
SELECT ?assetId (MAX(?timestamp) AS ?latestTimestamp)
WHERE {
?assertionId schema:hasKeywords ?keyword ;
schema:hasIssuer ?issuer ;
schema:hasType ?type ;
schema:hasTimestamp ?timestamp ;
schema:hasUALs ?assetId .
${options.prefix ? `FILTER contains(lcase(?keyword),'${query}')` : `FILTER (lcase(?keyword) = '${query}')`}
${options.issuers ? `FILTER (?issuer IN (${JSON.stringify(options.issuers).slice(1, -1)}))` : ''}
${options.types ? `FILTER (?type IN (${JSON.stringify(options.types).slice(1, -1)}))` : ''}
}
GROUP BY ?assetId
${options.limit ? `LIMIT ${options.limit}` : ''}
}
}`;
const result = await this.execute(sparqlQuery);
return result.results.bindings;
}

async healthCheck() {
try {
const response = await axios.get(`${this.config.url}/$/ping`, {});
if (response.data !== null) {
return true;
}
return false;
} catch (e) {
return false;
}
}

getName() {
return 'Fuseki';
}
}

module.exports = FusekiService;
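A minimal usage sketch for the new service follows. The endpoint URL, dataset name, logger, and sample triple are illustrative assumptions, not values taken from the node's configuration:

const FusekiService = require('./external/fuseki-service');

(async () => {
    const fuseki = new FusekiService({
        url: 'http://localhost:3030', // assumed local Fuseki endpoint
        repositoryName: 'node0', // assumed dataset name
    });
    await fuseki.initialize(console); // any logger exposing info/error works here

    // Store a triple under a named graph; the rootHash becomes the graph name.
    // resolve() prepends constants.DID_PREFIX + ':' to the uri it receives,
    // so the value used here would need to follow that convention.
    await fuseki.insert(
        '<did:example:asset1> <http://schema.org/hasKeywords> "origintrail" .',
        'did:example:root-hash',
    );

    // Keyword lookup and liveness probe.
    const hits = await fuseki.findAssertionsByKeyword('origintrail', { limit: 5, prefix: true }, true);
    console.log(hits.length, 'assertion(s) found');
    console.log('healthy:', await fuseki.healthCheck());
})();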
11 changes: 1 addition & 10 deletions external/graphdb-service.js
@@ -127,16 +127,7 @@ class GraphdbService {
 ?s ?p ?o
 }
 }`;
-let nquads = await this.construct(query);
-
-if (nquads.length) {
-nquads = nquads.toString();
-nquads = nquads.replace(/_:genid(.){37}/gm, '_:$1');
-nquads = nquads.split('\n');
-nquads = nquads.filter((x) => x !== '');
-} else {
-nquads = null;
-}
+const nquads = await this.construct(query);
 return nquads;
 }

1 change: 0 additions & 1 deletion external/libp2p-service.js
@@ -8,7 +8,6 @@ const TCP = require('libp2p-tcp');
 const pipe = require('it-pipe');
 const {sha256} = require('multiformats/hashes/sha2');
 const PeerId = require("peer-id");
-const fs = require('fs');
 const { BufferList } = require('bl')
 const { InMemoryRateLimiter } = require("rolling-rate-limiter");
 const constants = require('../modules/constants');
17 changes: 8 additions & 9 deletions index.js
@@ -39,31 +39,30 @@ config = rc(pjson.name, defaultConfig);
 process.exit(1);
 }
 
-const backupCode = `${config.autoUpdate.backupDirectory}/AutoGitUpdate/backup`;
-if (fs.ensureDir(backupCode)) {
+const backupCodeDirectory = path.join(config.autoUpdate.backupDirectory, 'auto-update', 'backup');
+if (fs.ensureDir(backupCodeDirectory)) {
 console.log('Starting back old version of OT-Node.');
 
-const source = path.join(config.autoUpdate.backupDirectory, 'AutoGitUpdate', 'backup');
 const destination = appRootPath.path;
 await fs.ensureDir(destination);
-await fs.copy(source, destination);
+await fs.copy(backupCodeDirectory, destination);
 
 await new Promise((resolve, reject) => {
-const command = `cd ${destination} && npm install`;
+const command = `cd ${destination} && npm install --omit=dev`;
 const child = exec(command);
 
 // Wait for results
 child.stdout.on('end', resolve);
-child.stdout.on('data', (data) => console.log(`Auto Git Update - npm install: ${data.replace(/\r?\n|\r/g, '')}`));
+child.stdout.on('data', (data) => console.log(`AutoUpdater - npm install --omit=dev: ${data.replace(/\r?\n|\r/g, '')}`));
 child.stderr.on('data', (data) => {
 if (data.toLowerCase().includes('error')) {
 // npm passes warnings as errors, only reject if "error" is included
 data = data.replace(/\r?\n|\r/g, '');
-console.error('Auto Git Update - Error installing dependencies');
-console.error(`Auto Git Update - ${data}`);
+console.error('AutoUpdater - Error installing dependencies');
+console.error(`AutoUpdater - ${data}`);
 reject();
 } else {
-console.log(`Auto Git Update - ${data}`);
+console.log(`AutoUpdater - ${data}`);
 }
 });
 });
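One note for readers unfamiliar with fs-extra: ensureDir creates the directory when it is missing and returns a Promise, which is always truthy when tested without await, while pathExists is the call that resolves to a boolean. A small illustrative sketch of the difference, kept separate from the committed code; the backupExists name and the config shape are placeholders:

const fs = require('fs-extra');
const path = require('path');

// Placeholder helper; index.js reads the real values from its rc configuration.
async function backupExists(config) {
    const backupCodeDirectory = path.join(
        config.autoUpdate.backupDirectory,
        'auto-update',
        'backup',
    );
    // pathExists resolves to true/false without creating anything,
    // whereas ensureDir would create the directory and resolve with undefined.
    return fs.pathExists(backupCodeDirectory);
}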
17 changes: 17 additions & 0 deletions installer/data/fuseki.service
@@ -0,0 +1,17 @@
#/lib/systemd/system/fuseki.service

[Unit]
Description=Fuseki - OriginTrail V6 Stage 1 Beta Node
Documentation=https://github.com/OriginTrail/ot-node
After=network.target

[Service]
Environment=JVM_ARGS=-Xmx4G
Type=simple
User=root
WorkingDirectory=/root/fuseki
ExecStart=/usr/bin/java -jar /root/fuseki/fuseki-server.jar --update --set tdb:unionDefaultGraph=true --loc /root/fuseki/tdb /node0
Restart=on-failure

[Install]
WantedBy=multi-user.target
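The unit file sets no port, so Fuseki listens on its default 3030, and the /node0 dataset in ExecStart presumably matches the repositoryName the new FusekiService appends to its base URL. A quick standalone probe of both endpoints; the host, port, and dataset name are assumptions read off the unit file above, not the node's actual configuration:

const axios = require('axios');

(async () => {
    try {
        // Fuseki's administrative ping endpoint, also used by healthCheck().
        const ping = await axios.get('http://localhost:3030/$/ping');
        console.log('Fuseki is up:', ping.status === 200);

        // ASK against the /node0 dataset to confirm the SPARQL endpoint answers.
        const response = await axios.post(
            'http://localhost:3030/node0/sparql',
            new URLSearchParams({ query: 'ASK { ?s ?p ?o }' }).toString(),
            {
                headers: {
                    Accept: 'application/json',
                    'Content-Type': 'application/x-www-form-urlencoded',
                },
            },
        );
        console.log('SPARQL endpoint answered:', response.data.boolean);
    } catch (e) {
        console.error('Fuseki is not reachable:', e.message);
    }
})();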