diff --git a/README.md b/README.md
index 62779dad..4e08c00a 100644
--- a/README.md
+++ b/README.md
@@ -8,20 +8,23 @@ A `Node.js` typescript template with open api implementation
 Use a `.env` at root of the repository to set values for the environment variables defined in `.env` file.
-| variable | required | default | description |
-| :-------------------- | :------: | :--------------------: | :----------------------------------------------------------------------------------- |
-| PORT | N | `3000` | The port for the API to listen on |
-| LOG_LEVEL | N | `debug` | Logging level. Valid values are [`trace`, `debug`, `info`, `warn`, `error`, `fatal`] |
-| ENVIRONMENT_VAR | N | `example` | An environment specific variable |
-| DB_PORT | N | `5432` | The port for the database |
-| DB_HOST | Y | - | The database hostname / host |
-| DB_NAME | N | `dscp-matchmaker-api ` | The database name |
-| DB_USERNAME | Y | - | The database username |
-| DB_PASSWORD | Y | - | The database password |
-| IDENTITY_SERVICE_HOST | Y | - | Hostname of the `dscp-identity-service` |
-| IDENTITY_SERVICE_PORT | Y | - | Port of the `dscp-identity-service` |
-| DSCP_API_HOST | Y | - | Hostname of the `dscp-api` |
-| DSCP_API_PORT | Y | - | Port of the `dscp-api` |
+| variable | required | default | description |
+| :-------------------- | :------: | :--------------------: | :------------------------------------------------------------------------------------------- |
+| PORT | N | `3000` | The port for the API to listen on |
+| ENVIRONMENT_VAR | N | `example` | An environment specific variable |
+| DB_PORT | N | `5432` | The port for the database |
+| DB_HOST | Y | - | The database hostname / host |
+| DB_NAME | N | `dscp-matchmaker-api` | The database name |
+| DB_USERNAME | Y | - | The database username |
+| DB_PASSWORD | Y | - | The database password |
+| IDENTITY_SERVICE_HOST | Y | - | Hostname of the `dscp-identity-service` |
+| IDENTITY_SERVICE_PORT | N | `3000` | Port of the `dscp-identity-service` |
+| NODE_HOST | Y | - | The hostname of the `dscp-node` the API should connect to |
+| NODE_PORT | N | `9944` | The port of the `dscp-node` the API should connect to |
+| LOG_LEVEL | N | `info` | Logging level. 
Valid values are [`trace`, `debug`, `info`, `warn`, `error`, `fatal`] | +| USER_URI | Y | - | The Substrate `URI` representing the private key to use when making `dscp-node` transactions | +| IPFS_HOST | Y | - | Hostname of the `IPFS` node to use for metadata storage | +| IPFS_PORT | N | `5001` | Port of the `IPFS` node to use for metadata storage | ## Getting started diff --git a/docker-compose.yml b/docker-compose.yml index c0f5fb0c..e8c8e7b6 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -61,22 +61,6 @@ services: - 9933:9933 restart: on-failure - dscp-api: - image: digicatapult/dscp-api:latest - container_name: dscp-api - ports: - - 3001:3001 - environment: - - PORT=3001 - - API_HOST=dscp-node - - API_PORT=9944 - - USER_URI=//Alice - - IPFS_HOST=ipfs - - IPFS_PORT=5001 - - LOG_LEVEL=trace - - AUTH_TYPE=${AUTH_TYPE:-NONE} - restart: on-failure - ipfs: image: ipfs/go-ipfs:v0.18.1 container_name: ipfs diff --git a/package-lock.json b/package-lock.json index 04225724..f771cb9d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,15 +1,16 @@ { "name": "@digicatapult/dscp-matchmaker-api", - "version": "0.5.11", + "version": "0.6.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@digicatapult/dscp-matchmaker-api", - "version": "0.5.11", + "version": "0.6.0", "license": "Apache-2.0", "dependencies": { "@polkadot/api": "^10.2.2", + "base-x": "^4.0.0", "body-parser": "^1.20.2", "cors": "^2.8.5", "dotenv": "^16.0.3", @@ -23,6 +24,7 @@ "tsoa": "^5.1.1" }, "devDependencies": { + "@polkadot/types": "^10.2.2", "@types/chai": "^4.3.4", "@types/cors": "^2.8.13", "@types/express": "^4.17.17", @@ -2131,6 +2133,11 @@ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, + "node_modules/base-x": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/base-x/-/base-x-4.0.0.tgz", + "integrity": "sha512-FuwxlW4H5kh37X/oW59pwTzzTKRzfrrQwhmyspRM7swOEZcHtDZSCt45U6oKgtuFE+WYPblePMVIPR4RZrh/hw==" + }, "node_modules/base64-js": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", @@ -9106,6 +9113,11 @@ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, + "base-x": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/base-x/-/base-x-4.0.0.tgz", + "integrity": "sha512-FuwxlW4H5kh37X/oW59pwTzzTKRzfrrQwhmyspRM7swOEZcHtDZSCt45U6oKgtuFE+WYPblePMVIPR4RZrh/hw==" + }, "base64-js": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", diff --git a/package.json b/package.json index fd4742ad..a5925931 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@digicatapult/dscp-matchmaker-api", - "version": "0.5.11", + "version": "0.6.0", "description": "An OpenAPI Matchmaking API service for DSCP", "main": "src/index.ts", "scripts": { @@ -34,6 +34,7 @@ }, "homepage": "https://github.com/digicatapult/dscp-matchmaker-api#readme", "devDependencies": { + "@polkadot/types": "^10.2.2", "@types/chai": "^4.3.4", "@types/cors": "^2.8.13", "@types/express": "^4.17.17", @@ -63,6 +64,7 @@ }, "dependencies": { "@polkadot/api": "^10.2.2", + "base-x": "^4.0.0", "body-parser": "^1.20.2", "cors": "^2.8.5", "dotenv": "^16.0.3", diff --git a/src/controllers/attachment/index.ts 
b/src/controllers/attachment/index.ts index 8f4e3225..39289e5c 100644 --- a/src/controllers/attachment/index.ts +++ b/src/controllers/attachment/index.ts @@ -103,7 +103,7 @@ export class attachment extends Controller { if (!req.body && !file) throw new BadRequest('nothing to upload') - const [{ id, filename, binary_blob, created_at }]: any[] = await this.db + const [{ id, filename, binary_blob, created_at }] = await this.db .attachment() .insert({ filename: file ? file.originalname : 'json', diff --git a/src/controllers/capacity/index.ts b/src/controllers/capacity/index.ts index 00f8c618..9a2bf229 100644 --- a/src/controllers/capacity/index.ts +++ b/src/controllers/capacity/index.ts @@ -21,20 +21,31 @@ import { BadRequest, NotFound } from '../../lib/error-handler/index' import { getMemberByAddress, getMemberBySelf } from '../../lib/services/identity' import { TransactionResponse, TransactionState, TransactionApiType, TransactionType } from '../../models/transaction' import { DEMAND } from '../../models/tokenType' -import { runProcess } from '../..//lib/services/dscpApi' import { demandCreate } from '../../lib/payload' import { observeTokenId } from '../../lib/services/blockchainWatcher' +import ChainNode from '../../lib/chainNode' +import env from '../../env' + @Route('capacity') @Tags('capacity') @Security('bearerAuth') export class CapacityController extends Controller { log: Logger db: Database + node: ChainNode constructor() { super() this.log = logger.child({ controller: '/capacity' }) this.db = new Database() + this.node = new ChainNode({ + host: env.NODE_HOST, + port: env.NODE_PORT, + logger, + userUri: env.USER_URI, + ipfsHost: env.IPFS_HOST, + ipfsPort: env.IPFS_PORT, + }) } /** @@ -114,7 +125,7 @@ export class CapacityController extends Controller { }) // temp - until there is a blockchain watcher, need to await runProcess to know token IDs - const [tokenId] = await runProcess(demandCreate(capacity)) + const [tokenId] = await this.node.runProcess(demandCreate(capacity)) await this.db.updateTransaction(transaction.id, { state: TransactionState.finalised }) // demand-create returns a single token ID diff --git a/src/controllers/match2/index.ts b/src/controllers/match2/index.ts index c2b89ea5..8319567c 100644 --- a/src/controllers/match2/index.ts +++ b/src/controllers/match2/index.ts @@ -22,9 +22,10 @@ import { UUID } from '../../models/uuid' import { TransactionResponse, TransactionState, TransactionType, TransactionApiType } from '../../models/transaction' import { MATCH2, DEMAND } from '../../models/tokenType' import { observeTokenId } from '../../lib/services/blockchainWatcher' -import { runProcess } from '../../lib/services/dscpApi' import { match2AcceptFinal, match2AcceptFirst, match2Propose } from '../../lib/payload' import { DemandPayload, DemandState, DemandSubtype } from '../../models/demand' +import ChainNode from '../../lib/chainNode' +import env from '../../env' @Route('match2') @Tags('match2') @@ -32,11 +33,20 @@ import { DemandPayload, DemandState, DemandSubtype } from '../../models/demand' export class Match2Controller extends Controller { log: Logger db: Database + node: ChainNode constructor() { super() this.log = logger.child({ controller: '/match2' }) this.db = new Database() + this.node = new ChainNode({ + host: env.NODE_HOST, + port: env.NODE_PORT, + logger, + userUri: env.USER_URI, + ipfsHost: env.IPFS_HOST, + ipfsPort: env.IPFS_PORT, + }) } /** @@ -123,7 +133,7 @@ export class Match2Controller extends Controller { }) // temp - until there is a blockchain 
watcher, need to await runProcess to know token IDs - const tokenIds = await runProcess(match2Propose(match2, demandA, demandB)) + const tokenIds = await this.node.runProcess(match2Propose(match2, demandA, demandB)) await this.db.updateTransaction(transaction.id, { state: TransactionState.finalised }) // match2-propose returns 3 token IDs @@ -205,7 +215,7 @@ export class Match2Controller extends Controller { const newState = ownsDemandA ? Match2State.acceptedA : Match2State.acceptedB // temp - until there is a blockchain watcher, need to await runProcess to know token IDs - const [tokenId] = await runProcess(match2AcceptFirst(match2, newState, demandA, demandB)) + const [tokenId] = await this.node.runProcess(match2AcceptFirst(match2, newState, demandA, demandB)) await this.db.updateTransaction(transaction.id, { state: TransactionState.finalised }) await observeTokenId(MATCH2, match2.id, newState, tokenId, false) @@ -222,7 +232,7 @@ export class Match2Controller extends Controller { }) // temp - until there is a blockchain watcher, need to await runProcess to know token IDs - const tokenIds = await runProcess(match2AcceptFinal(match2, demandA, demandB)) + const tokenIds = await this.node.runProcess(match2AcceptFinal(match2, demandA, demandB)) await this.db.updateTransaction(transaction.id, { state: TransactionState.finalised }) // match2-acceptFinal returns 3 token IDs diff --git a/src/controllers/order/index.ts b/src/controllers/order/index.ts index d90b5463..3475fb39 100644 --- a/src/controllers/order/index.ts +++ b/src/controllers/order/index.ts @@ -20,9 +20,10 @@ import { logger } from '../../lib/logger' import { BadRequest, NotFound } from '../../lib/error-handler' import Database from '../../lib/db' import { getMemberByAddress, getMemberBySelf } from '../../lib/services/identity' -import { runProcess } from '../..//lib/services/dscpApi' import { observeTokenId } from '../../lib/services/blockchainWatcher' import { demandCreate } from '../../lib/payload' +import ChainNode from '../../lib/chainNode' +import env from '../../env' @Route('order') @Tags('order') @@ -30,11 +31,20 @@ import { demandCreate } from '../../lib/payload' export class order extends Controller { log: Logger db: Database + node: ChainNode constructor() { super() this.log = logger.child({ controller: '/order' }) this.db = new Database() + this.node = new ChainNode({ + host: env.NODE_HOST, + port: env.NODE_PORT, + logger, + userUri: env.USER_URI, + ipfsHost: env.IPFS_HOST, + ipfsPort: env.IPFS_PORT, + }) } /** @@ -119,7 +129,7 @@ export class order extends Controller { state: TransactionState.submitted, }) - const [tokenId] = await runProcess(demandCreate(order)) + const [tokenId] = await this.node.runProcess(demandCreate(order)) await this.db.updateTransaction(transaction.id, { state: TransactionState.finalised }) // demand-create returns a single token ID diff --git a/src/env.ts b/src/env.ts index 80dd7edf..ca2bd6a2 100644 --- a/src/env.ts +++ b/src/env.ts @@ -16,10 +16,11 @@ export default envalid.cleanEnv(process.env, { DB_PASSWORD: envalid.str({ devDefault: 'postgres' }), DB_NAME: envalid.str({ default: 'dscp-matchmaker-api' }), IDENTITY_SERVICE_HOST: envalid.host({ devDefault: 'localhost' }), - IDENTITY_SERVICE_PORT: envalid.port({ devDefault: 3002 }), - DSCP_API_HOST: envalid.host({ devDefault: 'localhost' }), - DSCP_API_PORT: envalid.port({ devDefault: 3001 }), + IDENTITY_SERVICE_PORT: envalid.port({ devDefault: 3002, default: 3000 }), NODE_HOST: envalid.host({ default: 'localhost' }), NODE_PORT: envalid.port({ 
default: 9944 }), ENABLE_INDEXER: envalid.bool({ default: false }), + USER_URI: envalid.str({ devDefault: '//Alice' }), + IPFS_HOST: envalid.host({ devDefault: 'localhost' }), + IPFS_PORT: envalid.port({ default: 5001 }), }) diff --git a/src/index.ts b/src/index.ts index 548549fd..6cb7efa2 100644 --- a/src/index.ts +++ b/src/index.ts @@ -14,6 +14,9 @@ import ChainNode from './lib/chainNode' host: env.NODE_HOST, port: env.NODE_PORT, logger, + userUri: env.USER_URI, + ipfsHost: env.IPFS_HOST, + ipfsPort: env.IPFS_PORT, }) const handleBlock = () => Promise.resolve({}) diff --git a/src/lib/chainNode.ts b/src/lib/chainNode.ts index e4d376d2..ba88935a 100644 --- a/src/lib/chainNode.ts +++ b/src/lib/chainNode.ts @@ -1,23 +1,49 @@ -import { ApiPromise, WsProvider } from '@polkadot/api' +import { ApiPromise, WsProvider, Keyring, SubmittableResult } from '@polkadot/api' +import type { u128 } from '@polkadot/types' + import { Logger } from 'pino' +import { HttpResponse } from './error-handler' + +import Ipfs from './ipfs' +import type { Payload, Output, Metadata, MetadataFile } from './payload' export interface NodeCtorConfig { host: string port: number logger: Logger + userUri: string + ipfsHost: string + ipfsPort: number } +interface RoleEnum { + name: string | undefined + index: number | undefined +} + +type EventData = + | { + outputs: u128[] + } + | undefined + export default class ChainNode { private provider: WsProvider private api: ApiPromise - // private keyring: Keyring + private keyring: Keyring private logger: Logger + private userUri: string + private roles: RoleEnum[] + private ipfs: Ipfs - constructor({ host, port, logger }: NodeCtorConfig) { + constructor({ host, port, logger, userUri, ipfsHost, ipfsPort }: NodeCtorConfig) { this.logger = logger.child({ module: 'ChainNode' }) this.provider = new WsProvider(`ws://${host}:${port}`) + this.userUri = userUri this.api = new ApiPromise({ provider: this.provider }) - // this.keyring = new Keyring({ type: 'sr25519' }) + this.keyring = new Keyring({ type: 'sr25519' }) + this.roles = [] + this.ipfs = new Ipfs({ host: ipfsHost, port: ipfsPort, logger }) // eslint-disable-next-line @typescript-eslint/no-empty-function this.api.isReadyOrError.catch(() => {}) // prevent unhandled promise rejection errors @@ -51,6 +77,128 @@ export default class ChainNode { } } + async getRoles(): Promise { + await this.api.isReady + + const registry = this.api.registry + const lookup = registry.lookup + const lookupId = registry.getDefinition('DscpNodeRuntimeRole') as `Lookup${number}` + + const rolesEnum = lookup.getTypeDef(lookupId).sub + if (Array.isArray(rolesEnum)) { + return rolesEnum.map((e) => ({ name: e.name, index: e.index })) + } else { + throw new Error('No roles found on-chain') + } + } + + roleToIndex(role: string) { + const entry = this.roles.find((e) => e.name === role) + + if (!entry || entry.index === undefined) { + throw new Error(`Invalid role: ${role}`) + } + + return entry.index + } + + async runProcess({ process, inputs, outputs }: Payload): Promise { + await this.api.isReady + + const account = this.keyring.addFromUri(this.userUri) + + const outputsAsMaps = await Promise.all( + outputs.map(async (output: Output) => [ + await this.processRoles(output.roles), + await this.processMetadata(output.metadata), + ]) + ) + + this.logger.debug('Running Transaction inputs: %j outputs: %j', inputs, outputsAsMaps) + + return new Promise((resolve, reject) => { + let unsub: () => void + this.api.tx.simpleNFT + .runProcess(process, inputs, outputsAsMaps) + 
.signAndSend(account, (result: SubmittableResult) => { + this.logger.debug('result.status %s', JSON.stringify(result.status)) + this.logger.debug('result.status.isInBlock', result.status.isInBlock) + const { dispatchError, status } = result + + if (dispatchError) { + if (dispatchError.isModule) { + const decoded = this.api.registry.findMetaError(dispatchError.asModule) + reject(new HttpResponse({ message: `Node dispatch error: ${decoded.name}` })) + } else { + reject(new HttpResponse({ message: `Unknown node dispatch error: ${dispatchError}` })) + } + } + + if (status.isInBlock) { + const processRanEvent = result.events.find(({ event: { method } }) => method === 'ProcessRan') + const data = processRanEvent?.event?.data as EventData + const tokens = data?.outputs?.map((x) => x.toNumber()) + + unsub() + tokens ? resolve(tokens) : reject(Error('No token IDs returned')) + } + }) + .then((res) => { + unsub = res + }) + .catch((err) => { + this.logger.warn(`Error in run process transaction: ${err}`) + throw err + }) + }) + } + + async processRoles(roles: Record) { + if (this.roles.length === 0) { + this.roles = await this.getRoles() + } + + return new Map( + Object.entries(roles).map(([key, v]) => { + return [this.roleToIndex(key), v] + }) + ) + } + + async processMetadata(metadata: Metadata) { + return new Map( + await Promise.all( + Object.entries(metadata).map(async ([key, value]) => { + let processedValue + switch (value.type) { + case 'LITERAL': + processedValue = { Literal: value.value as string } + break + case 'TOKEN_ID': + processedValue = { TokenId: value.value as string } + break + case 'FILE': + processedValue = { File: await this.ipfs.addFile(value.value as MetadataFile) } + break + default: + case 'NONE': + processedValue = { None: null } + break + } + + return [key, processedValue] as readonly [unknown, unknown] + }) + ) + ) + } + + async getLastTokenId() { + await this.api.isReady + const lastTokenId = await this.api.query.simpleNFT.lastToken() + + return lastTokenId ? 
parseInt(lastTokenId.toString(), 10) : 0 + } + async watchFinalisedBlocks(onNewFinalisedHead: (blockHash: string) => Promise) { await this.api.isReady await this.api.rpc.chain.subscribeFinalizedHeads((header) => onNewFinalisedHead(header.hash.toHex())) diff --git a/src/lib/db/index.ts b/src/lib/db/index.ts index a3d16010..bff66b64 100644 --- a/src/lib/db/index.ts +++ b/src/lib/db/index.ts @@ -104,12 +104,12 @@ export default class Database { return this.db().demand().select(demandColumns).where({ id }) } - getDemandWithAttachment = async (capacityId: UUID, subtype: DemandSubtype) => { + getDemandWithAttachment = async (id: UUID, subtype: DemandSubtype) => { return this.db() .demand() .join('attachment', 'demand.parameters_attachment_id', 'attachment.id') .select() - .where({ 'demand.id': capacityId, subtype }) + .where({ 'demand.id': id, subtype }) } insertTransaction = async (transaction: object) => { diff --git a/src/lib/ipfs.ts b/src/lib/ipfs.ts new file mode 100644 index 00000000..b75d4640 --- /dev/null +++ b/src/lib/ipfs.ts @@ -0,0 +1,61 @@ +import basex from 'base-x' +import { Logger } from 'pino' + +import type { MetadataFile } from './payload' +import { HttpResponse } from './error-handler' + +const BASE58 = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz' +const bs58 = basex(BASE58) + +interface FilestoreResponse { + Name: string + Hash: string + Size: string +} + +export default class Ipfs { + private url: string + private logger: Logger + + constructor({ host, port, logger }: { host: string; port: number; logger: Logger }) { + this.url = `http://${host}:${port}/api/v0/add?cid-version=0&wrap-with-directory=true` + this.logger = logger.child({ module: 'ipfs' }) + } + + async addFile({ blob, filename }: MetadataFile): Promise { + this.logger.debug('Uploading file %s', filename) + const form = new FormData() + form.append('file', blob, filename) + const res = await fetch(this.url, { + method: 'POST', + body: form, + }) + + const text = await res.text() + + if (!res.ok) { + throw new HttpResponse({ code: 500, message: text }) + } + + // Build string of objects into array + const json = text + .split('\n') + .filter((obj) => obj.length > 0) + .map((obj) => JSON.parse(obj)) + + const hash = formatHash(json) + this.logger.debug('Upload of file %s succeeded. 
Hash is %s', filename, hash) + return hash + } +} + +const formatHash = (filestoreResponse: FilestoreResponse[]) => { + // directory has no Name + const dir = filestoreResponse.find((r) => r.Name === '') + if (dir && dir.Hash && dir.Size) { + const decoded = Buffer.from(bs58.decode(dir.Hash)) + return `0x${decoded.toString('hex').slice(4)}` + } else { + throw new HttpResponse({ code: 500, message: 'ipfs failed to make directory' }) + } +} diff --git a/src/lib/payload.ts b/src/lib/payload.ts index 0489e94f..d1877d1a 100644 --- a/src/lib/payload.ts +++ b/src/lib/payload.ts @@ -2,8 +2,25 @@ import { Match2Payload, Match2Response, Match2State } from '../models/match2' import { DemandPayload, DemandState } from '../models/demand' import * as TokenType from '../models/tokenType' -export const demandCreate = (demand: DemandPayload) => ({ - files: [{ blob: new Blob([demand.binary_blob]), filename: demand.filename }], +export interface Payload { + process: { id: string; version: number } + inputs: number[] + outputs: Output[] +} + +export interface Output { + roles: Record + metadata: Metadata +} + +export interface MetadataFile { + blob: Blob + filename: string +} + +export type Metadata = Record + +export const demandCreate = (demand: DemandPayload): Payload => ({ process: { id: 'demand-create', version: 1 }, inputs: [], outputs: [ @@ -14,14 +31,13 @@ export const demandCreate = (demand: DemandPayload) => ({ type: { type: 'LITERAL', value: TokenType.DEMAND }, state: { type: 'LITERAL', value: DemandState.created }, subtype: { type: 'LITERAL', value: demand.subtype }, - parameters: { type: 'FILE', value: demand.filename }, + parameters: { type: 'FILE', value: { blob: new Blob([demand.binary_blob]), filename: demand.filename } }, }, }, ], }) -export const match2Propose = (match2: Match2Response, demandA: DemandPayload, demandB: DemandPayload) => ({ - files: [], +export const match2Propose = (match2: Match2Response, demandA: DemandPayload, demandB: DemandPayload): Payload => ({ process: { id: 'match2-propose', version: 1 }, inputs: [demandA.latestTokenId, demandB.latestTokenId], outputs: [ @@ -63,8 +79,7 @@ export const match2AcceptFirst = ( newState: Match2State.acceptedA | Match2State.acceptedB, demandA: DemandPayload, demandB: DemandPayload -) => ({ - files: [], +): Payload => ({ process: { id: 'match2-accept', version: 1 }, inputs: [match2.latestTokenId], outputs: [ @@ -82,8 +97,7 @@ export const match2AcceptFirst = ( ], }) -export const match2AcceptFinal = (match2: Match2Payload, demandA: DemandPayload, demandB: DemandPayload) => ({ - files: [], +export const match2AcceptFinal = (match2: Match2Payload, demandA: DemandPayload, demandB: DemandPayload): Payload => ({ process: { id: 'match2-acceptFinal', version: 1 }, inputs: [demandA.latestTokenId, demandB.latestTokenId, match2.latestTokenId], outputs: [ diff --git a/src/lib/services/dscpApi.ts b/src/lib/services/dscpApi.ts deleted file mode 100644 index bc5f7b59..00000000 --- a/src/lib/services/dscpApi.ts +++ /dev/null @@ -1,28 +0,0 @@ -import env from '../../env' -import { HttpResponse } from '../error-handler' - -const URL_PREFIX = `http://${env.DSCP_API_HOST}:${env.DSCP_API_PORT}/v3` - -export interface RunProcessFile { - blob: Blob - filename: string -} - -export const runProcess = async ({ files, ...payload }: { files: RunProcessFile[] }) => { - const url = `${URL_PREFIX}/run-process` - const formData = new FormData() - - formData.append('request', JSON.stringify(payload)) - files.map((f: RunProcessFile) => formData.append('files', f.blob, 
f.filename || 'unknown')) - - const res = await fetch(url, { - method: 'POST', - body: formData, - }) - - if (res.ok) { - return await res.json() - } - - throw new HttpResponse({ code: 500, message: await res.text() }) // pass through dscpApi error -} diff --git a/test/helper/mock.ts b/test/helper/mock.ts index fa4975a6..4be77a85 100644 --- a/test/helper/mock.ts +++ b/test/helper/mock.ts @@ -5,16 +5,12 @@ export const selfAlias = 'test-self' export const selfAddress = '5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY' export const notSelfAlias = 'test-not-self' export const notSelfAddress = '5FHneW46xGXgs5mUiveU4sbTyGBzmstUspZC92UhjJM694ty' -export const demandCreateMockTokenId = 42 -export const match2ProposeMockTokenIds = [52, 53, 54] -export const match2AcceptMockTokenId = 62 -export const match2AcceptFinalMockTokenIds = [72, 73, 74] const mockAgent = new MockAgent() setGlobalDispatcher(mockAgent) const mockIdentity = mockAgent.get(`http://${env.IDENTITY_SERVICE_HOST}:${env.IDENTITY_SERVICE_PORT}`) -const mockApi = mockAgent.get(`http://${env.DSCP_API_HOST}:${env.DSCP_API_PORT}`) +const mockIpfs = mockAgent.get(`http://${env.IPFS_HOST}:${env.IPFS_PORT}`) export const identitySelfMock = () => { mockIdentity @@ -51,47 +47,20 @@ export const identitySelfMock = () => { .persist() } -export const demandCreateMock = () => { - mockApi +export const ipfsMock = () => { + mockIpfs .intercept({ - path: '/v3/run-process', + path: '/api/v0/add?cid-version=0&wrap-with-directory=true', method: 'POST', }) - .reply(200, [demandCreateMockTokenId]) + .reply(200, { Name: '', Hash: 'QmXVStDC6kTpVHY1shgBQmyA4SuSrYnNRnHSak5iB6Eehn', Size: '63052' }) } -export const match2ProposeMock = () => { - mockApi +export const ipfsMockError = () => { + mockIpfs .intercept({ - path: '/v3/run-process', + path: '/api/v0/add?cid-version=0&wrap-with-directory=true', method: 'POST', }) - .reply(200, match2ProposeMockTokenIds) -} - -export const match2AcceptMock = () => { - mockApi - .intercept({ - path: '/v3/run-process', - method: 'POST', - }) - .reply(200, [match2AcceptMockTokenId]) -} - -export const match2AcceptFinalMock = () => { - mockApi - .intercept({ - path: '/v3/run-process', - method: 'POST', - }) - .reply(200, match2AcceptFinalMockTokenIds) -} - -export const apiRunProcessMockError = () => { - mockApi - .intercept({ - path: '/v3/run-process', - method: 'POST', - }) - .reply(400, 'invalid') + .reply(500, 'error') } diff --git a/test/integration/capacity.test.ts b/test/integration/capacity.test.ts index dc49d786..ea9f44c5 100644 --- a/test/integration/capacity.test.ts +++ b/test/integration/capacity.test.ts @@ -17,17 +17,22 @@ import { } from '../seeds' import { DemandState } from '../../src/models/demand' -import { - selfAlias, - identitySelfMock, - demandCreateMock, - apiRunProcessMockError, - demandCreateMockTokenId, -} from '../helper/mock' +import { selfAlias, identitySelfMock, ipfsMock, ipfsMockError } from '../helper/mock' import { TransactionState, TransactionApiType, TransactionType } from '../../src/models/transaction' import Database from '../../src/lib/db' +import ChainNode from '../../src/lib/chainNode' +import { logger } from '../../src/lib/logger' +import env from '../../src/env' const db = new Database() +const node = new ChainNode({ + host: env.NODE_HOST, + port: env.NODE_PORT, + logger, + userUri: env.USER_URI, + ipfsHost: env.IPFS_HOST, + ipfsPort: env.IPFS_PORT, +}) describe('capacity', () => { let app: Express @@ -85,7 +90,9 @@ describe('capacity', () => { }) it('should create a capacity 
on-chain', async () => { - demandCreateMock() + ipfsMock() + const lastTokenId = await node.getLastTokenId() + // submit to chain const response = await post(app, `/capacity/${seededCapacityId}/creation`, {}) expect(response.status).to.equal(201) @@ -102,8 +109,8 @@ describe('capacity', () => { // check local capacity updates with token id const [capacity] = await db.getDemand(seededCapacityId) - expect(capacity.latestTokenId).to.equal(demandCreateMockTokenId) - expect(capacity.originalTokenId).to.equal(demandCreateMockTokenId) + expect(capacity.latestTokenId).to.equal(lastTokenId + 1) + expect(capacity.originalTokenId).to.equal(lastTokenId + 1) }) it('it should get a transaction', async () => { @@ -175,12 +182,6 @@ describe('capacity', () => { expect(response.body).to.equal(`Demand must have state: ${DemandState.created}`) }) - it('dscp-api error - 500', async () => { - apiRunProcessMockError() - const response = await post(app, `/capacity/${seededCapacityId}/creation`, {}) - expect(response.status).to.equal(500) - }) - it('non-existent Creation ID - 404', async () => { const response = await get(app, `/capacity/${seededCapacityId}/creation/${nonExistentId}`) expect(response.status).to.equal(404) @@ -195,5 +196,13 @@ describe('capacity', () => { const response = await get(app, `/capacity/${nonExistentId}/creation/`) expect(response.status).to.equal(404) }) + + it('ipfs error - 500', async () => { + ipfsMockError() + + const { status, body } = await post(app, `/capacity/${seededCapacityId}/creation`, {}) + expect(status).to.equal(500) + expect(body).to.equal('error') + }) }) }) diff --git a/test/integration/match2.test.ts b/test/integration/match2.test.ts index f75ba6f1..c8008fc8 100644 --- a/test/integration/match2.test.ts +++ b/test/integration/match2.test.ts @@ -11,8 +11,7 @@ import { seededOrderId, nonExistentId, seededMatch2Id, - seededOrderTokenId, - seededCapacityTokenId, + parametersAttachmentId, seededOrderMissingTokenId, seededCapacityMissingTokenId, seededProposalTransactionId, @@ -21,31 +20,33 @@ import { seededOrderAlreadyAllocated, seededMatch2WithAllocatedDemands, seededMatch2AcceptedA, - seededMatch2TokenId, - seededMatch2OrderNotOwnedId, seededMatch2AcceptedFinal, seededMatch2NotAcceptableB, seededMatch2NotAcceptableA, seededMatch2NotAcceptableBoth, seededAcceptTransactionId, + seededOrderWithTokenId, } from '../seeds' -import { - selfAlias, - identitySelfMock, - match2ProposeMock, - match2ProposeMockTokenIds, - match2AcceptMock, - match2AcceptMockTokenId, - match2AcceptFinalMock, - match2AcceptFinalMockTokenIds, -} from '../helper/mock' +import { selfAlias, identitySelfMock, ipfsMock } from '../helper/mock' import { Match2State } from '../../src/models/match2' import { TransactionState, TransactionApiType, TransactionType } from '../../src/models/transaction' import Database from '../../src/lib/db' import { DemandState } from '../../src/models/demand' +import ChainNode from '../../src/lib/chainNode' +import { logger } from '../../src/lib/logger' +import env from '../../src/env' +import { UUID } from '../../src/models/uuid' const db = new Database() +const node = new ChainNode({ + host: env.NODE_HOST, + port: env.NODE_PORT, + logger, + userUri: env.USER_URI, + ipfsHost: env.IPFS_HOST, + ipfsPort: env.IPFS_PORT, +}) describe('match2', () => { let app: Express @@ -111,47 +112,107 @@ describe('match2', () => { }) }) - it('should propose a match2 on-chain', async () => { - match2ProposeMock() - // submit to chain - const response = await post(app, 
`/match2/${seededMatch2Id}/proposal`, {}) - expect(response.status).to.equal(201) + describe('on-chain', async () => { + let orderOriginalId: number + let capacityOriginalId: number + let orderLocalId: UUID + let capacityLocalId: UUID + let match2LocalId: UUID + + beforeEach(async () => { + // prepare an unallocated order + capacity + local match2 + ipfsMock() + const { + body: { id: orderId }, + } = await post(app, '/order', { parametersAttachmentId }) + await post(app, `/order/${orderId}/creation`, {}) + const [order] = await db.getDemand(orderId) + orderLocalId = orderId + orderOriginalId = order.originalTokenId + + ipfsMock() + const { + body: { id: capacityId }, + } = await post(app, '/capacity', { parametersAttachmentId }) + await post(app, `/capacity/${capacityId}/creation`, {}) + const [capacity] = await db.getDemand(capacityId) + capacityLocalId = capacityId + capacityOriginalId = capacity.originalTokenId + + const { + body: { id: match2Id }, + } = await post(app, '/match2', { demandA: orderId, demandB: capacityId }) + match2LocalId = match2Id + }) - const { id: transactionId, state } = response.body - expect(transactionId).to.match( - /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89ABab][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$/ - ) - expect(state).to.equal(TransactionState.submitted) + it('should propose a match2 on-chain', async () => { + const lastTokenId = await node.getLastTokenId() - // check local transaction updates - const [transaction] = await db.getTransaction(transactionId) - expect(transaction.state).to.equal(TransactionState.finalised) + // submit to chain + const response = await post(app, `/match2/${match2LocalId}/proposal`, {}) + expect(response.status).to.equal(201) - // check local entities update with token id - const [demandA] = await db.getDemand(seededOrderId) - expect(demandA.latestTokenId).to.equal(match2ProposeMockTokenIds[0]) - expect(demandA.originalTokenId).to.equal(seededOrderTokenId) + const { id: transactionId, state } = response.body + expect(transactionId).to.match( + /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89ABab][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$/ + ) + expect(state).to.equal(TransactionState.submitted) - const [demandB] = await db.getDemand(seededCapacityId) - expect(demandB.latestTokenId).to.equal(match2ProposeMockTokenIds[1]) - expect(demandB.originalTokenId).to.equal(seededCapacityTokenId) + // check local transaction updates + const [transaction] = await db.getTransaction(transactionId) + expect(transaction.state).to.equal(TransactionState.finalised) - const [match2] = await db.getMatch2(seededMatch2Id) - expect(match2.latestTokenId).to.equal(match2ProposeMockTokenIds[2]) - expect(match2.originalTokenId).to.equal(match2ProposeMockTokenIds[2]) - }) + // check local entities update with token id + const [demandA] = await db.getDemand(orderLocalId) + expect(demandA.latestTokenId).to.equal(lastTokenId + 1) + expect(demandA.originalTokenId).to.equal(orderOriginalId) - it('it should get a proposal transaction', async () => { - const response = await get(app, `/match2/${seededMatch2Id}/proposal/${seededProposalTransactionId}`) - expect(response.status).to.equal(200) - expect(response.body).to.deep.equal({ - id: seededProposalTransactionId, - apiType: TransactionApiType.match2, - transactionType: TransactionType.proposal, - localId: seededMatch2Id, - state: TransactionState.submitted, - submittedAt: exampleDate, - updatedAt: exampleDate, + const [demandB] = await db.getDemand(capacityLocalId) + 
expect(demandB.latestTokenId).to.equal(lastTokenId + 2) + expect(demandB.originalTokenId).to.equal(capacityOriginalId) + + const [match2] = await db.getMatch2(match2LocalId) + expect(match2.latestTokenId).to.equal(lastTokenId + 3) + expect(match2.originalTokenId).to.equal(lastTokenId + 3) + }) + + it('should acceptA then acceptFinal a match2 on-chain', async () => { + // propose + await post(app, `/match2/${match2LocalId}/proposal`, {}) + const [match2] = await db.getMatch2(match2LocalId) + const match2OriginalId = match2.originalTokenId + + const lastTokenId = await node.getLastTokenId() + + // submit accept to chain + const responseAcceptA = await post(app, `/match2/${match2LocalId}/accept`, {}) + expect(responseAcceptA.status).to.equal(201) + + // check local entities update with token id + const [match2AcceptA] = await db.getMatch2(match2LocalId) + expect(match2AcceptA.latestTokenId).to.equal(lastTokenId + 1) + expect(match2AcceptA.state).to.equal(Match2State.acceptedA) + expect(match2AcceptA.originalTokenId).to.equal(match2OriginalId) + + // submit 2nd accept to chain + const responseAcceptFinal = await post(app, `/match2/${match2LocalId}/accept`, {}) + expect(responseAcceptFinal.status).to.equal(201) + + // check local entities update with token id + const [demandA] = await db.getDemand(orderLocalId) + expect(demandA.latestTokenId).to.equal(lastTokenId + 2) + expect(demandA.state).to.equal(DemandState.allocated) + expect(demandA.originalTokenId).to.equal(orderOriginalId) + + const [demandB] = await db.getDemand(capacityLocalId) + expect(demandB.latestTokenId).to.equal(lastTokenId + 3) + expect(demandB.state).to.equal(DemandState.allocated) + expect(demandB.originalTokenId).to.equal(capacityOriginalId) + + const [matchAcceptFinal] = await db.getMatch2(match2LocalId) + expect(matchAcceptFinal.latestTokenId).to.equal(lastTokenId + 4) + expect(matchAcceptFinal.state).to.equal(Match2State.acceptedFinal) + expect(matchAcceptFinal.originalTokenId).to.equal(match2OriginalId) }) }) @@ -170,53 +231,6 @@ describe('match2', () => { }) }) - it('should acceptA then acceptFinal a match2 on-chain', async () => { - match2AcceptMock() - // submit to chain - const responseAcceptA = await post(app, `/match2/${seededMatch2Id}/accept`, {}) - expect(responseAcceptA.status).to.equal(201) - - // check local entities update with token id - const [match2AcceptA] = await db.getMatch2(seededMatch2Id) - expect(match2AcceptA.latestTokenId).to.equal(match2AcceptMockTokenId) - expect(match2AcceptA.state).to.equal(Match2State.acceptedA) - expect(match2AcceptA.originalTokenId).to.equal(seededMatch2TokenId) - - match2AcceptFinalMock() - // submit to chain - const responseAcceptFinal = await post(app, `/match2/${seededMatch2Id}/accept`, {}) - expect(responseAcceptFinal.status).to.equal(201) - - // check local entities update with token id - const [demandA] = await db.getDemand(seededOrderId) - expect(demandA.latestTokenId).to.equal(match2AcceptFinalMockTokenIds[0]) - expect(demandA.state).to.equal(DemandState.allocated) - expect(demandA.originalTokenId).to.equal(seededOrderTokenId) - - const [demandB] = await db.getDemand(seededCapacityId) - expect(demandB.latestTokenId).to.equal(match2AcceptFinalMockTokenIds[1]) - expect(demandB.state).to.equal(DemandState.allocated) - expect(demandB.originalTokenId).to.equal(seededCapacityTokenId) - - const [matchAcceptFinal] = await db.getMatch2(seededMatch2Id) - expect(matchAcceptFinal.latestTokenId).to.equal(match2AcceptFinalMockTokenIds[2]) - 
expect(matchAcceptFinal.state).to.equal(Match2State.acceptedFinal) - expect(matchAcceptFinal.originalTokenId).to.equal(seededMatch2TokenId) - }) - - it('should acceptB a match2 on-chain', async () => { - match2AcceptMock() - // submit to chain - const response = await post(app, `/match2/${seededMatch2OrderNotOwnedId}/accept`, {}) - expect(response.status).to.equal(201) - - // check local entities update with token id - const [match2] = await db.getMatch2(seededMatch2OrderNotOwnedId) - expect(match2.latestTokenId).to.equal(match2AcceptMockTokenId) - expect(match2.state).to.equal(Match2State.acceptedB) - expect(match2.originalTokenId).to.equal(seededMatch2TokenId) - }) - it('it should get an accept transaction', async () => { const response = await get(app, `/match2/${seededMatch2Id}/accept/${seededAcceptTransactionId}`) expect(response.status).to.equal(200) @@ -311,7 +325,10 @@ describe('match2', () => { }) it('demandB missing token ID - 400', async () => { - const createMatch2 = await post(app, '/match2', { demandA: seededOrderId, demandB: seededCapacityMissingTokenId }) + const createMatch2 = await post(app, '/match2', { + demandA: seededOrderWithTokenId, + demandB: seededCapacityMissingTokenId, + }) expect(createMatch2.status).to.equal(201) const response = await post(app, `/match2/${createMatch2.body.id}/proposal`, {}) diff --git a/test/integration/order.test.ts b/test/integration/order.test.ts index 25a86547..287d71cf 100644 --- a/test/integration/order.test.ts +++ b/test/integration/order.test.ts @@ -5,14 +5,25 @@ import { expect } from 'chai' import createHttpServer from '../../src/server' import { post, get } from '../helper/routeHelper' -import { seed, parametersAttachmentId, seededOrderId, seededOrderCreationId ,cleanup } from '../seeds' +import { seed, parametersAttachmentId, seededOrderId, seededOrderCreationId, cleanup } from '../seeds' import { DemandState } from '../../src/models/demand' -import { selfAlias, identitySelfMock, demandCreateMock } from '../helper/mock' +import { selfAlias, identitySelfMock, ipfsMock } from '../helper/mock' import { TransactionState } from '../../src/models/transaction' import Database from '../../src/lib/db' +import ChainNode from '../../src/lib/chainNode' +import { logger } from '../../src/lib/logger' +import env from '../../src/env' const db = new Database() +const node = new ChainNode({ + host: env.NODE_HOST, + port: env.NODE_PORT, + logger, + userUri: env.USER_URI, + ipfsHost: env.IPFS_HOST, + ipfsPort: env.IPFS_PORT, +}) describe('order', () => { let res: any @@ -21,7 +32,6 @@ describe('order', () => { before(async function () { app = await createHttpServer() identitySelfMock() - demandCreateMock() }) beforeEach(async () => await seed()) @@ -29,11 +39,11 @@ describe('order', () => { describe('when requested order or orders do not exist', () => { beforeEach(async () => await cleanup()) - it ('returns 200 and an empty array when retrieving all', async () => { + it('returns 200 and an empty array when retrieving all', async () => { const { status, body } = await get(app, '/order') expect(status).to.equal(200) - expect(body).to.be.an( "array" ).that.is.empty + expect(body).to.be.an('array').that.is.empty }) it('returns 404 if can not be found by ID', async () => { @@ -42,7 +52,7 @@ describe('order', () => { expect(status).to.equal(404) expect(body).to.equal('order not found') }) - // TODO - assert for max number of records + // TODO - assert for max number of records }) describe('if attachment can not be found', () => { @@ -69,7 +79,7 @@ 
describe('order', () => { expect(status).to.equal(422) expect(body).to.deep.contain({ fields: { - 'orderId': { + orderId: { message: "Not match in '[0-9A-Fa-f]{8}-[0-9A-Fa-f]{4}-4[0-9A-Fa-f]{3}-[89ABab][0-9A-Fa-f]{3}-[0-9A-Fa-f]{12}'", value: '789ad47', @@ -106,14 +116,14 @@ describe('order', () => { describe('if order state is not created while posting new creation', () => { beforeEach(async () => { - await db.insertDemand({ - id: 'b21f865e-f4e9-4ae2-8944-de691e9eb4d0', - owner: '5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY', - subtype: 'order', - state: 'allocated', - parameters_attachment_id: parametersAttachmentId, - latest_token_id: 99, - original_token_id: 99, + await db.insertDemand({ + id: 'b21f865e-f4e9-4ae2-8944-de691e9eb4d0', + owner: '5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY', + subtype: 'order', + state: 'allocated', + parameters_attachment_id: parametersAttachmentId, + latest_token_id: 99, + original_token_id: 99, }) }) @@ -133,7 +143,7 @@ describe('order', () => { expect(body).to.deep.contain({ owner: 'test-self', state: 'created', - parametersAttachmentId: 'a789ad47-91c3-446e-90f9-a7c9b233eaf8' + parametersAttachmentId: 'a789ad47-91c3-446e-90f9-a7c9b233eaf8', }) }) @@ -153,7 +163,12 @@ describe('order', () => { }) it('creates an order transaction on chain', async () => { - const { body: { id: orderId } } = await post(app, '/order', { parametersAttachmentId }) + ipfsMock() + const lastTokenId = await node.getLastTokenId() + + const { + body: { id: orderId }, + } = await post(app, '/order', { parametersAttachmentId }) // submit to chain const response = await post(app, `/order/${orderId}/creation`, {}) @@ -180,8 +195,8 @@ describe('order', () => { state: 'created', subtype: 'order', parametersAttachmentId, - latestTokenId: 42, - originalTokenId: 42, + latestTokenId: lastTokenId + 1, + originalTokenId: lastTokenId + 1, }) }) diff --git a/test/mocharc.json b/test/mocharc.json index 5c5ae516..dee2d951 100644 --- a/test/mocharc.json +++ b/test/mocharc.json @@ -1,4 +1,4 @@ { - "timeout": 10000, + "timeout": 30000, "exit": true } diff --git a/test/seeds/index.ts b/test/seeds/index.ts index 7599dba7..f3f13c05 100644 --- a/test/seeds/index.ts +++ b/test/seeds/index.ts @@ -15,20 +15,16 @@ export const cleanup = async () => { export const parametersAttachmentId = 'a789ad47-91c3-446e-90f9-a7c9b233eaf8' export const seededCapacityId = '0f5af074-7d4d-40b4-86a5-17a2391303cb' -export const seededCapacityTokenId = 12 export const seededOrderCreationId = 'ff3af974-7d4d-40b4-86a5-00a2241265cb' export const seededTransactionId = '1f3af974-7d4d-40b4-86a5-94a2241265cb' export const seededTransactionId2 = 'd65d8e11-150f-4ea4-b778-b920e9dbc378' export const seededProposalTransactionId = '8a5343dc-88a3-4b61-b156-330d52f506f8' export const seededAcceptTransactionId = 'd8eb8a94-222b-4481-b315-1dcbf2e07079' export const seededOrderId = 'ae350c28-f696-4e95-8467-d00507dfcc39' -export const seededOrderNotOwnedId = 'c88908aa-a2a6-48df-a698-572aa30159c0' -export const seededCapacityNotOwnedId = 'b21f865e-f4e9-4ae2-8944-de691e9eb4d9' -export const seededOrderTokenId = 11 + export const seededMatch2Id = 'f960e4a1-6182-4dd3-8ac2-6f3fad995551' -export const seededMatch2OrderNotOwnedId = 'ffb6a503-353c-40a3-94ce-bb04353b68df' -export const seededMatch2TokenId = 13 export const exampleDate = '2023-03-24T10:40:47.317Z' +export const seededOrderWithTokenId = '64d89075-0059-4a8a-87da-c6715d64d0a9' export const nonExistentId = 'a789ad47-91c3-446e-90f9-a7c9b233eaf9' export const 
seededCapacityMissingTokenId = 'b2348deb-d967-4317-8637-2867ced70356' @@ -42,6 +38,12 @@ export const seededMatch2NotAcceptableA = '46d7dbe8-aaef-472e-af9f-ecdd2681d3a5' export const seededMatch2NotAcceptableB = '097d3905-72aa-4517-85d2-0091d26fceac' export const seededMatch2NotAcceptableBoth = '619fb8ca-4dd9-4843-8c7a-9d9c9474784d' +const seededOrderNotOwnedId = 'c88908aa-a2a6-48df-a698-572aa30159c0' +const seededCapacityNotOwnedId = 'b21f865e-f4e9-4ae2-8944-de691e9eb4d9' +const seededCapacityWithTokenId = 'b005f4a1-400e-410e-aa72-8e97385f63e6' +const seededMatch2TokenId = 43 +const seededDemandTokenId = 42 + export const seed = async () => { await cleanup() @@ -60,8 +62,6 @@ export const seed = async () => { subtype: DemandSubtype.capacity, state: DemandState.created, parameters_attachment_id: parametersAttachmentId, - latest_token_id: seededCapacityTokenId, - original_token_id: seededCapacityTokenId, }, ]) @@ -96,8 +96,6 @@ export const seed = async () => { subtype: DemandSubtype.order, state: DemandState.created, parameters_attachment_id: parametersAttachmentId, - latest_token_id: seededOrderTokenId, - original_token_id: seededOrderTokenId, }, ]) @@ -108,8 +106,8 @@ export const seed = async () => { subtype: DemandSubtype.order, state: DemandState.created, parameters_attachment_id: parametersAttachmentId, - latest_token_id: seededOrderTokenId, - original_token_id: seededOrderTokenId, + latest_token_id: seededDemandTokenId, + original_token_id: seededDemandTokenId, }, ]) @@ -120,8 +118,8 @@ export const seed = async () => { subtype: DemandSubtype.capacity, state: DemandState.created, parameters_attachment_id: parametersAttachmentId, - latest_token_id: seededOrderTokenId, - original_token_id: seededOrderTokenId, + latest_token_id: seededDemandTokenId, + original_token_id: seededDemandTokenId, }, ]) @@ -139,20 +137,6 @@ export const seed = async () => { }, ]) - await db.match2().insert([ - { - id: seededMatch2OrderNotOwnedId, - state: Match2State.proposed, - optimiser: selfAddress, - member_a: notSelfAddress, - member_b: selfAddress, - demand_a_id: seededOrderNotOwnedId, - demand_b_id: seededCapacityId, - latest_token_id: seededMatch2TokenId, - original_token_id: seededMatch2TokenId, - }, - ]) - await db.transaction().insert([ { id: seededProposalTransactionId, @@ -209,6 +193,30 @@ export const seed = async () => { }, ]) + await db.demand().insert([ + { + id: seededCapacityWithTokenId, + owner: selfAddress, + subtype: DemandSubtype.capacity, + state: DemandState.created, + parameters_attachment_id: parametersAttachmentId, + latest_token_id: seededDemandTokenId, + original_token_id: seededDemandTokenId, + }, + ]) + + await db.demand().insert([ + { + id: seededOrderWithTokenId, + owner: selfAddress, + subtype: DemandSubtype.order, + state: DemandState.created, + parameters_attachment_id: parametersAttachmentId, + latest_token_id: seededDemandTokenId, + original_token_id: seededDemandTokenId, + }, + ]) + await db.demand().insert([ { id: seededCapacityAlreadyAllocated, @@ -273,7 +281,7 @@ export const seed = async () => { member_a: notSelfAddress, member_b: selfAddress, demand_a_id: seededOrderNotOwnedId, - demand_b_id: seededCapacityId, + demand_b_id: seededCapacityWithTokenId, latest_token_id: seededMatch2TokenId, original_token_id: seededMatch2TokenId, }, @@ -286,7 +294,7 @@ export const seed = async () => { optimiser: selfAddress, member_a: selfAddress, member_b: notSelfAddress, - demand_a_id: seededOrderId, + demand_a_id: seededOrderWithTokenId, demand_b_id: seededCapacityNotOwnedId, 
latest_token_id: seededMatch2TokenId, original_token_id: seededMatch2TokenId,