Transaction hash #705

Merged (6 commits) on Nov 6, 2018
4 changes: 2 additions & 2 deletions migrations/20180407120531-create-data-info.js
@@ -32,8 +32,8 @@ module.exports = {
allowNull: false,
type: Sequelize.INTEGER,
},
- transaction_hash: {
- allowNull: true,
+ origin: {
+ allowNull: false,
type: Sequelize.STRING,
},
}),
4 changes: 4 additions & 0 deletions migrations/20180407123625-create-holding-data.js
@@ -31,6 +31,10 @@ module.exports = {
allowNull: false,
type: Sequelize.STRING,
},
transaction_hash: {
allowNull: false,
type: Sequelize.STRING,
},
}),
down: (queryInterface, Sequelize) => queryInterface.dropTable('holding_data'),
};
4 changes: 4 additions & 0 deletions migrations/20180420094832-create-offers.js
@@ -43,6 +43,10 @@ module.exports = {
type: Sequelize.STRING,
allowNull: false,
},
transaction_hash: {
allowNull: true,
type: Sequelize.STRING,
},
}),
down: queryInterface => queryInterface.dropTable('offers'),
};
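Note: the three migrations above edit existing `createTable` definitions, which only takes effect on a database migrated from scratch. A deployment that has already run them would need a follow-up `addColumn` migration along these lines (a hypothetical sketch, not part of this PR, shown for `holding_data`; `data_info` and `offers` would be handled the same way):

```js
// Hypothetical follow-up migration (not in this PR): adds the column to a
// database that already ran the original create-holding-data migration.
module.exports = {
  up: (queryInterface, Sequelize) => queryInterface.addColumn(
    'holding_data',
    'transaction_hash',
    { allowNull: true, type: Sequelize.STRING },
  ),
  down: queryInterface => queryInterface.removeColumn('holding_data', 'transaction_hash'),
};
```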
20 changes: 20 additions & 0 deletions migrations/201807132331112-create-purchased-data.js
@@ -0,0 +1,20 @@

module.exports = {
up: (queryInterface, Sequelize) => queryInterface.createTable('purchased_data', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER,
},
data_set_id: {
allowNull: false,
type: Sequelize.STRING,
},
transaction_hash: {
allowNull: false,
type: Sequelize.STRING,
},
}),
down: (queryInterface, Sequelize) => queryInterface.dropTable('purchased_data'),
};
2 changes: 1 addition & 1 deletion models/data_info.js
@@ -7,7 +7,7 @@ module.exports = (sequelize, DataTypes) => {
root_hash: DataTypes.STRING(40),
import_timestamp: DataTypes.DATE,
data_size: DataTypes.INTEGER,
- transaction_hash: DataTypes.STRING(128),
+ origin: DataTypes.STRING,
}, {
tableName: 'data_info',
});
1 change: 1 addition & 0 deletions models/holding_data.js
@@ -7,6 +7,7 @@ module.exports = (sequelize, DataTypes) => {
distribution_public_key: DataTypes.STRING,
distribution_private_key: DataTypes.STRING,
distribution_epk: DataTypes.STRING,
transaction_hash: DataTypes.STRING(128),
}, {
tableName: 'holding_data',
});
1 change: 1 addition & 0 deletions models/offers.js
@@ -18,6 +18,7 @@ module.exports = (sequelize, DataTypes) => {
task: DataTypes.STRING,
status: DataTypes.STRING,
message: DataTypes.STRING,
transaction_hash: DataTypes.STRING(128),
}, {});
offers.associate = (models) => {
// associations can be defined here
13 changes: 13 additions & 0 deletions models/purchased_data.js
@@ -0,0 +1,13 @@

module.exports = (sequelize, DataTypes) => {
const purchased_data = sequelize.define('purchased_data', {
data_set_id: DataTypes.STRING,
transaction_hash: DataTypes.STRING(128),
}, {
tableName: 'purchased_data',
});
purchased_data.associate = (models) => {
// associations can be defined here
};
return purchased_data;
};
1 change: 1 addition & 0 deletions modules/DVService.js
@@ -127,6 +127,7 @@ class DVService {
data_provider_wallet,
import_timestamp: new Date(),
data_size: dataSize,
origin: 'PURCHASED',
});

// Check if enough tokens. From smart contract:
15 changes: 10 additions & 5 deletions modules/EventEmitter.js
@@ -229,6 +229,9 @@ class EventEmitter {
data.response.status(204);
data.response.send(result);
} else {
const transactionHash = await ImportUtilities
.getTransactionHash(dataSetId, dataInfo.origin);

data.response.status(200);
data.response.send({
import: ImportUtilities.normalizeImport(
@@ -237,7 +240,7 @@
result.edges,
),
root_hash: dataInfo.root_hash,
- transaction: dataInfo.transaction_hash,
+ transaction: transactionHash,
data_provider_wallet: dataInfo.data_provider_wallet,
});
}
@@ -277,14 +280,16 @@
try {
const dataimports = await Models.data_info.findAll();
data.response.status(200);
- data.response.send(dataimports.map(di => ({
+ const promises = dataimports.map(async di => ({
data_set_id: di.data_set_id,
total_documents: di.total_documents,
root_hash: di.root_hash,
data_size: di.data_size,
- transaction_hash: di.transaction_hash,
+ transaction_hash: await ImportUtilities
+ .getTransactionHash(di.data_set_id, di.origin),
data_provider_wallet: di.data_provider_wallet,
- })));
+ }));
+ data.response.send(await Promise.all(promises));
} catch (e) {
logger.error('Failed to get information about imports', e);
data.response.status(500);
@@ -484,7 +489,7 @@
import_timestamp: new Date(),
total_documents,
data_size: dataSize,
- transaction_hash: null,
+ origin: 'IMPORTED',
}).catch((error) => {
logger.error(error);
notifyError(error);
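The import-listing handler above now resolves each row's transaction hash asynchronously and sends a single response once every lookup settles. A minimal standalone sketch of that pattern (illustrative only; `resolveHash` is a made-up stand-in for `ImportUtilities.getTransactionHash`):

```js
// Illustrative only: map an async resolver over the rows, then await all
// lookups before responding once.
async function listImports(rows, resolveHash) {
    const promises = rows.map(async row => ({
        data_set_id: row.data_set_id,
        transaction_hash: await resolveHash(row.data_set_id, row.origin),
    }));
    return Promise.all(promises);
}
```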
42 changes: 42 additions & 0 deletions modules/ImportUtilities.js
@@ -7,6 +7,8 @@ const uuidv4 = require('uuid/v4');
const { sha3_256 } = require('js-sha3');
const { denormalizeGraph, normalizeGraph } = require('./Database/graph-converter');

const Models = require('../models');

/**
* Import related utilities
*/
@@ -256,6 +258,46 @@
static immutableFilterClassVertices(vertices) {
return vertices.filter(vertex => vertex.vertex_type !== 'CLASS');
}

/**
* Gets transaction hash for the data set
* @param dataSetId Data set ID
* @param origin Data set origin
* @return {Promise<string|null>}
*/
static async getTransactionHash(dataSetId, origin) {
let transactionHash = null;

switch (origin) {
case 'PURCHASED': {
const purchasedData = await Models.purchased_data.findOne({
where: { data_set_id: dataSetId },
});
transactionHash = purchasedData.transaction_hash;
break;
}
case 'HOLDING': {
const holdingData = await Models.holding_data.findOne({
where: { data_set_id: dataSetId },
});
transactionHash = holdingData.transaction_hash;
break;
}
case 'IMPORTED': {
// TODO support many offers for the same data set
const offers = await Models.offers.findAll({
where: { data_set_id: dataSetId },
});
if (offers.length > 0) {
transactionHash = offers[0].transaction_hash;
}
break;
}
default:
throw new Error(`Failed to find transaction hash for ${dataSetId} and origin ${origin}. Origin not valid.`);
}
return transactionHash;
}
}

module.exports = ImportUtilities;
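Typical usage of the new helper (an illustrative sketch). Note that the `PURCHASED` and `HOLDING` branches assume a matching row exists, since `findOne` returns `null` when there is none, while the `IMPORTED` branch falls back to `null`:

```js
// Illustrative usage from an async context:
const ImportUtilities = require('./modules/ImportUtilities');

async function hashForLocalImport(dataSetId) {
    // For 'IMPORTED' data sets this resolves to the transaction hash of the
    // first offer created for the data set, or null if none exists yet.
    return ImportUtilities.getTransactionHash(dataSetId, 'IMPORTED');
}
```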
5 changes: 3 additions & 2 deletions modules/command/dc/dc-offer-create-bc-command.js
@@ -33,7 +33,7 @@ class DCOfferCreateBcCommand extends Command {
litigationIntervalInMinutes,
} = command.data;

- await this.blockchain.createOffer(
+ const result = await this.blockchain.createOffer(
Utilities.normalizeHex(this.config.erc725Identity),
dataSetId,
dataRootHash,
@@ -49,9 +49,10 @@
this.logger.important(`Offer with internal ID ${internalOfferId} for data set ${dataSetId} written to blockchain. Waiting for DHs...`);

const offer = await Models.offers.findOne({ where: { id: internalOfferId } });
offer.transaction_hash = result.transactionHash;
offer.status = 'PUBLISHED';
offer.message = 'Offer has been published to Blockchain';
- await offer.save({ fields: ['status', 'message'] });
+ await offer.save({ fields: ['status', 'message', 'transaction_hash'] });

await this.blockchain.executePlugin('fingerprint-plugin', {
dataSetId,
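This command now assumes `blockchain.createOffer` resolves with a web3-style receipt exposing `transactionHash`; that shape is inferred from the usage above rather than shown in this diff. A defensive sketch under that assumption:

```js
// Sketch (assumes a web3-style receipt): guard against a missing hash
// before persisting it on the offer row.
const result = await this.blockchain.createOffer(/* ...args as above... */);
if (!result || !result.transactionHash) {
    throw new Error(`No transaction hash returned for offer ${internalOfferId}`);
}
offer.transaction_hash = result.transactionHash;
```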
2 changes: 1 addition & 1 deletion modules/command/dc/dc-replication-request-command.js
@@ -81,9 +81,9 @@ class DCReplicationRequestCommand extends Command {
distribution_epk_checksum: replication.distributionEpkChecksum,
litigation_root_hash: replication.litigationRootHash,
distribution_root_hash: replication.distributionRootHash,
- transaction_hash: replication.transaction_hash,
distribution_epk: replication.distributionEpk,
distribution_signature: distributionSignature.signature,
+ transaction_hash: offer.transaction_hash,
distributionSignature,
},
};
7 changes: 4 additions & 3 deletions modules/command/dh/dh-data-read-request-free-command.js
@@ -48,8 +48,6 @@ class DHDataReadRequestFreeCommand extends Command {
throw Error(`Couldn't find reply with ID ${id}.`);
}

- const offer = networkReplyModel.data;

if (networkReplyModel.receiver_wallet !== wallet &&
networkReplyModel.receiver_identity) {
throw Error('Sorry not your read request');
@@ -97,6 +95,9 @@
);
}

const transactionHash = await ImportUtilities
.getTransactionHash(dataInfo.data_set_id, dataInfo.origin);

/*
dataReadResponseObject = {
message: {
@@ -126,7 +127,7 @@
edges,
},
data_set_id: importId, // TODO: Temporal. Remove it.
- transaction_hash: dataInfo.transaction_hash,
+ transaction_hash: transactionHash,
};
const dataReadResponseObject = {
message: replyMessage,
3 changes: 3 additions & 0 deletions modules/command/dh/dh-replication-import-command.js
@@ -45,6 +45,7 @@ class DhReplicationImportCommand extends Command {
litigationRootHash,
distributionRootHash,
distributionSignature,
transactionHash,
} = command.data;
const decryptedVertices =
await ImportUtilities.immutableDecryptVertices(litigationVertices, litigationPublicKey);
@@ -123,6 +124,7 @@
data_provider_wallet: importResult.wallet,
import_timestamp: new Date(),
data_size: dataSize,
origin: 'HOLDING',
});

// Store holding information and generate keys for eventual data replication.
@@ -133,6 +135,7 @@
distribution_public_key: distributionPublicKey,
distribution_private_key: distributionPrivateKey,
distribution_epk: distributionEpk,
transaction_hash: transactionHash,
});

this.logger.important(`[DH] Replication finished for offer ID ${offerId}`);
6 changes: 6 additions & 0 deletions modules/command/dv/dv-data-read-response-free-command.js
@@ -129,6 +129,12 @@ class DVDataReadResponseFreeCommand extends Command {
data_provider_wallet: dcWallet,
import_timestamp: new Date(),
data_size: dataSize,
origin: 'PURCHASED',
});

// Store purchase information for the received data set.
await Models.purchased_data.create({
data_set_id: dataSetId,
transaction_hash,
});

3 changes: 3 additions & 0 deletions test/bdd/steps/network.js
@@ -320,6 +320,7 @@ Then(/^the last import should be the same on all nodes that replicated data$/, a
node.state.node_rpc_url,
this.state.lastImport.data_set_id,
);
expect(dhImportInfo.transaction, 'DH transaction hash should be defined').to.not.be.undefined;
// TODO: fix different root hashes error.
dhImportInfo.root_hash = dcImportInfo.root_hash;
if (deepEqual(dcImportInfo, dhImportInfo)) {
@@ -363,6 +364,8 @@ Then(/^the last import should be the same on all nodes that purchased data$/, as
if (!deepEqual(dcImportInfo, dvImportInfo)) {
throw Error(`Objects not equal: ${JSON.stringify(dcImportInfo)} and ${JSON.stringify(dvImportInfo)}`);
}
expect(dcImportInfo.transaction, 'DC transaction hash should be defined').to.not.be.undefined;
expect(dvImportInfo.transaction, 'DV transaction hash should be defined').to.not.be.undefined;
});

Given(/^I remember previous import's fingerprint value$/, async function () {