Skip to content

Commit

Permalink
Merge pull request #2690 from LiskHQ/2622-replace-blocks-getters
Browse files Browse the repository at this point in the history
Replace blocks getter interfaces with storage.entities.Block.get - Closes #2622
  • Loading branch information
MaciejBaj committed Jan 7, 2019
2 parents 7bff46c + 687d0f0 commit 8fb6154
Show file tree
Hide file tree
Showing 22 changed files with 352 additions and 240 deletions.
32 changes: 32 additions & 0 deletions logic/block.js
Original file line number Diff line number Diff line change
Expand Up @@ -516,4 +516,36 @@ Block.prototype.dbRead = function(raw) {
return block;
};

/**
 * Creates a block object from a raw database row returned by the
 * storage layer (storage.entities.Block.get).
 *
 * @param {Object} raw - Raw block row; numeric columns arrive as strings
 * @returns {null|block} Normalized block object, or null when raw.id is absent
 */
Block.prototype.storageRead = function(raw) {
	if (!raw.id) {
		return null;
	}
	const block = {
		id: raw.id,
		// Always pass the radix — parseInt without it can misparse
		// leading-zero or 0x-prefixed strings on some engines.
		version: parseInt(raw.version, 10),
		timestamp: parseInt(raw.timestamp, 10),
		height: parseInt(raw.height, 10),
		previousBlock: raw.previousBlockId,
		numberOfTransactions: parseInt(raw.numberOfTransactions, 10),
		// Amounts exceed Number.MAX_SAFE_INTEGER, hence Bignum.
		totalAmount: new Bignum(raw.totalAmount),
		totalFee: new Bignum(raw.totalFee),
		reward: new Bignum(raw.reward),
		payloadLength: parseInt(raw.payloadLength, 10),
		payloadHash: raw.payloadHash,
		generatorPublicKey: raw.generatorPublicKey,
		// Derive the delegate address from the public key.
		generatorId: __private.getAddressByPublicKey(raw.generatorPublicKey),
		blockSignature: raw.blockSignature,
		confirmations: parseInt(raw.confirmations, 10),
	};
	// totalForged = totalFee + reward, serialized as a decimal string.
	block.totalForged = block.totalFee.plus(block.reward).toString();
	return block;
};

module.exports = Block;
5 changes: 5 additions & 0 deletions modules/blocks.js
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,7 @@ class Blocks {
api: new BlocksAPI(
scope.logger,
scope.db,
scope.storage,
scope.logic.block,
scope.schema
),
Expand All @@ -73,6 +74,7 @@ class Blocks {
scope.logic.block,
scope.logic.transaction,
scope.db,
scope.storage,
scope.config
),
process: new BlocksProcess(
Expand All @@ -82,6 +84,7 @@ class Blocks {
scope.logic.transaction,
scope.schema,
scope.db,
scope.storage,
scope.sequence,
scope.genesisBlock
),
Expand All @@ -91,13 +94,15 @@ class Blocks {
scope.logic.block,
scope.logic.transaction,
scope.db,
scope.storage,
scope.genesisBlock
),
chain: new BlocksChain(
scope.logger,
scope.logic.block,
scope.logic.transaction,
scope.db,
scope.storage,
scope.genesisBlock,
scope.bus,
scope.balancesSequence
Expand Down
116 changes: 35 additions & 81 deletions modules/blocks/api.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@

const apiCodes = require('../../helpers/api_codes.js');
const ApiError = require('../../helpers/api_error.js');
const sortBy = require('../../helpers/sort_by.js').sortBy;

let library;
let self;
Expand All @@ -39,10 +38,11 @@ var modules;
* @todo Add description for the params
*/
class API {
constructor(logger, db, block, schema) {
constructor(logger, db, storage, block, schema) {
library = {
logger,
db,
storage,
schema,
logic: {
block,
Expand Down Expand Up @@ -77,107 +77,61 @@ class API {
* @returns {Object} cb.data - List of normalized blocks
*/
__private.list = function(filter, cb) {
const params = {};
const where = [];

if (filter.id) {
where.push('"b_id" = ${id}');
params.id = filter.id;
}

if (filter.generatorPublicKey) {
where.push('"b_generatorPublicKey"::bytea = ${generatorPublicKey}');
params.generatorPublicKey = filter.generatorPublicKey;
}

// FIXME: Useless condition
if (filter.numberOfTransactions) {
where.push('"b_numberOfTransactions" = ${numberOfTransactions}');
params.numberOfTransactions = filter.numberOfTransactions;
}

if (filter.previousBlock) {
where.push('"b_previousBlock" = ${previousBlock}');
params.previousBlock = filter.previousBlock;
}

if (filter.height === 0 || filter.height > 0) {
where.push('"b_height" = ${height}');
params.height = filter.height;
}

if (filter.fromTimestamp >= 0) {
where.push('"b_timestamp" >= ${fromTimestamp}');
params.fromTimestamp = filter.fromTimestamp;
}

if (filter.toTimestamp >= 1) {
where.push('"b_timestamp" <= ${toTimestamp}');
params.toTimestamp = filter.toTimestamp;
}

// FIXME: Useless condition
if (filter.totalAmount >= 0) {
where.push('"b_totalAmount" = ${totalAmount}');
params.totalAmount = filter.totalAmount;
}

// FIXME: Useless condition
if (filter.totalFee >= 0) {
where.push('"b_totalFee" = ${totalFee}');
params.totalFee = filter.totalFee;
}
const options = {};

const filters = {
id: filter.id,
generatorPublicKey: filter.generatorPublicKey,
numberOfTransactions: filter.numberOfTransactions,
previousBlockId: filter.previousBlock,
height: filter.height,
timestamp_gte: filter.fromTimestamp,
timestamp_lte: filter.toTimestamp,
totalAmount: filter.totalAmount,
totalFee: filter.totalFee,
reward: filter.reward,
};

// FIXME: Useless condition
if (filter.reward >= 0) {
where.push('"b_reward" = ${reward}');
params.reward = filter.reward;
}
Object.keys(filters).forEach(key => {
if (!filters[key]) {
delete filters[key];
}
});

if (!filter.limit) {
params.limit = 100;
options.limit = 100;
} else {
params.limit = Math.abs(filter.limit);
options.limit = Math.abs(filter.limit);
}

if (!filter.offset) {
params.offset = 0;
options.offset = 0;
} else {
params.offset = Math.abs(filter.offset);
options.offset = Math.abs(filter.offset);
}

if (params.limit > 100) {
if (options.limit > 100) {
return setImmediate(cb, 'Invalid limit. Maximum is 100');
}

const sort = sortBy(filter.sort || 'height:desc', {
sortFields: library.db.blocks.sortFields,
fieldPrefix: 'b_',
});
options.sort = filter.sort || 'height:desc';
const [sortField, sortMethod = 'ASC'] = options.sort.split(':');

if (sort.error) {
return setImmediate(cb, sort.error);
if (
!library.db.blocks.sortFields.includes(sortField) ||
!['ASC', 'DESC'].includes(sortMethod.toUpperCase())
) {
return setImmediate(cb, 'Invalid sort field');
}

return library.db.blocks
.list(
Object.assign(
{},
{
where,
sortField: sort.sortField,
sortMethod: sort.sortMethod,
},
params
)
)
return library.storage.entities.Block.get(filters, options)
.then(rows => {
const blocks = [];
const rowCount = rows.length;
// Normalize blocks
for (let i = 0; i < rowCount; i++) {
// FIXME: Can have poor performance because it performs SHA256 hash calculation for each block
const block = library.logic.block.dbRead(rows[i]);
const block = library.logic.block.storageRead(rows[i]);
blocks.push(block);
}
return setImmediate(cb, null, blocks);
Expand Down
29 changes: 16 additions & 13 deletions modules/blocks/chain.js
Original file line number Diff line number Diff line change
Expand Up @@ -48,13 +48,15 @@ class Chain {
block,
transaction,
db,
storage,
genesisBlock,
bus,
balancesSequence
) {
library = {
logger,
db,
storage,
genesisBlock,
bus,
balancesSequence,
Expand All @@ -80,20 +82,20 @@ class Chain {
Chain.prototype.saveGenesisBlock = function(cb) {
// Check if genesis block ID already exists in the database
// FIXME: Duplicated, there is another SQL query that we can use for that
library.db.blocks
.getGenesisBlockId(library.genesisBlock.block.id)
.then(rows => {
const blockId = rows.length && rows[0].id;

if (!blockId) {
// If there is no block with genesis ID - save to database
// WARNING: DB_WRITE
// FIXME: This will fail if we already have genesis block in database, but with different ID
return self.saveBlock(library.genesisBlock.block, err =>
setImmediate(cb, err)
);
library.storage.entities.Block.isPersisted({
id: library.genesisBlock.block.id,
})
.then(isPersisted => {
if (isPersisted) {
return setImmediate(cb);
}
return setImmediate(cb);

// If there is no block with genesis ID - save to database
// WARNING: DB_WRITE
// FIXME: This will fail if we already have genesis block in database, but with different ID
return self.saveBlock(library.genesisBlock.block, err =>
setImmediate(cb, err)
);
})
.catch(err => {
library.logger.error(err.stack);
Expand Down Expand Up @@ -194,6 +196,7 @@ Chain.prototype.deleteBlock = function(blockId, cb, tx) {
* @returns {Object} cb.res - SQL response
*/
Chain.prototype.deleteAfterBlock = function(blockId, cb) {
// TODO: REPLACE BY STORAGE WHEN DELETE IS IMPLEMENTED
library.db.blocks
.deleteAfterBlock(blockId)
.then(res => setImmediate(cb, null, res))
Expand Down
45 changes: 24 additions & 21 deletions modules/blocks/process.js
Original file line number Diff line number Diff line change
Expand Up @@ -54,13 +54,15 @@ class Process {
transaction,
schema,
db,
storage,
sequence,
genesisBlock
) {
library = {
logger,
schema,
db,
storage,
sequence,
genesisBlock,
logic: {
Expand Down Expand Up @@ -312,28 +314,28 @@ Process.prototype.getCommonBlock = function(peer, height, cb) {
},
function(common, waterCb) {
// Check that block with ID, previousBlock and height exists in database
library.db.blocks
.getCommonBlock({
id: common.id,
previousBlock: common.previousBlock,
height: common.height,
})
.then(rows => {
if (!rows.length || !rows[0].count) {
// Block doesn't exist - comparison failed
comparisonFailed = true;
return setImmediate(
waterCb,
[
'Chain comparison failed with peer:',
peer.string,
'using block:',
JSON.stringify(common),
].join(' ')
);
library.storage.entities.Block.isPersisted({
id: common.id,
previousBlockId: common.previousBlock,
height: common.height,
})
.then(isPersisted => {
if (isPersisted) {
// Block exists - it's common between our node and remote peer
return setImmediate(waterCb, null, common);
}
// Block exists - it's common between our node and remote peer
return setImmediate(waterCb, null, common);

// Block doesn't exist - comparison failed
comparisonFailed = true;
return setImmediate(
waterCb,
[
'Chain comparison failed with peer:',
peer.string,
'using block:',
JSON.stringify(common),
].join(' ')
);
})
.catch(err => {
// SQL error occurred
Expand Down Expand Up @@ -376,6 +378,7 @@ Process.prototype.loadBlocksOffset = function(limit, offset, cb) {
// Loads full blocks from database
// FIXME: Weird logic in that SQL query, also ordering used can be performance bottleneck - to rewrite
library.db.blocks
// TODO: REPLACE BY STORAGE WHEN EXTENDED BLOCK IS IMPLEMENTED
.loadBlocksOffset(params.offset, params.limit)
.then(rows => {
// Normalize blocks
Expand Down

0 comments on commit 8fb6154

Please sign in to comment.