Merge pull request #233 from davidkel/fixreadwrite
Minor improvements to read/write asset benchmark with information in README
davidkel committed Sep 28, 2022
2 parents f9f22f4 + 9f1f378 commit c112f12
Showing 9 changed files with 116 additions and 50 deletions.
4 changes: 4 additions & 0 deletions .gitignore
@@ -19,3 +19,7 @@ package-lock.json
/package.json

**/__pycache__/**

pkg
bin
.vscode
37 changes: 33 additions & 4 deletions benchmarks/api/fabric/README.md
@@ -2,8 +2,37 @@

This folder contains tests that target Fabric shim API methods that may be called within chaincode

Fabric chaincode may be written in two ways:
- Using a Contract wrapper
- Coding direct to the shim
## Read/Write Asset

These methods are split within the contract and base folders respectively.
The read/write asset benchmark is a more complex benchmark, as it requires that you run a preload round or a separate benchmark to load the SUT with a specific number of assets. It also provides the capability to delete those created assets.

The example shows a create, followed by read/writes, followed by a delete, and it abides by the following rules (which must also be adhered to if you split these rounds into separate benchmarks); a sketch of such a round configuration is shown after the list below.

1. For preload-assets and delete-preloaded-assets you must ensure that the round uses TxNumber and that TxNumber matches the number of workers.
2. The number of workers must be the same for preload-assets, read-write-assets and delete-preloaded-assets. If you want to use a different number of workers, you need to delete the preloaded assets first and create them again.
3. The number of assets should be the same for preload-assets, read-write-assets and delete-preloaded-assets. However, you could use fewer assets for the read/write benchmark if required.
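
To make these rules concrete, here is a minimal sketch of a benchmark configuration that runs all three rounds in order with 5 workers. The module paths, rate controller settings and argument values are illustrative assumptions rather than copied from this repository, so adjust them to match the actual workload files and your environment:

```yaml
# Illustrative sketch only; module paths and argument values are assumptions.
test:
  workers:
    number: 5
  rounds:
    - label: preload-assets
      txNumber: 5                    # must match the number of workers
      rateControl: { type: fixed-rate, opts: { tps: 5 } }
      workload:
        module: benchmarks/api/fabric/workloads/preload-assets.js
        arguments: { assets: 1000, byteSize: 100, batchSize: 50 }
    - label: read-write-assets
      txDuration: 60
      rateControl: { type: fixed-rate, opts: { tps: 50 } }
      workload:
        module: benchmarks/api/fabric/workloads/read-write-assets.js
        arguments:
          assets: 1000
          byteSize: 100
          readCount: 2
          write:
            count: 2
            writeMode: allread
    - label: delete-preloaded-assets
      txNumber: 5                    # must match the number of workers again
      rateControl: { type: fixed-rate, opts: { tps: 5 } }
      workload:
        module: benchmarks/api/fabric/workloads/delete-preloaded-assets.js
        arguments: { assets: 1000, byteSize: 100, batchSize: 50 }
```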

Given that this benchmark will both read and write assets, there are likely to be failures if the same key is read and updated concurrently (i.e. MVCC_READ_CONFLICT). This is an expected failure and, if using the 2.4 binding, will result in an error such as:

```
2022.09.26-10:25:06.322 error [caliper] [connectors/peer-gateway/PeerGateway] Failed to perform submit transaction [readWriteAssets] using arguments [["client1_100_201","client1_100_0"],["client1_100_201","client1_100_0"],C], with error: Error: Failed to submit trasaction with status code: 11
```

as a status code of 11 corresponds to MVCC_READ_CONFLICT. These errors are expected; a normal client would handle this by, for example, resubmitting the transaction or reporting that the record has been updated and should be refreshed and reviewed. Caliper currently registers this as a transaction failure, although that is not strictly accurate.
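
As an illustration of the client-side handling mentioned above, here is a minimal retry sketch. It assumes the `@hyperledger/fabric-gateway` Node SDK, where a failed commit is reported as a `CommitError` carrying the transaction validation code; the retry limit and helper name are purely illustrative:

```javascript
// Sketch only: retrying on MVCC_READ_CONFLICT from a normal client application.
// Assumes the @hyperledger/fabric-gateway Node SDK; adapt to your binding.
const { CommitError } = require('@hyperledger/fabric-gateway');

const MVCC_READ_CONFLICT = 11; // transaction validation code for an MVCC read conflict

async function submitWithRetry(contract, transactionName, args, maxAttempts = 3) {
    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
        try {
            return await contract.submitTransaction(transactionName, ...args);
        } catch (err) {
            const isMvccConflict = err instanceof CommitError && err.code === MVCC_READ_CONFLICT;
            if (!isMvccConflict || attempt === maxAttempts) {
                throw err;
            }
            // Another transaction updated one of our read keys first; try again.
        }
    }
}
```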

### Workload options

The following extra arguments (compared to the rest of the asset benchmarks) can be seen in this example:
```yaml
readCount: 2
write:
count: 2
writeMode: allread
```

- `readCount` defines the number of assets to be read
- `count` under `write` defines the number of assets to be written to
- `writeMode` has 3 options: `allread`, `notread` and `random` (see the sketch below)
  - `allread` means write to all the assets that were read (it accounts for differences between `readCount` and the write `count`)
  - `notread` means it will explicitly not write to the assets that were read
  - `random` means that no checks are made; it randomly selects the assets to read and write, so the selections may or may not contain the same asset
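
A minimal sketch of how the three `writeMode` options could translate into the set of asset ids that get written, given the ids already chosen for reading. This illustrates the semantics described above under stated assumptions; it is not the workload module's actual implementation, and the function and parameter names are hypothetical:

```javascript
// Illustrative only: the intended semantics of writeMode, not the real
// read-write-assets workload code.
function chooseWriteIds(readIds, writeCount, writeMode, allIds) {
    switch (writeMode) {
        case 'allread':
            // Write to the assets that were read, trimmed to the requested write count.
            return readIds.slice(0, writeCount);
        case 'notread': {
            // Explicitly avoid the assets that were read.
            const candidates = allIds.filter((id) => !readIds.includes(id));
            return candidates.slice(0, writeCount);
        }
        case 'random':
        default: {
            // No checks: any asset may be written, including ones just read.
            const picked = [];
            while (picked.length < writeCount) {
                picked.push(allIds[Math.floor(Math.random() * allIds.length)]);
            }
            return picked;
        }
    }
}
```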
6 changes: 3 additions & 3 deletions benchmarks/api/fabric/workloads/helper.js
@@ -29,7 +29,7 @@ module.exports.retrieveRandomAssetIds = function(assetNumber) {
* @param {number} startRange beginning of range from which get the number of random numbers
* @param {number} finishRange end of range from which get the number of random numbers
*/
module.exports.retrieveRandomAssetIdsFromRange = function(assetNumber, startRange, finishRange){
module.exports.retrieveRandomAssetIdsFromRange = function(assetNumber, startRange, finishRange) {
const difference = finishRange - startRange;
const ids = [];
while (ids.length < assetNumber) {
@@ -90,7 +90,7 @@ module.exports.addBatchAssets = async function(bcObj, context, clientIdx, args,
}

// -Insert each batch
for (const index in batches){
for (const index in batches) {
const batch = batches[index];
try {
let myArgs;
@@ -177,7 +177,7 @@ module.exports.addMixedBatchAssets = async function(bcObj, context, clientIdx, a

// -Insert each batch
console.log(' -> Adding ' + batches.length + ' batch(es) to DB');
for (const index in batches){
for (const index in batches) {
const batch = batches[index];
try {
const myArgs = {
@@ -21,6 +21,8 @@ class DeletePreloadedAssetsWorkload extends WorkloadModuleBase {
this.assetsPerWorker = [];
this.asset = {};
this.byteSize = 0;
this.assetPrefix = null;
this.transactionHasRun = false;
}

/**
@@ -43,17 +45,27 @@ class DeletePreloadedAssetsWorkload extends WorkloadModuleBase {
this.batchSize = args.batchSize ? parseInt(args.batchSize) : 1;
this.assetsPerWorker = helper.getAssetsPerWorker(args.assets, this.workerIndex, totalWorkers);
this.batchesNum = Math.ceil(this.assetsPerWorker/this.batchSize);
this.assetPrefix = 'client' + this.workerIndex + '_' + this.byteSize + '_';
this.transactionHasRun = false;
}

/**
* Assemble TXs for the round.
* @return {Promise<TxStatus[]>}
*/
async submitTransaction() {
if (this.transactionHasRun) {
console.log('A Worker should only ever submit this transaction once, please make sure TxNumber and Workers are the same value');
console.log('Transaction NOT submitted');
return;
}

this.transactionHasRun = true;

for (let i = 0; i < this.batchesNum; i++) {
const keys = []
for (let i = 0; (i < this.batchSize) && (this.txIndex < this.assetsPerWorker); i++) {
const key = 'client' + this.workerIndex + '_' + this.byteSize + '_' + this.txIndex;
const key = this.assetPrefix + this.txIndex;
keys.push(key);
this.txIndex++;
}
@@ -21,6 +21,8 @@ class PreloadAssetsWorkload extends WorkloadModuleBase {
this.assetsPerWorker = [];
this.asset = {};
this.byteSize = 0;
this.assetPrefix = null;
this.transactionHasRun = false;
}

/**
@@ -52,18 +54,27 @@ class PreloadAssetsWorkload extends WorkloadModuleBase {
};

const paddingSize = this.byteSize - helper.bytes(JSON.stringify(this.asset));
this.assetPrefix = 'client' + this.workerIndex + '_' + this.byteSize + '_';
this.asset.content = 'B'.repeat(paddingSize);
this.transactionHasRun = false;
}

/**
* Assemble TXs for the round.
* @return {Promise<TxStatus[]>}
*/
async submitTransaction() {
for(let i = 0; i < this.batchesNum; i++){
if (this.transactionHasRun) {
console.log('A Worker should only ever submit this transaction once, please make sure TxNumber and Workers are the same value');
console.log('Transaction NOT submitted');
return;
}

this.transactionHasRun = true;
for (let i = 0; i < this.batchesNum; i++) {
let batch = [];
for (let i = 0; (i < this.batchSize) && (this.txIndex < this.assetsPerWorker); i++) {
this.asset.uuid = 'client' + this.workerIndex + '_' + this.byteSize + '_' + this.txIndex;
this.asset.uuid = this.assetPrefix + this.txIndex;
const batchAsset = JSON.parse(JSON.stringify(this.asset));
batch.push(batchAsset);
this.txIndex++;
@@ -216,14 +216,19 @@ func (contract *FixedAssetContract) ReadWriteAssets(ctx utils.Context, readIds [

fixedAsset := assets.FixedAsset{}
var bytes []byte
var id string

for _, id := range readIds {
for _, id = range readIds {
var err error
bytes, err = ctx.GetStub().GetState(id)

if err != nil {
fmt.Println("Error performing GetState: " + err.Error())
return err
return fmt.Errorf("Error performing GetState on %s: %s", id, err.Error())
}

if bytes == nil {
return fmt.Errorf("tried to read asset: %s which doesn't exist", id)
}
}

@@ -232,7 +237,7 @@ func (contract *FixedAssetContract) ReadWriteAssets(ctx utils.Context, readIds [
if err != nil {
fmt.Println("Error performing json.Unmarshal: " + err.Error())
fmt.Println("Error performing json.Unmarshal on bytes: " + string(bytes[:]))
return err
return fmt.Errorf("tried to read asset: %s. with error: %s. bytes: %s", id, err.Error(), string(bytes[:]))
}

maxPaddingSize := len(fixedAsset.Content) + len(fixedAsset.UUID)
@@ -244,7 +249,7 @@ func (contract *FixedAssetContract) ReadWriteAssets(ctx utils.Context, readIds [
err := ctx.GetStub().PutState(id, bytes)
if err != nil {
fmt.Println("Error performing PutState: " + err.Error())
return err
return fmt.Errorf("Error performing PutState on %s: %s", id, err.Error())
}
}

50 changes: 25 additions & 25 deletions src/fabric/api/fixed-asset-base/node/fixed-asset-base.js
@@ -44,11 +44,11 @@ const FixedAssetBase = class {
return shim.error(err);
}
}

/**
* Return a null response
* @param {*} stub
* @param {*} args
* @param {*} stub
* @param {*} args
*/
async emptyContract(stub, args) {
if (isVerbose) {
@@ -66,15 +66,15 @@ const FixedAssetBase = class {
* content: variable content
* }
* Directly writes the string content against the passed uuid
* @param {*} stub
* @param {*} args
* @param {*} stub
* @param {*} args
*/
async createAsset(stub, args) {
const uuid = args[0];
const content = args[1];
if (isVerbose) {
console.log('Entering createAsset');
}
}
await stub.putState(uuid, Buffer.from(content));
if (isVerbose) {
console.log('Exiting createAsset');
@@ -90,8 +90,8 @@ const FixedAssetBase = class {
* content: variable content
* }
* The body is parsed to include a step where the body is an object
* @param {*} stub
* @param {*} args
* @param {*} stub
* @param {*} args
*/
async createAssetObject(stub, args) {
if (isVerbose) {
@@ -112,8 +112,8 @@ const FixedAssetBase = class {
* byteSize: target byteSize of asset
* content: variable content
* }, ...]
* @param {*} stub
* @param {*} args
* @param {*} stub
* @param {*} args
*/
async createAssetsFromBatch(stub, args) {
if (isVerbose) {
@@ -134,8 +134,8 @@ const FixedAssetBase = class {
/**
* Get an Asset from the registry that was created by createAsset
* - directly returns the string
* @param {*} stub
* @param {*} args
* @param {*} stub
* @param {*} args
* @returns the result of the query
*/
async getAsset(stub, args) {
@@ -149,8 +149,8 @@ const FixedAssetBase = class {
/**
* Get an Asset from the registry that was created by createAsset
* -includes a parse stage to enable cast of the object
* @param {*} stub
* @param {*} args
* @param {*} stub
* @param {*} args
* @returns the result of the query
*/
async getAssetObject(stub, args) {
@@ -167,8 +167,8 @@ const FixedAssetBase = class {

/**
* Get all Assets from the registry using a passed array of UUIDs
* @param {*} stub
* @param {*} args
* @param {*} stub
* @param {*} args
* @returns the result of the query
*/
async getAssetsFromBatch(stub, args) {
@@ -190,8 +190,8 @@ const FixedAssetBase = class {

/**
* Delete an Asset from the registry that was created by createAsset
* @param {*} stub
* @param {*} args
* @param {*} stub
* @param {*} args
* @returns the result of the delete
*/
async deleteAsset(stub, args) {
@@ -204,7 +204,7 @@ const FixedAssetBase = class {

/**
* Delete a batch of assets from the registry
* @param {*} stub
* @param {*} stub
* @param {*} args
*/
async deleteAssetsFromBatch(stub, args) {
@@ -223,8 +223,8 @@ const FixedAssetBase = class {

/**
* Run a paginated rich query
* @param {*} stub
* @param {*} args
* @param {*} stub
* @param {*} args
* @returns {JSON} the results of the paginated query and responseMetadata in a JSON object
*/
async paginatedRichQuery(stub, args) {
@@ -245,7 +245,7 @@ const FixedAssetBase = class {
Bookmark: metadata.bookmark,
};
} else {

const { iterator, metadata } = await stub.getQueryResultWithPagination(queryString, pageSize);
response.results = await this.getAllResults(iterator);
response.responseMetadata = {
@@ -305,7 +305,7 @@ const FixedAssetBase = class {
const assetAsBytes = await stub.getState(id);

if (!assetAsBytes || assetAsBytes.length === 0) {
throw new Error(`Asset with id ${id} was not successfully retrieved`);
throw new Error(`Asset with id ${id} does not exist`);
}

fixedAssetBytes = assetAsBytes;
@@ -330,8 +330,8 @@ const FixedAssetBase = class {

/**
* Run a paginated range query on the DB contents
* @param {*} stub
* @param {*} args
* @param {*} stub
* @param {*} args
* startKey - the first key in the range of interest
* endKey - the end key in the range of interest
* pagesize - the pagesize to return
13 changes: 9 additions & 4 deletions src/fabric/api/fixed-asset/go/FixedAssetContract.go
@@ -299,14 +299,19 @@ func (s *SmartContract) ReadWriteAssets(ctx contractapi.TransactionContextInterf

fixedAsset := assets.FixedAsset{}
var bytes []byte
var id string

for _, id := range readIds {
for _, id = range readIds {
var err error
bytes, err = ctx.GetStub().GetState(id)

if err != nil {
fmt.Println("Error performing GetState: " + err.Error())
return err
return fmt.Errorf("Error performing GetState on %s: %s", id, err.Error())
}

if bytes == nil {
return fmt.Errorf("tried to read asset: %s which doesn't exist", id)
}
}

@@ -315,7 +320,7 @@ func (s *SmartContract) ReadWriteAssets(ctx contractapi.TransactionContextInterf
if err != nil {
fmt.Println("Error performing json.Unmarshal: " + err.Error())
fmt.Println("Error performing json.Unmarshal on bytes: " + string(bytes[:]))
return err
return fmt.Errorf("tried to read asset: %s. with error: %s. bytes: %s", id, err.Error(), string(bytes[:]))
}

maxPaddingSize := len(fixedAsset.Content) + len(fixedAsset.UUID)
@@ -327,7 +332,7 @@ func (s *SmartContract) ReadWriteAssets(ctx contractapi.TransactionContextInterf
err := ctx.GetStub().PutState(id, bytes)
if err != nil {
fmt.Println("Error performing PutState: " + err.Error())
return err
return fmt.Errorf("Error performing PutState on %s: %s", id, err.Error())
}
}

