From a5701495c7a3767584d6573d778074d61442da17 Mon Sep 17 00:00:00 2001
From: Peter Broadhurst
Date: Sun, 4 Dec 2022 20:55:41 -0500
Subject: [PATCH 1/5] Move DB logic to firefly-common impl

Signed-off-by: Peter Broadhurst
---
 go.mod | 2 +-
 go.sum | 12 +
 internal/batch/batch_processor.go | 8 +-
 internal/batch/batch_processor_test.go | 16 +-
 internal/data/data_manager_test.go | 6 +-
 internal/database/sqlcommon/batch_sql.go | 35 +-
 internal/database/sqlcommon/batch_sql_test.go | 18 +-
 internal/database/sqlcommon/blob_sql.go | 45 +-
 internal/database/sqlcommon/blob_sql_test.go | 22 +-
 .../sqlcommon/blockchainevents_sql.go | 22 +-
 .../sqlcommon/blockchainevents_sql_test.go | 10 +-
 internal/database/sqlcommon/chart_sql.go | 2 +-
 internal/database/sqlcommon/chart_sql_test.go | 2 +-
 internal/database/sqlcommon/config.go | 3 +-
 .../database/sqlcommon/contractapis_sql.go | 23 +-
 .../sqlcommon/contractapis_sql_test.go | 4 +-
 .../sqlcommon/contractlisteners_sql.go | 27 +-
 .../sqlcommon/contractlisteners_sql_test.go | 14 +-
 internal/database/sqlcommon/data_sql.go | 56 +-
 internal/database/sqlcommon/data_sql_test.go | 28 +-
 internal/database/sqlcommon/datatype_sql.go | 23 +-
 .../database/sqlcommon/datatype_sql_test.go | 14 +-
 internal/database/sqlcommon/event_sql.go | 60 +-
 internal/database/sqlcommon/event_sql_test.go | 45 +-
 internal/database/sqlcommon/ffi_events_sql.go | 23 +-
 .../database/sqlcommon/ffi_events_sql_test.go | 4 +-
 .../database/sqlcommon/ffi_methods_sql.go | 23 +-
 .../sqlcommon/ffi_methods_sql_test.go | 4 +-
 internal/database/sqlcommon/ffi_sql.go | 23 +-
 internal/database/sqlcommon/ffi_sql_test.go | 4 +-
 internal/database/sqlcommon/filter_sql.go | 234 -------
 .../database/sqlcommon/filter_sql_test.go | 227 -------
 internal/database/sqlcommon/group_sql.go | 36 +-
 internal/database/sqlcommon/group_sql_test.go | 53 +-
 internal/database/sqlcommon/identity_sql.go | 28 +-
 .../database/sqlcommon/identity_sql_test.go | 18 +-
 internal/database/sqlcommon/message_sql.go | 100 +--
 .../database/sqlcommon/message_sql_test.go | 76 +--
 internal/database/sqlcommon/namespace_sql.go | 14 +-
 .../database/sqlcommon/namespace_sql_test.go | 12 +-
 internal/database/sqlcommon/nextpin_sql.go | 28 +-
 .../database/sqlcommon/nextpin_sql_test.go | 12 +-
 internal/database/sqlcommon/nonce_sql.go | 36 +-
 internal/database/sqlcommon/nonce_sql_test.go | 16 +-
 internal/database/sqlcommon/offset_sql.go | 58 +-
 .../database/sqlcommon/offset_sql_test.go | 30 +-
 internal/database/sqlcommon/operation_sql.go | 31 +-
 .../database/sqlcommon/operation_sql_test.go | 14 +-
 internal/database/sqlcommon/pin_sql.go | 48 +-
 internal/database/sqlcommon/pin_sql_test.go | 22 +-
 internal/database/sqlcommon/provider.go | 65 --
 .../database/sqlcommon/provider_mock_test.go | 9 +-
 .../sqlcommon/provider_sqlitego_test.go | 9 +-
 internal/database/sqlcommon/sqlcommon.go | 410 +-----------
 internal/database/sqlcommon/sqlcommon_test.go | 273 +-------
 .../database/sqlcommon/subscription_sql.go | 43 +-
 .../sqlcommon/subscription_sql_test.go | 24 +-
 .../database/sqlcommon/tokenapproval_sql.go | 37 +-
 .../sqlcommon/tokenapproval_sql_test.go | 18 +-
 .../database/sqlcommon/tokenbalance_sql.go | 41 +-
 .../sqlcommon/tokenbalance_sql_test.go | 18 +-
 internal/database/sqlcommon/tokenpool_sql.go | 23 +-
 .../database/sqlcommon/tokenpool_sql_test.go | 14 +-
 .../database/sqlcommon/tokentransfer_sql.go | 23 +-
 .../sqlcommon/tokentransfer_sql_test.go | 14 +-
 .../database/sqlcommon/transaction_sql.go | 31 +-
 .../sqlcommon/transaction_sql_test.go | 18 +-
 internal/database/sqlcommon/verifier_sql.go | 28 +-
 .../database/sqlcommon/verifier_sql_test.go | 18 +-
 .../handler_identity_claim_test.go | 4 +-
 .../handler_identity_update_test.go | 6 +-
 internal/definitions/sender.go | 2 +-
 internal/events/aggregator_batch_state.go | 2 +-
 .../events/aggregator_batch_state_test.go | 8 +-
 internal/events/aggregator_test.go | 42 +-
 internal/events/batch_pin_complete_test.go | 2 +-
 internal/events/event_dispatcher_test.go | 6 +-
 internal/privatemessaging/groupmanager.go | 2 +-
 internal/reference/reference.go | 4 +-
 internal/txcommon/txcommon.go | 2 +-
 internal/txcommon/txcommon_test.go | 20 +-
 mocks/databasemocks/plugin.go | 400 ++++++------
 pkg/core/message.go | 32 +-
 pkg/core/message_test.go | 2 +-
 pkg/core/stringarray.go | 145 -----
 pkg/core/stringarray_test.go | 128 ----
 pkg/core/transaction.go | 12 +-
 pkg/database/filter.go | 597 ------------------
 pkg/database/filter_test.go | 345 ----------
 pkg/database/plugin.go | 563 ++++++++---------
 pkg/database/query_fields.go | 347 ----------
 pkg/database/query_fields_test.go | 219 -------
 pkg/database/update.go | 147 -----
 pkg/database/update_test.go | 55 --
 test/e2e/client/restclient.go | 8 +-
 test/e2e/multiparty/common.go | 2 +-
 96 files changed, 1402 insertions(+), 4487 deletions(-)
 delete mode 100644 internal/database/sqlcommon/filter_sql.go
 delete mode 100644 internal/database/sqlcommon/filter_sql_test.go
 delete mode 100644 internal/database/sqlcommon/provider.go
 delete mode 100644 pkg/core/stringarray.go
 delete mode 100644 pkg/core/stringarray_test.go
 delete mode 100644 pkg/database/filter.go
 delete mode 100644 pkg/database/filter_test.go
 delete mode 100644 pkg/database/query_fields.go
 delete mode 100644 pkg/database/query_fields_test.go
 delete mode 100644 pkg/database/update.go
 delete mode 100644 pkg/database/update_test.go

diff --git a/go.mod b/go.mod
index 374f3cc9e0..f6b2b1912d 100644
--- a/go.mod
+++ b/go.mod
@@ -14,7 +14,7 @@ require (
 	github.com/golang-migrate/migrate/v4 v4.15.2
 	github.com/gorilla/mux v1.8.0
 	github.com/gorilla/websocket v1.5.0
-	github.com/hyperledger/firefly-common v1.1.4
+	github.com/hyperledger/firefly-common v1.1.5-0.20221205013211-db8cd88a141f
 	github.com/hyperledger/firefly-signer v1.1.2
 	github.com/jarcoal/httpmock v1.2.0
 	github.com/karlseguin/ccache v2.0.3+incompatible
diff --git a/go.sum b/go.sum
index dfbe075100..24833da598 100644
--- a/go.sum
+++ b/go.sum
@@ -677,6 +677,18 @@ github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/J
 github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
 github.com/hyperledger/firefly-common v1.1.4 h1:7Oqe7FFOSjt8Uo3/i/ujD4wke2kD2Xr2Kouq4QmgWns=
 github.com/hyperledger/firefly-common v1.1.4/go.mod h1:taWRM7vsramcM7iWjeOmIgm3yd+RoWvaXSWxZKpSXM8=
+github.com/hyperledger/firefly-common v1.1.5-0.20221203042833-800b9fd4d10a h1:QQj909iAkIYl+eT7hJSzj2+ICo84i3coBc3JrY7Mylg=
+github.com/hyperledger/firefly-common v1.1.5-0.20221203042833-800b9fd4d10a/go.mod h1:3ubN46/dB+xurCPvdfqMKjB/CJU3I/DsfOoS7dY2SyQ=
+github.com/hyperledger/firefly-common v1.1.5-0.20221205003343-e0ddbf5dc33d h1:VuGCjaBtRvhUg/m9Sl5T1jvIpU6QsugBg6sNucugjGU=
+github.com/hyperledger/firefly-common v1.1.5-0.20221205003343-e0ddbf5dc33d/go.mod h1:3ubN46/dB+xurCPvdfqMKjB/CJU3I/DsfOoS7dY2SyQ=
+github.com/hyperledger/firefly-common v1.1.5-0.20221205003709-9aa8a5f5bd55 h1:CPWQEodCUg9rSWUj9RTeY6b86SMqo0aaI2mve/zPzAw=
+github.com/hyperledger/firefly-common v1.1.5-0.20221205003709-9aa8a5f5bd55/go.mod h1:3ubN46/dB+xurCPvdfqMKjB/CJU3I/DsfOoS7dY2SyQ=
+github.com/hyperledger/firefly-common v1.1.5-0.20221205005833-d82dd5500c51 h1:8ZXqf9MvInNdCMjSlXlMUT379VuKyc1dH1t9facqGaA=
+github.com/hyperledger/firefly-common v1.1.5-0.20221205005833-d82dd5500c51/go.mod h1:3ubN46/dB+xurCPvdfqMKjB/CJU3I/DsfOoS7dY2SyQ=
+github.com/hyperledger/firefly-common v1.1.5-0.20221205010550-085676278a41 h1:Tbz2Fbyq4fTk5rNXwiHk2W7wqZt1lrf/8wV4KGou2/w=
+github.com/hyperledger/firefly-common v1.1.5-0.20221205010550-085676278a41/go.mod h1:3ubN46/dB+xurCPvdfqMKjB/CJU3I/DsfOoS7dY2SyQ=
+github.com/hyperledger/firefly-common v1.1.5-0.20221205013211-db8cd88a141f h1:EfkvaVkVldy0tmJUjN0pnR83DpZYfG1ARR60c+Q4NJ4=
+github.com/hyperledger/firefly-common v1.1.5-0.20221205013211-db8cd88a141f/go.mod h1:3ubN46/dB+xurCPvdfqMKjB/CJU3I/DsfOoS7dY2SyQ=
 github.com/hyperledger/firefly-signer v1.1.2 h1:QuS3M5w9px3BnPa4jIWMDg+z2ySK76MoO5Egh0G+tFg=
 github.com/hyperledger/firefly-signer v1.1.2/go.mod h1:4h2MN910A2knrWGYCT+aWjBDlhptgQn/9WcT1N/Ct8s=
 github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho=
diff --git a/internal/batch/batch_processor.go b/internal/batch/batch_processor.go
index 279e9073d5..25125ce1f9 100644
--- a/internal/batch/batch_processor.go
+++ b/internal/batch/batch_processor.go
@@ -102,7 +102,7 @@ type DispatchState struct {
 	Data           core.DataArray
 	Pins           []*fftypes.Bytes32
 	noncesAssigned map[fftypes.Bytes32]*nonceState
-	msgPins        map[fftypes.UUID]core.FFStringArray
+	msgPins        map[fftypes.UUID]fftypes.FFStringArray
 }

 const batchSizeEstimateBase = int64(512)
@@ -484,9 +484,9 @@ func (bp *batchProcessor) maskContexts(ctx context.Context, state *DispatchState
 			log.L(ctx).Debugf("Message %s already has %d pins allocated", msg.Header.ID, len(msg.Pins))
 			continue
 		}
-		var pins core.FFStringArray
+		var pins fftypes.FFStringArray
 		if isPrivate {
-			pins = make(core.FFStringArray, len(msg.Header.Topics))
+			pins = make(fftypes.FFStringArray, len(msg.Header.Topics))
 			state.msgPins[*msg.Header.ID] = pins
 		}
 		for i, topic := range msg.Header.Topics {
@@ -548,7 +548,7 @@ func (bp *batchProcessor) sealBatch(state *DispatchState) (err error) {
 	// Clear state from any previous retry. We need to do fresh queries against the DB for nonces.
state.noncesAssigned = make(map[fftypes.Bytes32]*nonceState) - state.msgPins = make(map[fftypes.UUID]core.FFStringArray) + state.msgPins = make(map[fftypes.UUID]fftypes.FFStringArray) if bp.conf.txType == core.TransactionTypeBatchPin { // Generate a new Transaction, which will be used to record status of the associated transaction as it happens diff --git a/internal/batch/batch_processor_test.go b/internal/batch/batch_processor_test.go index 602cec9f7f..6e97823255 100644 --- a/internal/batch/batch_processor_test.go +++ b/internal/batch/batch_processor_test.go @@ -251,7 +251,7 @@ func TestInsertNewNonceFail(t *testing.T) { {Header: core.MessageHeader{ ID: fftypes.NewUUID(), Group: gid, - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, }}, }, }) @@ -291,7 +291,7 @@ func TestUpdateExistingNonceFail(t *testing.T) { {Header: core.MessageHeader{ ID: fftypes.NewUUID(), Group: gid, - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, }}, }, }) @@ -326,7 +326,7 @@ func TestGetNonceFail(t *testing.T) { {Header: core.MessageHeader{ ID: fftypes.NewUUID(), Group: gid, - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, }}, }, }) @@ -362,7 +362,7 @@ func TestGetNonceMigrationFail(t *testing.T) { {Header: core.MessageHeader{ ID: fftypes.NewUUID(), Group: gid, - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, }}, }, }) @@ -437,7 +437,7 @@ func TestMarkMessageDispatchedUnpinnedOK(t *testing.T) { for i := 0; i < 5; i++ { msgid := fftypes.NewUUID() bp.newWork <- &batchWork{ - msg: &core.Message{Header: core.MessageHeader{ID: msgid, Topics: core.FFStringArray{"topic1"}}, Sequence: int64(1000 + i)}, + msg: &core.Message{Header: core.MessageHeader{ID: msgid, Topics: fftypes.FFStringArray{"topic1"}}, Sequence: int64(1000 + i)}, } } }() @@ -493,7 +493,7 @@ func TestMaskContextsRetryAfterPinsAssigned(t *testing.T) { ID: fftypes.NewUUID(), Type: core.MessageTypePrivate, Group: groupID, - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, }, } msg2 := &core.Message{ @@ -501,7 +501,7 @@ func TestMaskContextsRetryAfterPinsAssigned(t *testing.T) { ID: fftypes.NewUUID(), Type: core.MessageTypePrivate, Group: groupID, - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, }, } @@ -547,7 +547,7 @@ func TestMaskContextsUpdateMessageFail(t *testing.T) { ID: fftypes.NewUUID(), Type: core.MessageTypePrivate, Group: fftypes.NewRandB32(), - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, }, } diff --git a/internal/data/data_manager_test.go b/internal/data/data_manager_test.go index c253cf5622..0fb46f9ff7 100644 --- a/internal/data/data_manager_test.go +++ b/internal/data/data_manager_test.go @@ -1083,14 +1083,14 @@ func TestUpdateMessageCacheCRORequirePins(t *testing.T) { msgNoPins := &core.Message{ Header: core.MessageHeader{ ID: fftypes.NewUUID(), - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, }, Data: data.Refs(), } msgWithPins := &core.Message{ Header: msgNoPins.Header, Data: data.Refs(), - Pins: core.FFStringArray{"pin1"}, + Pins: fftypes.FFStringArray{"pin1"}, } msg, _ := dm.PeekMessageCache(ctx, msgWithPins.Header.ID) @@ -1127,7 +1127,7 @@ func TestUpdateMessageCacheCRORequireBatchID(t *testing.T) { msgNoPins := &core.Message{ Header: core.MessageHeader{ ID: fftypes.NewUUID(), - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, }, Data: 
data.Refs(), } diff --git a/internal/database/sqlcommon/batch_sql.go b/internal/database/sqlcommon/batch_sql.go index e5b8f31f91..6e9684f4fc 100644 --- a/internal/database/sqlcommon/batch_sql.go +++ b/internal/database/sqlcommon/batch_sql.go @@ -21,6 +21,7 @@ import ( "database/sql" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" @@ -57,14 +58,14 @@ var ( const batchesTable = "batches" func (s *SQLCommon) UpsertBatch(ctx context.Context, batch *core.BatchPersisted) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) // Do a select within the transaction to detemine if the UUID already exists - batchRows, _, err := s.queryTx(ctx, batchesTable, tx, + batchRows, _, err := s.QueryTx(ctx, batchesTable, tx, sq.Select("hash"). From(batchesTable). Where(sq.Eq{"id": batch.ID, "namespace": batch.Namespace}), @@ -88,7 +89,7 @@ func (s *SQLCommon) UpsertBatch(ctx context.Context, batch *core.BatchPersisted) if existing { // Update the batch - if _, err = s.updateTx(ctx, batchesTable, tx, + if _, err = s.UpdateTx(ctx, batchesTable, tx, sq.Update(batchesTable). Set("btype", string(batch.Type)). Set("author", batch.Author). @@ -110,7 +111,7 @@ func (s *SQLCommon) UpsertBatch(ctx context.Context, batch *core.BatchPersisted) } } else { - if _, err = s.insertTx(ctx, batchesTable, tx, + if _, err = s.InsertTx(ctx, batchesTable, tx, sq.Insert(batchesTable). Columns(batchColumns...). Values( @@ -136,7 +137,7 @@ func (s *SQLCommon) UpsertBatch(ctx context.Context, batch *core.BatchPersisted) } } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) batchResult(ctx context.Context, row *sql.Rows) (*core.BatchPersisted, error) { @@ -164,7 +165,7 @@ func (s *SQLCommon) batchResult(ctx context.Context, row *sql.Rows) (*core.Batch func (s *SQLCommon) GetBatchByID(ctx context.Context, namespace string, id *fftypes.UUID) (message *core.BatchPersisted, err error) { - rows, _, err := s.query(ctx, batchesTable, + rows, _, err := s.Query(ctx, batchesTable, sq.Select(batchColumns...). From(batchesTable). 
Where(sq.Eq{"id": id, "namespace": namespace}), @@ -187,14 +188,14 @@ func (s *SQLCommon) GetBatchByID(ctx context.Context, namespace string, id *ffty return batch, nil } -func (s *SQLCommon) GetBatches(ctx context.Context, namespace string, filter database.Filter) (message []*core.BatchPersisted, res *database.FilterResult, err error) { +func (s *SQLCommon) GetBatches(ctx context.Context, namespace string, filter ffapi.Filter) (message []*core.BatchPersisted, res *ffapi.FilterResult, err error) { - query, fop, fi, err := s.filterSelect(ctx, "", sq.Select(batchColumns...).From(batchesTable), filter, batchFilterFieldMap, []interface{}{"sequence"}, sq.Eq{"namespace": namespace}) + query, fop, fi, err := s.FilterSelect(ctx, "", sq.Select(batchColumns...).From(batchesTable), filter, batchFilterFieldMap, []interface{}{"sequence"}, sq.Eq{"namespace": namespace}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, batchesTable, query) + rows, tx, err := s.Query(ctx, batchesTable, query) if err != nil { return nil, nil, err } @@ -209,28 +210,28 @@ func (s *SQLCommon) GetBatches(ctx context.Context, namespace string, filter dat batches = append(batches, batch) } - return batches, s.queryRes(ctx, batchesTable, tx, fop, fi), err + return batches, s.QueryRes(ctx, batchesTable, tx, fop, fi), err } -func (s *SQLCommon) UpdateBatch(ctx context.Context, namespace string, id *fftypes.UUID, update database.Update) (err error) { +func (s *SQLCommon) UpdateBatch(ctx context.Context, namespace string, id *fftypes.UUID, update ffapi.Update) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) - query, err := s.buildUpdate(sq.Update(batchesTable), update, batchFilterFieldMap) + query, err := s.BuildUpdate(sq.Update(batchesTable), update, batchFilterFieldMap) if err != nil { return err } query = query.Where(sq.Eq{"id": id, "namespace": namespace}) - _, err = s.updateTx(ctx, batchesTable, tx, query, nil /* no change events on filter update */) + _, err = s.UpdateTx(ctx, batchesTable, tx, query, nil /* no change events on filter update */) if err != nil { return err } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } diff --git a/internal/database/sqlcommon/batch_sql_test.go b/internal/database/sqlcommon/batch_sql_test.go index 969113e03b..15f462d207 100644 --- a/internal/database/sqlcommon/batch_sql_test.go +++ b/internal/database/sqlcommon/batch_sql_test.go @@ -167,7 +167,7 @@ func TestUpsertBatchFailBegin(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.UpsertBatch(context.Background(), &core.BatchPersisted{}) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -178,7 +178,7 @@ func TestUpsertBatchFailSelect(t *testing.T) { mock.ExpectRollback() batchID := fftypes.NewUUID() err := s.UpsertBatch(context.Background(), &core.BatchPersisted{BatchHeader: core.BatchHeader{ID: batchID}}) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -190,7 +190,7 @@ func TestUpsertBatchFailInsert(t *testing.T) { mock.ExpectRollback() batchID := fftypes.NewUUID() err := s.UpsertBatch(context.Background(), &core.BatchPersisted{BatchHeader: core.BatchHeader{ID: batchID}}) - assert.Regexp(t, "FF10116", err) + 
assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -203,7 +203,7 @@ func TestUpsertBatchFailUpdate(t *testing.T) { mock.ExpectExec("UPDATE .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpsertBatch(context.Background(), &core.BatchPersisted{BatchHeader: core.BatchHeader{ID: batchID}, Hash: hash}) - assert.Regexp(t, "FF10117", err) + assert.Regexp(t, "FF00178", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -215,7 +215,7 @@ func TestUpsertBatchFailCommit(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnResult(sqlmock.NewResult(1, 1)) mock.ExpectCommit().WillReturnError(fmt.Errorf("pop")) err := s.UpsertBatch(context.Background(), &core.BatchPersisted{BatchHeader: core.BatchHeader{ID: batchID}}) - assert.Regexp(t, "FF10119", err) + assert.Regexp(t, "FF00180", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -224,7 +224,7 @@ func TestGetBatchByIDSelectFail(t *testing.T) { batchID := fftypes.NewUUID() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) _, err := s.GetBatchByID(context.Background(), "ns1", batchID) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -252,7 +252,7 @@ func TestGetBatchesQueryFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.BatchQueryFactory.NewFilter(context.Background()).Eq("id", "") _, _, err := s.GetBatches(context.Background(), "ns1", f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -277,7 +277,7 @@ func TestBatchUpdateBeginFail(t *testing.T) { mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) u := database.BatchQueryFactory.NewUpdate(context.Background()).Set("id", "anything") err := s.UpdateBatch(context.Background(), "ns1", fftypes.NewUUID(), u) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) } func TestBatchUpdateBuildQueryFail(t *testing.T) { @@ -295,5 +295,5 @@ func TestBatchUpdateFail(t *testing.T) { mock.ExpectRollback() u := database.BatchQueryFactory.NewUpdate(context.Background()).Set("id", fftypes.NewUUID()) err := s.UpdateBatch(context.Background(), "ns1", fftypes.NewUUID(), u) - assert.Regexp(t, "FF10117", err) + assert.Regexp(t, "FF00178", err) } diff --git a/internal/database/sqlcommon/blob_sql.go b/internal/database/sqlcommon/blob_sql.go index 02fe582285..7cfc759974 100644 --- a/internal/database/sqlcommon/blob_sql.go +++ b/internal/database/sqlcommon/blob_sql.go @@ -21,12 +21,13 @@ import ( "database/sql" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/dbsql" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" "github.com/hyperledger/firefly/internal/coremsgs" "github.com/hyperledger/firefly/pkg/core" - "github.com/hyperledger/firefly/pkg/database" ) var ( @@ -45,18 +46,18 @@ var ( const blobsTable = "blobs" func (s *SQLCommon) InsertBlob(ctx context.Context, blob *core.Blob) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) err = s.attemptBlobInsert(ctx, tx, blob) if err != nil { return err } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func 
(s *SQLCommon) setBlobInsertValues(query sq.InsertBuilder, blob *core.Blob) sq.InsertBuilder { @@ -69,8 +70,8 @@ func (s *SQLCommon) setBlobInsertValues(query sq.InsertBuilder, blob *core.Blob) ) } -func (s *SQLCommon) attemptBlobInsert(ctx context.Context, tx *txWrapper, blob *core.Blob) (err error) { - blob.Sequence, err = s.insertTx(ctx, blobsTable, tx, +func (s *SQLCommon) attemptBlobInsert(ctx context.Context, tx *dbsql.TXWrapper, blob *core.Blob) (err error) { + blob.Sequence, err = s.InsertTx(ctx, blobsTable, tx, s.setBlobInsertValues(sq.Insert(blobsTable).Columns(blobColumns...), blob), nil, // no change events for blobs ) @@ -79,11 +80,11 @@ func (s *SQLCommon) attemptBlobInsert(ctx context.Context, tx *txWrapper, blob * func (s *SQLCommon) InsertBlobs(ctx context.Context, blobs []*core.Blob) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) if s.features.MultiRowInsert { query := sq.Insert(blobsTable).Columns(blobColumns...) @@ -91,7 +92,7 @@ func (s *SQLCommon) InsertBlobs(ctx context.Context, blobs []*core.Blob) (err er query = s.setBlobInsertValues(query, blob) } sequences := make([]int64, len(blobs)) - err := s.insertTxRows(ctx, blobsTable, tx, query, + err := s.InsertTxRows(ctx, blobsTable, tx, query, nil, /* no change events for blobs */ sequences, true /* we want the caller to be able to retry with individual upserts */) @@ -108,7 +109,7 @@ func (s *SQLCommon) InsertBlobs(ctx context.Context, blobs []*core.Blob) (err er } } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } @@ -130,8 +131,8 @@ func (s *SQLCommon) blobResult(ctx context.Context, row *sql.Rows) (*core.Blob, func (s *SQLCommon) getBlobPred(ctx context.Context, desc string, pred interface{}) (message *core.Blob, err error) { cols := append([]string{}, blobColumns...) - cols = append(cols, sequenceColumn) - rows, _, err := s.query(ctx, blobsTable, + cols = append(cols, s.SequenceColumn()) + rows, _, err := s.Query(ctx, blobsTable, sq.Select(cols...). From(blobsTable). Where(pred). @@ -161,16 +162,16 @@ func (s *SQLCommon) GetBlobMatchingHash(ctx context.Context, hash *fftypes.Bytes }) } -func (s *SQLCommon) GetBlobs(ctx context.Context, filter database.Filter) (message []*core.Blob, res *database.FilterResult, err error) { +func (s *SQLCommon) GetBlobs(ctx context.Context, filter ffapi.Filter) (message []*core.Blob, res *ffapi.FilterResult, err error) { cols := append([]string{}, blobColumns...) 
- cols = append(cols, sequenceColumn) - query, fop, fi, err := s.filterSelect(ctx, "", sq.Select(cols...).From(blobsTable), filter, blobFilterFieldMap, []interface{}{"sequence"}) + cols = append(cols, s.SequenceColumn()) + query, fop, fi, err := s.FilterSelect(ctx, "", sq.Select(cols...).From(blobsTable), filter, blobFilterFieldMap, []interface{}{"sequence"}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, blobsTable, query) + rows, tx, err := s.Query(ctx, blobsTable, query) if err != nil { return nil, nil, err } @@ -185,24 +186,24 @@ func (s *SQLCommon) GetBlobs(ctx context.Context, filter database.Filter) (messa blob = append(blob, d) } - return blob, s.queryRes(ctx, blobsTable, tx, fop, fi), err + return blob, s.QueryRes(ctx, blobsTable, tx, fop, fi), err } func (s *SQLCommon) DeleteBlob(ctx context.Context, sequence int64) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) - err = s.deleteTx(ctx, blobsTable, tx, sq.Delete(blobsTable).Where(sq.Eq{ - sequenceColumn: sequence, + err = s.DeleteTx(ctx, blobsTable, tx, sq.Delete(blobsTable).Where(sq.Eq{ + s.SequenceColumn(): sequence, }), nil /* no change events for blobs */) if err != nil { return err } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } diff --git a/internal/database/sqlcommon/blob_sql_test.go b/internal/database/sqlcommon/blob_sql_test.go index b672b44389..8e588330bb 100644 --- a/internal/database/sqlcommon/blob_sql_test.go +++ b/internal/database/sqlcommon/blob_sql_test.go @@ -84,7 +84,7 @@ func TestInsertBlobFailBegin(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.InsertBlob(context.Background(), &core.Blob{}) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -94,7 +94,7 @@ func TestInsertBlobFailInsert(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.InsertBlob(context.Background(), &core.Blob{Hash: fftypes.NewRandB32()}) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -104,7 +104,7 @@ func TestInsertBlobFailCommit(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnResult(sqlmock.NewResult(1, 1)) mock.ExpectCommit().WillReturnError(fmt.Errorf("pop")) err := s.InsertBlob(context.Background(), &core.Blob{Hash: fftypes.NewRandB32()}) - assert.Regexp(t, "FF10119", err) + assert.Regexp(t, "FF00180", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -112,7 +112,7 @@ func TestInsertBlobsBeginFail(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.InsertBlobs(context.Background(), []*core.Blob{}) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) s.callbacks.AssertExpectations(t) } @@ -126,7 +126,7 @@ func TestInsertBlobsMultiRowOK(t *testing.T) { blob2 := &core.Blob{Hash: fftypes.NewRandB32(), PayloadRef: "pay2"} mock.ExpectBegin() - mock.ExpectQuery("INSERT.*").WillReturnRows(sqlmock.NewRows([]string{sequenceColumn}). + mock.ExpectQuery("INSERT.*").WillReturnRows(sqlmock.NewRows([]string{s.SequenceColumn()}). AddRow(int64(1001)). 
AddRow(int64(1002)), ) @@ -145,7 +145,7 @@ func TestInsertBlobsMultiRowFail(t *testing.T) { mock.ExpectBegin() mock.ExpectQuery("INSERT.*").WillReturnError(fmt.Errorf("pop")) err := s.InsertBlobs(context.Background(), []*core.Blob{blob1}) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) s.callbacks.AssertExpectations(t) } @@ -156,7 +156,7 @@ func TestInsertBlobsSingleRowFail(t *testing.T) { mock.ExpectBegin() mock.ExpectExec("INSERT.*").WillReturnError(fmt.Errorf("pop")) err := s.InsertBlobs(context.Background(), []*core.Blob{blob1}) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) s.callbacks.AssertExpectations(t) } @@ -165,7 +165,7 @@ func TestGetBlobByIDSelectFail(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) _, err := s.GetBlobMatchingHash(context.Background(), fftypes.NewRandB32()) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -191,7 +191,7 @@ func TestGetBlobQueryFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.BlobQueryFactory.NewFilter(context.Background()).Eq("hash", "") _, _, err := s.GetBlobs(context.Background(), f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -215,7 +215,7 @@ func TestBlobDeleteBeginFail(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.DeleteBlob(context.Background(), 12345) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) } func TestBlobDeleteFail(t *testing.T) { @@ -224,5 +224,5 @@ func TestBlobDeleteFail(t *testing.T) { mock.ExpectExec("DELETE .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.DeleteBlob(context.Background(), 12345) - assert.Regexp(t, "FF10118", err) + assert.Regexp(t, "FF00179", err) } diff --git a/internal/database/sqlcommon/blockchainevents_sql.go b/internal/database/sqlcommon/blockchainevents_sql.go index 0dbdf40353..0f55e5243b 100644 --- a/internal/database/sqlcommon/blockchainevents_sql.go +++ b/internal/database/sqlcommon/blockchainevents_sql.go @@ -21,6 +21,8 @@ import ( "database/sql" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/dbsql" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" @@ -72,8 +74,8 @@ func (s *SQLCommon) setBlockchainEventInsertValues(query sq.InsertBuilder, event ) } -func (s *SQLCommon) attemptBlockchainEventInsert(ctx context.Context, tx *txWrapper, event *core.BlockchainEvent, requestConflictEmptyResult bool) (err error) { - _, err = s.insertTxExt(ctx, messagesTable, tx, +func (s *SQLCommon) attemptBlockchainEventInsert(ctx context.Context, tx *dbsql.TXWrapper, event *core.BlockchainEvent, requestConflictEmptyResult bool) (err error) { + _, err = s.InsertTxExt(ctx, messagesTable, tx, s.setBlockchainEventInsertValues(sq.Insert(blockchaineventsTable).Columns(blockchainEventColumns...), event), func() { s.callbacks.UUIDCollectionNSEvent(database.CollectionBlockchainEvents, core.ChangeEventTypeCreated, event.Namespace, event.ID) @@ -82,15 +84,15 @@ func (s *SQLCommon) attemptBlockchainEventInsert(ctx context.Context, tx 
*txWrap } func (s *SQLCommon) InsertOrGetBlockchainEvent(ctx context.Context, event *core.BlockchainEvent) (existing *core.BlockchainEvent, err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return nil, err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) opErr := s.attemptBlockchainEventInsert(ctx, tx, event, true /* we want a failure here we can progress past */) if opErr == nil { - return nil, s.commitTx(ctx, tx, autoCommit) + return nil, s.CommitTx(ctx, tx, autoCommit) } // Do a select within the transaction to determine if the protocolID already exists @@ -126,7 +128,7 @@ func (s *SQLCommon) blockchainEventResult(ctx context.Context, row *sql.Rows) (* } func (s *SQLCommon) getBlockchainEventPred(ctx context.Context, desc string, pred interface{}) (*core.BlockchainEvent, error) { - rows, _, err := s.query(ctx, blockchaineventsTable, + rows, _, err := s.Query(ctx, blockchaineventsTable, sq.Select(blockchainEventColumns...). From(blockchaineventsTable). Where(pred), @@ -161,16 +163,16 @@ func (s *SQLCommon) GetBlockchainEventByProtocolID(ctx context.Context, ns strin }) } -func (s *SQLCommon) GetBlockchainEvents(ctx context.Context, namespace string, filter database.Filter) ([]*core.BlockchainEvent, *database.FilterResult, error) { +func (s *SQLCommon) GetBlockchainEvents(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.BlockchainEvent, *ffapi.FilterResult, error) { - query, fop, fi, err := s.filterSelect(ctx, "", + query, fop, fi, err := s.FilterSelect(ctx, "", sq.Select(blockchainEventColumns...).From(blockchaineventsTable), filter, blockchainEventFilterFieldMap, []interface{}{"sequence"}, sq.Eq{"namespace": namespace}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, blockchaineventsTable, query) + rows, tx, err := s.Query(ctx, blockchaineventsTable, query) if err != nil { return nil, nil, err } @@ -185,5 +187,5 @@ func (s *SQLCommon) GetBlockchainEvents(ctx context.Context, namespace string, f events = append(events, event) } - return events, s.queryRes(ctx, blockchaineventsTable, tx, fop, fi), err + return events, s.QueryRes(ctx, blockchaineventsTable, tx, fop, fi), err } diff --git a/internal/database/sqlcommon/blockchainevents_sql_test.go b/internal/database/sqlcommon/blockchainevents_sql_test.go index 0be3e75f65..796cd871bb 100644 --- a/internal/database/sqlcommon/blockchainevents_sql_test.go +++ b/internal/database/sqlcommon/blockchainevents_sql_test.go @@ -121,7 +121,7 @@ func TestInsertBlockchainEventFailBegin(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) _, err := s.InsertOrGetBlockchainEvent(context.Background(), &core.BlockchainEvent{}) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -132,7 +132,7 @@ func TestInsertBlockchainEventFailInsert(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{})) mock.ExpectRollback() _, err := s.InsertOrGetBlockchainEvent(context.Background(), &core.BlockchainEvent{}) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -142,7 +142,7 @@ func TestInsertBlockchainEventFailCommit(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnResult(sqlmock.NewResult(1, 1)) mock.ExpectCommit().WillReturnError(fmt.Errorf("pop")) _, err := 
s.InsertOrGetBlockchainEvent(context.Background(), &core.BlockchainEvent{}) - assert.Regexp(t, "FF10119", err) + assert.Regexp(t, "FF00180", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -150,7 +150,7 @@ func TestGetBlockchainEventByIDSelectFail(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) _, err := s.GetBlockchainEventByID(context.Background(), "ns", fftypes.NewUUID()) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -176,7 +176,7 @@ func TestGetBlockchainEventsQueryFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.BlockchainEventQueryFactory.NewFilter(context.Background()).Eq("id", "") _, _, err := s.GetBlockchainEvents(context.Background(), "ns", f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } diff --git a/internal/database/sqlcommon/chart_sql.go b/internal/database/sqlcommon/chart_sql.go index 006af1256c..b207020be1 100644 --- a/internal/database/sqlcommon/chart_sql.go +++ b/internal/database/sqlcommon/chart_sql.go @@ -129,7 +129,7 @@ func (s *SQLCommon) GetChartHistogram(ctx context.Context, ns string, intervals for i, query := range queries { // Query bucket's data - rows, _, err := s.query(ctx, tableName, query) + rows, _, err := s.Query(ctx, tableName, query) if err != nil { return nil, err } diff --git a/internal/database/sqlcommon/chart_sql_test.go b/internal/database/sqlcommon/chart_sql_test.go index 03c0e48c77..2df51d1441 100644 --- a/internal/database/sqlcommon/chart_sql_test.go +++ b/internal/database/sqlcommon/chart_sql_test.go @@ -188,7 +188,7 @@ func TestGetChartHistogramsQueryFail(t *testing.T) { mock.ExpectQuery("SELECT *").WillReturnError(fmt.Errorf("pop")) _, err := s.GetChartHistogram(context.Background(), "ns1", mockHistogramInterval, database.CollectionName("messages")) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } diff --git a/internal/database/sqlcommon/config.go b/internal/database/sqlcommon/config.go index 237c03ef9f..02817de951 100644 --- a/internal/database/sqlcommon/config.go +++ b/internal/database/sqlcommon/config.go @@ -20,6 +20,7 @@ import ( "fmt" "github.com/hyperledger/firefly-common/pkg/config" + "github.com/hyperledger/firefly-common/pkg/dbsql" ) const ( @@ -43,7 +44,7 @@ const ( defaultMigrationsDirectoryTemplate = "./db/migrations/%s" ) -func (s *SQLCommon) InitConfig(provider Provider, config config.Section) { +func (s *SQLCommon) InitConfig(provider dbsql.Provider, config config.Section) { config.AddKnownKey(SQLConfMigrationsAuto, false) config.AddKnownKey(SQLConfDatasourceURL) config.AddKnownKey(SQLConfMigrationsDirectory, fmt.Sprintf(defaultMigrationsDirectoryTemplate, provider.MigrationsDir())) diff --git a/internal/database/sqlcommon/contractapis_sql.go b/internal/database/sqlcommon/contractapis_sql.go index d7bb989b72..7a8a95b308 100644 --- a/internal/database/sqlcommon/contractapis_sql.go +++ b/internal/database/sqlcommon/contractapis_sql.go @@ -21,6 +21,7 @@ import ( "database/sql" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" @@ -47,13 +48,13 @@ var ( const contractapisTable = "contractapis" func (s 
*SQLCommon) UpsertContractAPI(ctx context.Context, api *core.ContractAPI) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) - rows, _, err := s.queryTx(ctx, contractapisTable, tx, + rows, _, err := s.QueryTx(ctx, contractapisTable, tx, sq.Select("id"). From(contractapisTable). Where(sq.Eq{ @@ -78,7 +79,7 @@ func (s *SQLCommon) UpsertContractAPI(ctx context.Context, api *core.ContractAPI rows.Close() if existing { - if _, err = s.updateTx(ctx, contractapisTable, tx, + if _, err = s.UpdateTx(ctx, contractapisTable, tx, sq.Update(contractapisTable). Set("id", api.ID). Set("interface_id", api.Interface.ID). @@ -92,7 +93,7 @@ func (s *SQLCommon) UpsertContractAPI(ctx context.Context, api *core.ContractAPI return err } } else { - if _, err = s.insertTx(ctx, contractapisTable, tx, + if _, err = s.InsertTx(ctx, contractapisTable, tx, sq.Insert(contractapisTable). Columns(contractAPIsColumns...). Values( @@ -111,7 +112,7 @@ func (s *SQLCommon) UpsertContractAPI(ctx context.Context, api *core.ContractAPI } } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) contractAPIResult(ctx context.Context, row *sql.Rows) (*core.ContractAPI, error) { @@ -133,7 +134,7 @@ func (s *SQLCommon) contractAPIResult(ctx context.Context, row *sql.Rows) (*core } func (s *SQLCommon) getContractAPIPred(ctx context.Context, desc string, pred interface{}) (*core.ContractAPI, error) { - rows, _, err := s.query(ctx, contractapisTable, + rows, _, err := s.Query(ctx, contractapisTable, sq.Select(contractAPIsColumns...). From(contractapisTable). Where(pred), @@ -156,15 +157,15 @@ func (s *SQLCommon) getContractAPIPred(ctx context.Context, desc string, pred in return api, nil } -func (s *SQLCommon) GetContractAPIs(ctx context.Context, namespace string, filter database.AndFilter) (contractAPIs []*core.ContractAPI, res *database.FilterResult, err error) { +func (s *SQLCommon) GetContractAPIs(ctx context.Context, namespace string, filter ffapi.AndFilter) (contractAPIs []*core.ContractAPI, res *ffapi.FilterResult, err error) { - query, fop, fi, err := s.filterSelect(ctx, "", sq.Select(contractAPIsColumns...).From(contractapisTable), + query, fop, fi, err := s.FilterSelect(ctx, "", sq.Select(contractAPIsColumns...).From(contractapisTable), filter, contractAPIsFilterFieldMap, []interface{}{"sequence"}, sq.Eq{"namespace": namespace}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, contractapisTable, query) + rows, tx, err := s.Query(ctx, contractapisTable, query) if err != nil { return nil, nil, err } @@ -179,7 +180,7 @@ func (s *SQLCommon) GetContractAPIs(ctx context.Context, namespace string, filte apis = append(apis, api) } - return apis, s.queryRes(ctx, contractapisTable, tx, fop, fi), err + return apis, s.QueryRes(ctx, contractapisTable, tx, fop, fi), err } diff --git a/internal/database/sqlcommon/contractapis_sql_test.go b/internal/database/sqlcommon/contractapis_sql_test.go index 20d2920b20..7162b52a22 100644 --- a/internal/database/sqlcommon/contractapis_sql_test.go +++ b/internal/database/sqlcommon/contractapis_sql_test.go @@ -82,7 +82,7 @@ func TestContractAPIDBFailBeginTransaction(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.UpsertContractAPI(context.Background(), &core.ContractAPI{}) - assert.Regexp(t, 
"FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -105,7 +105,7 @@ func TestContractAPIDBFailInsert(t *testing.T) { Interface: &fftypes.FFIReference{}, } err := s.UpsertContractAPI(context.Background(), api) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } diff --git a/internal/database/sqlcommon/contractlisteners_sql.go b/internal/database/sqlcommon/contractlisteners_sql.go index 6844692a36..5fb2487333 100644 --- a/internal/database/sqlcommon/contractlisteners_sql.go +++ b/internal/database/sqlcommon/contractlisteners_sql.go @@ -22,6 +22,7 @@ import ( "fmt" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" @@ -53,11 +54,11 @@ var ( const contractlistenersTable = "contractlisteners" func (s *SQLCommon) InsertContractListener(ctx context.Context, listener *core.ContractListener) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) var interfaceID *fftypes.UUID if listener.Interface != nil { @@ -65,7 +66,7 @@ func (s *SQLCommon) InsertContractListener(ctx context.Context, listener *core.C } listener.Created = fftypes.Now() - if _, err = s.insertTx(ctx, contractlistenersTable, tx, + if _, err = s.InsertTx(ctx, contractlistenersTable, tx, sq.Insert(contractlistenersTable). Columns(contractListenerColumns...). Values( @@ -88,7 +89,7 @@ func (s *SQLCommon) InsertContractListener(ctx context.Context, listener *core.C return err } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) contractListenerResult(ctx context.Context, row *sql.Rows) (*core.ContractListener, error) { @@ -115,7 +116,7 @@ func (s *SQLCommon) contractListenerResult(ctx context.Context, row *sql.Rows) ( } func (s *SQLCommon) getContractListenerPred(ctx context.Context, desc string, pred interface{}) (*core.ContractListener, error) { - rows, _, err := s.query(ctx, contractlistenersTable, + rows, _, err := s.Query(ctx, contractlistenersTable, sq.Select(contractListenerColumns...). From(contractlistenersTable). 
Where(pred), @@ -150,15 +151,15 @@ func (s *SQLCommon) GetContractListenerByBackendID(ctx context.Context, namespac return s.getContractListenerPred(ctx, id, sq.Eq{"backend_id": id, "namespace": namespace}) } -func (s *SQLCommon) GetContractListeners(ctx context.Context, namespace string, filter database.Filter) ([]*core.ContractListener, *database.FilterResult, error) { - query, fop, fi, err := s.filterSelect(ctx, "", +func (s *SQLCommon) GetContractListeners(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.ContractListener, *ffapi.FilterResult, error) { + query, fop, fi, err := s.FilterSelect(ctx, "", sq.Select(contractListenerColumns...).From(contractlistenersTable), filter, contractListenerFilterFieldMap, []interface{}{"sequence"}, sq.Eq{"namespace": namespace}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, contractlistenersTable, query) + rows, tx, err := s.Query(ctx, contractlistenersTable, query) if err != nil { return nil, nil, err } @@ -173,19 +174,19 @@ func (s *SQLCommon) GetContractListeners(ctx context.Context, namespace string, subs = append(subs, sub) } - return subs, s.queryRes(ctx, contractlistenersTable, tx, fop, fi), err + return subs, s.QueryRes(ctx, contractlistenersTable, tx, fop, fi), err } func (s *SQLCommon) DeleteContractListenerByID(ctx context.Context, namespace string, id *fftypes.UUID) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) sub, err := s.GetContractListenerByID(ctx, namespace, id) if err == nil && sub != nil { - err = s.deleteTx(ctx, contractlistenersTable, tx, sq.Delete(contractlistenersTable).Where(sq.Eq{"id": id}), + err = s.DeleteTx(ctx, contractlistenersTable, tx, sq.Delete(contractlistenersTable).Where(sq.Eq{"id": id}), func() { s.callbacks.UUIDCollectionNSEvent(database.CollectionContractListeners, core.ChangeEventTypeDeleted, sub.Namespace, sub.ID) }, @@ -195,5 +196,5 @@ func (s *SQLCommon) DeleteContractListenerByID(ctx context.Context, namespace st } } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } diff --git a/internal/database/sqlcommon/contractlisteners_sql_test.go b/internal/database/sqlcommon/contractlisteners_sql_test.go index da06e157b9..3d29184dfe 100644 --- a/internal/database/sqlcommon/contractlisteners_sql_test.go +++ b/internal/database/sqlcommon/contractlisteners_sql_test.go @@ -119,7 +119,7 @@ func TestUpsertContractListenerFailBegin(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.InsertContractListener(context.Background(), &core.ContractListener{}) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -129,7 +129,7 @@ func TestUpsertContractListenerFailInsert(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.InsertContractListener(context.Background(), &core.ContractListener{}) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -139,7 +139,7 @@ func TestUpsertContractListenerFailCommit(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnResult(sqlmock.NewResult(1, 1)) mock.ExpectCommit().WillReturnError(fmt.Errorf("pop")) err := s.InsertContractListener(context.Background(), &core.ContractListener{}) - 
assert.Regexp(t, "FF10119", err) + assert.Regexp(t, "FF00180", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -147,7 +147,7 @@ func TestGetContractListenerByIDSelectFail(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) _, err := s.GetContractListenerByID(context.Background(), "ns", fftypes.NewUUID()) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -173,7 +173,7 @@ func TestGetContractListenersQueryFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.ContractListenerQueryFactory.NewFilter(context.Background()).Eq("backendid", "") _, _, err := s.GetContractListeners(context.Background(), "ns", f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -197,7 +197,7 @@ func TestContractListenerDeleteBeginFail(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.DeleteContractListenerByID(context.Background(), "ns", fftypes.NewUUID()) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) } func TestContractListenerDeleteFail(t *testing.T) { @@ -208,7 +208,7 @@ func TestContractListenerDeleteFail(t *testing.T) { ) mock.ExpectExec("DELETE .*").WillReturnError(fmt.Errorf("pop")) err := s.DeleteContractListenerByID(context.Background(), "ns", fftypes.NewUUID()) - assert.Regexp(t, "FF10118", err) + assert.Regexp(t, "FF00179", err) } func TestContractListenerOptions(t *testing.T) { diff --git a/internal/database/sqlcommon/data_sql.go b/internal/database/sqlcommon/data_sql.go index 58d10f910e..312fd82102 100644 --- a/internal/database/sqlcommon/data_sql.go +++ b/internal/database/sqlcommon/data_sql.go @@ -21,6 +21,8 @@ import ( "database/sql" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/dbsql" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" @@ -59,7 +61,7 @@ var ( const dataTable = "data" -func (s *SQLCommon) attemptDataUpdate(ctx context.Context, tx *txWrapper, data *core.Data) (int64, error) { +func (s *SQLCommon) attemptDataUpdate(ctx context.Context, tx *dbsql.TXWrapper, data *core.Data) (int64, error) { datatype := data.Datatype if datatype == nil { datatype = &core.DatatypeRef{} @@ -68,7 +70,7 @@ func (s *SQLCommon) attemptDataUpdate(ctx context.Context, tx *txWrapper, data * if blob == nil { blob = &core.BlobRef{} } - return s.updateTx(ctx, dataTable, tx, + return s.UpdateTx(ctx, dataTable, tx, sq.Update(dataTable). Set("validator", string(data.Validator)). Set("datatype_name", datatype.Name). 
@@ -119,8 +121,8 @@ func (s *SQLCommon) setDataInsertValues(query sq.InsertBuilder, data *core.Data) ) } -func (s *SQLCommon) attemptDataInsert(ctx context.Context, tx *txWrapper, data *core.Data, requestConflictEmptyResult bool) (int64, error) { - return s.insertTxExt(ctx, dataTable, tx, +func (s *SQLCommon) attemptDataInsert(ctx context.Context, tx *dbsql.TXWrapper, data *core.Data, requestConflictEmptyResult bool) (int64, error) { + return s.InsertTxExt(ctx, dataTable, tx, s.setDataInsertValues(sq.Insert(dataTable).Columns(dataColumnsWithValue...), data), func() { s.callbacks.UUIDCollectionNSEvent(database.CollectionData, core.ChangeEventTypeCreated, data.Namespace, data.ID) @@ -128,11 +130,11 @@ func (s *SQLCommon) attemptDataInsert(ctx context.Context, tx *txWrapper, data * } func (s *SQLCommon) UpsertData(ctx context.Context, data *core.Data, optimization database.UpsertOptimization) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) // This is a performance critical function, as we stream data into the database for every message, in every batch. // @@ -150,7 +152,7 @@ func (s *SQLCommon) UpsertData(ctx context.Context, data *core.Data, optimizatio if !optimized { // Do a select within the transaction to determine if the UUID already exists - dataRows, _, err := s.queryTx(ctx, dataTable, tx, + dataRows, _, err := s.QueryTx(ctx, dataTable, tx, sq.Select("hash"). From(dataTable). Where(sq.Eq{"id": data.ID, "namespace": data.Namespace}), @@ -182,16 +184,16 @@ func (s *SQLCommon) UpsertData(ctx context.Context, data *core.Data, optimizatio } } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) InsertDataArray(ctx context.Context, dataArray core.DataArray) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) if s.features.MultiRowInsert { query := sq.Insert(dataTable).Columns(dataColumnsWithValue...) @@ -199,7 +201,7 @@ func (s *SQLCommon) InsertDataArray(ctx context.Context, dataArray core.DataArra query = s.setDataInsertValues(query, data) } sequences := make([]int64, len(dataArray)) - err := s.insertTxRows(ctx, dataTable, tx, query, func() { + err := s.InsertTxRows(ctx, dataTable, tx, query, func() { for _, data := range dataArray { s.callbacks.UUIDCollectionNSEvent(database.CollectionData, core.ChangeEventTypeCreated, data.Namespace, data.ID) } @@ -217,7 +219,7 @@ func (s *SQLCommon) InsertDataArray(ctx context.Context, dataArray core.DataArra } } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } @@ -265,7 +267,7 @@ func (s *SQLCommon) GetDataByID(ctx context.Context, namespace string, id *fftyp } else { cols = dataColumnsNoValue } - rows, _, err := s.query(ctx, dataTable, + rows, _, err := s.Query(ctx, dataTable, sq.Select(cols...). From(dataTable). 
Where(sq.Eq{"id": id, "namespace": namespace}), @@ -288,16 +290,16 @@ func (s *SQLCommon) GetDataByID(ctx context.Context, namespace string, id *fftyp return data, nil } -func (s *SQLCommon) GetData(ctx context.Context, namespace string, filter database.Filter) (message core.DataArray, res *database.FilterResult, err error) { +func (s *SQLCommon) GetData(ctx context.Context, namespace string, filter ffapi.Filter) (message core.DataArray, res *ffapi.FilterResult, err error) { - query, fop, fi, err := s.filterSelect( + query, fop, fi, err := s.FilterSelect( ctx, "", sq.Select(dataColumnsWithValue...).From(dataTable), filter, dataFilterFieldMap, []interface{}{"sequence"}, sq.Eq{"namespace": namespace}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, dataTable, query) + rows, tx, err := s.Query(ctx, dataTable, query) if err != nil { return nil, nil, err } @@ -312,20 +314,20 @@ func (s *SQLCommon) GetData(ctx context.Context, namespace string, filter databa data = append(data, d) } - return data, s.queryRes(ctx, dataTable, tx, fop, fi), err + return data, s.QueryRes(ctx, dataTable, tx, fop, fi), err } -func (s *SQLCommon) GetDataRefs(ctx context.Context, namespace string, filter database.Filter) (message core.DataRefs, res *database.FilterResult, err error) { +func (s *SQLCommon) GetDataRefs(ctx context.Context, namespace string, filter ffapi.Filter) (message core.DataRefs, res *ffapi.FilterResult, err error) { - query, fop, fi, err := s.filterSelect( + query, fop, fi, err := s.FilterSelect( ctx, "", sq.Select("id", "hash").From(dataTable), filter, dataFilterFieldMap, []interface{}{"sequence"}, sq.Eq{"namespace": namespace}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, dataTable, query) + rows, tx, err := s.Query(ctx, dataTable, query) if err != nil { return nil, nil, err } @@ -344,28 +346,28 @@ func (s *SQLCommon) GetDataRefs(ctx context.Context, namespace string, filter da refs = append(refs, &ref) } - return refs, s.queryRes(ctx, dataTable, tx, fop, fi), err + return refs, s.QueryRes(ctx, dataTable, tx, fop, fi), err } -func (s *SQLCommon) UpdateData(ctx context.Context, namespace string, id *fftypes.UUID, update database.Update) (err error) { +func (s *SQLCommon) UpdateData(ctx context.Context, namespace string, id *fftypes.UUID, update ffapi.Update) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) - query, err := s.buildUpdate(sq.Update(dataTable), update, dataFilterFieldMap) + query, err := s.BuildUpdate(sq.Update(dataTable), update, dataFilterFieldMap) if err != nil { return err } query = query.Where(sq.Eq{"id": id, "namespace": namespace}) - _, err = s.updateTx(ctx, dataTable, tx, query, nil /* no change events for filter based updates */) + _, err = s.UpdateTx(ctx, dataTable, tx, query, nil /* no change events for filter based updates */) if err != nil { return err } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } diff --git a/internal/database/sqlcommon/data_sql_test.go b/internal/database/sqlcommon/data_sql_test.go index cd52655206..873d1ccc95 100644 --- a/internal/database/sqlcommon/data_sql_test.go +++ b/internal/database/sqlcommon/data_sql_test.go @@ -171,7 +171,7 @@ func TestUpsertDataFailBegin(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := 
s.UpsertData(context.Background(), &core.Data{}, database.UpsertOptimizationSkip) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -182,7 +182,7 @@ func TestUpsertDataFailSelect(t *testing.T) { mock.ExpectRollback() dataID := fftypes.NewUUID() err := s.UpsertData(context.Background(), &core.Data{ID: dataID}, database.UpsertOptimizationSkip) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -194,7 +194,7 @@ func TestUpsertDataFailInsert(t *testing.T) { mock.ExpectRollback() dataID := fftypes.NewUUID() err := s.UpsertData(context.Background(), &core.Data{ID: dataID}, database.UpsertOptimizationSkip) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -207,7 +207,7 @@ func TestUpsertDataFailUpdate(t *testing.T) { mock.ExpectExec("UPDATE .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpsertData(context.Background(), &core.Data{ID: dataID, Hash: dataHash}, database.UpsertOptimizationSkip) - assert.Regexp(t, "FF10117", err) + assert.Regexp(t, "FF00178", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -219,7 +219,7 @@ func TestUpsertDataFailCommit(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnResult(sqlmock.NewResult(1, 1)) mock.ExpectCommit().WillReturnError(fmt.Errorf("pop")) err := s.UpsertData(context.Background(), &core.Data{ID: dataID}, database.UpsertOptimizationSkip) - assert.Regexp(t, "FF10119", err) + assert.Regexp(t, "FF00180", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -227,7 +227,7 @@ func TestInsertDataArrayBeginFail(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.InsertDataArray(context.Background(), core.DataArray{}) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) s.callbacks.AssertExpectations(t) } @@ -243,7 +243,7 @@ func TestInsertDataArrayMultiRowOK(t *testing.T) { s.callbacks.On("UUIDCollectionNSEvent", database.CollectionData, core.ChangeEventTypeCreated, "ns1", data2.ID) mock.ExpectBegin() - mock.ExpectQuery("INSERT.*").WillReturnRows(sqlmock.NewRows([]string{sequenceColumn}). + mock.ExpectQuery("INSERT.*").WillReturnRows(sqlmock.NewRows([]string{s.SequenceColumn()}). AddRow(int64(1001)). 
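// Illustrative note (not part of this patch): the assertion changes in these
// tests track the generic SQL error messages moving from the FireFly core
// catalogue (FF10xxx) to firefly-common (FF00xxx). Judging purely by the
// substitutions made in this patch, the mapping is:
//
//	FF10114 -> FF00175  begin-transaction failure
//	FF10115 -> FF00176  query/select failure
//	FF10116 -> FF00177  insert failure
//	FF10117 -> FF00178  update failure
//	FF10118 -> FF00179  delete failure
//	FF10119 -> FF00180  commit failure
//	FF10345 -> FF00187  lock-acquisition failure
//
// A minimal sketch of the updated test shape (the test name is hypothetical;
// imports are those already present in data_sql_test.go):
func TestBeginFailureUsesCommonCodeSketch(t *testing.T) {
	s, mock := newMockProvider().init()
	mock.ExpectBegin().WillReturnError(fmt.Errorf("pop"))
	err := s.UpsertData(context.Background(), &core.Data{}, database.UpsertOptimizationSkip)
	assert.Regexp(t, "FF00175", err) // previously FF10114
	assert.NoError(t, mock.ExpectationsWereMet())
}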
AddRow(int64(1002)), ) @@ -262,7 +262,7 @@ func TestInsertDataArrayMultiRowFail(t *testing.T) { mock.ExpectBegin() mock.ExpectQuery("INSERT.*").WillReturnError(fmt.Errorf("pop")) err := s.InsertDataArray(context.Background(), core.DataArray{data1}) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) s.callbacks.AssertExpectations(t) } @@ -273,7 +273,7 @@ func TestInsertDataArraySingleRowFail(t *testing.T) { mock.ExpectBegin() mock.ExpectExec("INSERT.*").WillReturnError(fmt.Errorf("pop")) err := s.InsertDataArray(context.Background(), core.DataArray{data1}) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) s.callbacks.AssertExpectations(t) } @@ -283,7 +283,7 @@ func TestGetDataByIDSelectFail(t *testing.T) { dataID := fftypes.NewUUID() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) _, err := s.GetDataByID(context.Background(), "ns1", dataID, false) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -311,7 +311,7 @@ func TestGetDataQueryFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.DataQueryFactory.NewFilter(context.Background()).Eq("id", "") _, _, err := s.GetData(context.Background(), "ns1", f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -336,7 +336,7 @@ func TestGetDataRefsQueryFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.DataQueryFactory.NewFilter(context.Background()).Eq("id", "") _, _, err := s.GetDataRefs(context.Background(), "ns1", f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -361,7 +361,7 @@ func TestDataUpdateBeginFail(t *testing.T) { mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) u := database.DataQueryFactory.NewUpdate(context.Background()).Set("id", "anything") err := s.UpdateData(context.Background(), "ns1", fftypes.NewUUID(), u) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) } func TestDataUpdateBuildQueryFail(t *testing.T) { @@ -379,5 +379,5 @@ func TestDataUpdateFail(t *testing.T) { mock.ExpectRollback() u := database.DataQueryFactory.NewUpdate(context.Background()).Set("id", fftypes.NewUUID()) err := s.UpdateData(context.Background(), "ns1", fftypes.NewUUID(), u) - assert.Regexp(t, "FF10117", err) + assert.Regexp(t, "FF00178", err) } diff --git a/internal/database/sqlcommon/datatype_sql.go b/internal/database/sqlcommon/datatype_sql.go index 48ffefe294..f337176266 100644 --- a/internal/database/sqlcommon/datatype_sql.go +++ b/internal/database/sqlcommon/datatype_sql.go @@ -22,6 +22,7 @@ import ( "fmt" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" @@ -50,16 +51,16 @@ var ( const datatypesTable = "datatypes" func (s *SQLCommon) UpsertDatatype(ctx context.Context, datatype *core.Datatype, allowExisting bool) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) existing := false if allowExisting { // Do a select 
within the transaction to detemine if the UUID already exists - datatypeRows, _, err := s.queryTx(ctx, datatypesTable, tx, + datatypeRows, _, err := s.QueryTx(ctx, datatypesTable, tx, sq.Select("id"). From(datatypesTable). Where(sq.Eq{ @@ -77,7 +78,7 @@ func (s *SQLCommon) UpsertDatatype(ctx context.Context, datatype *core.Datatype, if existing { // Update the datatype - if _, err = s.updateTx(ctx, datatypesTable, tx, + if _, err = s.UpdateTx(ctx, datatypesTable, tx, sq.Update(datatypesTable). Set("message_id", datatype.Message). Set("validator", string(datatype.Validator)). @@ -94,7 +95,7 @@ func (s *SQLCommon) UpsertDatatype(ctx context.Context, datatype *core.Datatype, return err } } else { - if _, err = s.insertTx(ctx, datatypesTable, tx, + if _, err = s.InsertTx(ctx, datatypesTable, tx, sq.Insert(datatypesTable). Columns(datatypeColumns...). Values( @@ -116,7 +117,7 @@ func (s *SQLCommon) UpsertDatatype(ctx context.Context, datatype *core.Datatype, } } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) datatypeResult(ctx context.Context, row *sql.Rows) (*core.Datatype, error) { @@ -140,7 +141,7 @@ func (s *SQLCommon) datatypeResult(ctx context.Context, row *sql.Rows) (*core.Da func (s *SQLCommon) getDatatypeEq(ctx context.Context, eq sq.Eq, textName string) (message *core.Datatype, err error) { - rows, _, err := s.query(ctx, datatypesTable, + rows, _, err := s.Query(ctx, datatypesTable, sq.Select(datatypeColumns...). From(datatypesTable). Where(eq), @@ -171,16 +172,16 @@ func (s *SQLCommon) GetDatatypeByName(ctx context.Context, ns, name, version str return s.getDatatypeEq(ctx, sq.Eq{"namespace": ns, "name": name, "version": version}, fmt.Sprintf("%s:%s", ns, name)) } -func (s *SQLCommon) GetDatatypes(ctx context.Context, namespace string, filter database.Filter) (message []*core.Datatype, res *database.FilterResult, err error) { +func (s *SQLCommon) GetDatatypes(ctx context.Context, namespace string, filter ffapi.Filter) (message []*core.Datatype, res *ffapi.FilterResult, err error) { - query, fop, fi, err := s.filterSelect( + query, fop, fi, err := s.FilterSelect( ctx, "", sq.Select(datatypeColumns...).From(datatypesTable), filter, datatypeFilterFieldMap, []interface{}{"sequence"}, sq.Eq{"namespace": namespace}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, datatypesTable, query) + rows, tx, err := s.Query(ctx, datatypesTable, query) if err != nil { return nil, nil, err } @@ -195,6 +196,6 @@ func (s *SQLCommon) GetDatatypes(ctx context.Context, namespace string, filter d datatypes = append(datatypes, datatype) } - return datatypes, s.queryRes(ctx, datatypesTable, tx, fop, fi), err + return datatypes, s.QueryRes(ctx, datatypesTable, tx, fop, fi), err } diff --git a/internal/database/sqlcommon/datatype_sql_test.go b/internal/database/sqlcommon/datatype_sql_test.go index fad4efb5e2..cca132d69e 100644 --- a/internal/database/sqlcommon/datatype_sql_test.go +++ b/internal/database/sqlcommon/datatype_sql_test.go @@ -123,7 +123,7 @@ func TestUpsertDatatypeFailBegin(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.UpsertDatatype(context.Background(), &core.Datatype{}, true) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -134,7 +134,7 @@ func TestUpsertDatatypeFailSelect(t *testing.T) { mock.ExpectRollback() datatypeID := fftypes.NewUUID() err := 
s.UpsertDatatype(context.Background(), &core.Datatype{ID: datatypeID}, true) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -146,7 +146,7 @@ func TestUpsertDatatypeFailInsert(t *testing.T) { mock.ExpectRollback() datatypeID := fftypes.NewUUID() err := s.UpsertDatatype(context.Background(), &core.Datatype{ID: datatypeID}, true) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -158,7 +158,7 @@ func TestUpsertDatatypeFailUpdate(t *testing.T) { mock.ExpectExec("UPDATE .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpsertDatatype(context.Background(), &core.Datatype{ID: datatypeID}, true) - assert.Regexp(t, "FF10117", err) + assert.Regexp(t, "FF00178", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -170,7 +170,7 @@ func TestUpsertDatatypeFailCommit(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnResult(sqlmock.NewResult(1, 1)) mock.ExpectCommit().WillReturnError(fmt.Errorf("pop")) err := s.UpsertDatatype(context.Background(), &core.Datatype{ID: datatypeID}, true) - assert.Regexp(t, "FF10119", err) + assert.Regexp(t, "FF00180", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -179,7 +179,7 @@ func TestGetDatatypeByIDSelectFail(t *testing.T) { datatypeID := fftypes.NewUUID() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) _, err := s.GetDatatypeByID(context.Background(), "ns1", datatypeID) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -215,7 +215,7 @@ func TestGetDatatypesQueryFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.DatatypeQueryFactory.NewFilter(context.Background()).Eq("id", "") _, _, err := s.GetDatatypes(context.Background(), "ns1", f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } diff --git a/internal/database/sqlcommon/event_sql.go b/internal/database/sqlcommon/event_sql.go index 9828b44631..2af6525591 100644 --- a/internal/database/sqlcommon/event_sql.go +++ b/internal/database/sqlcommon/event_sql.go @@ -21,6 +21,8 @@ import ( "database/sql" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/dbsql" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" @@ -62,13 +64,18 @@ var ( // This is not safe to do unless you are really sure what other locks will be taken after // that in the transaction. So we defer the emission of the events to a pre-commit capture. 
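// Illustrative sketch (not part of this patch): the deferral described above
// is now expressed through the TXWrapper's pre-commit accumulator hook in
// firefly-common, replacing the old tx.preCommitEvents field. Judging from its
// use in this patch, an accumulator is anything with a PreCommit(ctx, tx)
// method, which CommitTx invokes inside the transaction just before the SQL
// COMMIT. The "auditPCA" type and "audit" table here are hypothetical; the
// real event wiring (eventsPCA) follows below in this file, and the imports
// assumed are the ones this patch adds to event_sql.go (sq, dbsql, context).
type auditPCA struct {
	s       *SQLCommon
	records []string
}

func (p *auditPCA) PreCommit(ctx context.Context, tx *dbsql.TXWrapper) error {
	// Runs in the same transaction as the work that queued the records, so
	// these writes commit (or roll back) atomically with it
	for _, r := range p.records {
		if _, err := p.s.InsertTx(ctx, "audit", tx,
			sq.Insert("audit").Columns("detail").Values(r),
			func() { /* post-commit callback */ }); err != nil {
			return err
		}
	}
	return nil
}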
func (s *SQLCommon) InsertEvent(ctx context.Context, event *core.Event) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } event.Sequence = -1 // the sequence is not allocated until the post-commit callback - s.addPreCommitEvent(tx, event) - return s.commitTx(ctx, tx, autoCommit) + pca := tx.PreCommitAccumulator() + if pca == nil { + pca = &eventsPCA{s: s} + tx.SetPreCommitAccumulator(pca) + } + pca.(*eventsPCA).events = append(pca.(*eventsPCA).events, event) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) setEventInsertValues(query sq.InsertBuilder, event *core.Event) sq.InsertBuilder { @@ -91,31 +98,36 @@ func (s *SQLCommon) eventInserted(ctx context.Context, event *core.Event) { log.L(ctx).Infof("Emitted %s event %s for %s:%s (correlator=%v,topic=%s)", event.Type, event.ID, event.Namespace, event.Reference, event.Correlator, event.Topic) } -func (s *SQLCommon) insertEventsPreCommit(ctx context.Context, tx *txWrapper, events []*core.Event) (err error) { +type eventsPCA struct { + s *SQLCommon + events []*core.Event +} + +func (p *eventsPCA) PreCommit(ctx context.Context, tx *dbsql.TXWrapper) (err error) { namespaces := make(map[string]bool) - for _, event := range events { + for _, event := range p.events { namespaces[event.Namespace] = true } for namespace := range namespaces { // We take the cost of a lock - scoped to the namespace(s) being updated. // This allows us to rely on the sequence to always be increasing, even when writing events // concurrently (it does not guarantee we won't get a gap in the sequences). - if err = s.acquireLockTx(ctx, namespace, tx); err != nil { + if err = p.s.AcquireLockTx(ctx, namespace, tx); err != nil { return err } } - if s.features.MultiRowInsert { + if p.s.features.MultiRowInsert { query := sq.Insert(eventsTable).Columns(eventColumns...) - for _, event := range events { - query = s.setEventInsertValues(query, event) + for _, event := range p.events { + query = p.s.setEventInsertValues(query, event) } - sequences := make([]int64, len(events)) - err := s.insertTxRows(ctx, eventsTable, tx, query, func() { - for i, event := range events { + sequences := make([]int64, len(p.events)) + err := p.s.InsertTxRows(ctx, eventsTable, tx, query, func() { + for i, event := range p.events { event.Sequence = sequences[i] - s.eventInserted(ctx, event) + p.s.eventInserted(ctx, event) } }, sequences, true /* we want the caller to be able to retry with individual upserts */) if err != nil { @@ -123,10 +135,10 @@ func (s *SQLCommon) insertEventsPreCommit(ctx context.Context, tx *txWrapper, ev } } else { // Fall back to individual inserts grouped in a TX - for _, event := range events { - query := s.setEventInsertValues(sq.Insert(eventsTable).Columns(eventColumns...), event) - event.Sequence, err = s.insertTx(ctx, eventsTable, tx, query, func() { - s.eventInserted(ctx, event) + for _, event := range p.events { + query := p.s.setEventInsertValues(sq.Insert(eventsTable).Columns(eventColumns...), event) + event.Sequence, err = p.s.InsertTx(ctx, eventsTable, tx, query, func() { + p.s.eventInserted(ctx, event) }) if err != nil { return err @@ -160,8 +172,8 @@ func (s *SQLCommon) eventResult(ctx context.Context, row *sql.Rows) (*core.Event func (s *SQLCommon) GetEventByID(ctx context.Context, namespace string, id *fftypes.UUID) (message *core.Event, err error) { cols := append([]string{}, eventColumns...) 
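// Illustrative sketch (not part of this patch): when the provider reports
// features.MultiRowInsert, both InsertDataArray and the PreCommit path above
// use the exported multi-row helper, falling back to per-row InsertTx calls
// otherwise. A minimal example of the calling convention, using only the
// signature visible in this patch; the "things" table is hypothetical and the
// imports assumed are those already in event_sql.go (sq, dbsql, log, context).
func (s *SQLCommon) insertThingsSketch(ctx context.Context, tx *dbsql.TXWrapper, names []string) error {
	query := sq.Insert("things").Columns("name")
	for _, name := range names {
		query = query.Values(name)
	}
	// One slot per row - by the time the callback fires, each slot holds the
	// sequence the database allocated to the corresponding inserted row
	sequences := make([]int64, len(names))
	return s.InsertTxRows(ctx, "things", tx, query, func() {
		for i, name := range names {
			log.L(ctx).Debugf("inserted %s as sequence %d", name, sequences[i])
		}
	}, sequences,
		true /* per the comment above: let the caller retry with individual inserts */)
}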
- cols = append(cols, sequenceColumn) - rows, _, err := s.query(ctx, eventsTable, + cols = append(cols, s.SequenceColumn()) + rows, _, err := s.Query(ctx, eventsTable, sq.Select(cols...). From(eventsTable). Where(sq.Eq{"id": id, "namespace": namespace}), @@ -184,18 +196,18 @@ func (s *SQLCommon) GetEventByID(ctx context.Context, namespace string, id *ffty return event, nil } -func (s *SQLCommon) GetEvents(ctx context.Context, namespace string, filter database.Filter) (message []*core.Event, res *database.FilterResult, err error) { +func (s *SQLCommon) GetEvents(ctx context.Context, namespace string, filter ffapi.Filter) (message []*core.Event, res *ffapi.FilterResult, err error) { cols := append([]string{}, eventColumns...) - cols = append(cols, sequenceColumn) - query, fop, fi, err := s.filterSelect( + cols = append(cols, s.SequenceColumn()) + query, fop, fi, err := s.FilterSelect( ctx, "", sq.Select(cols...).From(eventsTable), filter, eventFilterFieldMap, []interface{}{"sequence"}, sq.Eq{"namespace": namespace}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, eventsTable, query) + rows, tx, err := s.Query(ctx, eventsTable, query) if err != nil { return nil, nil, err } @@ -210,6 +222,6 @@ func (s *SQLCommon) GetEvents(ctx context.Context, namespace string, filter data events = append(events, event) } - return events, s.queryRes(ctx, eventsTable, tx, fop, fi), err + return events, s.QueryRes(ctx, eventsTable, tx, fop, fi), err } diff --git a/internal/database/sqlcommon/event_sql_test.go b/internal/database/sqlcommon/event_sql_test.go index ab1c579931..59d4264a2f 100644 --- a/internal/database/sqlcommon/event_sql_test.go +++ b/internal/database/sqlcommon/event_sql_test.go @@ -93,7 +93,7 @@ func TestInsertEventFailBegin(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.InsertEvent(context.Background(), &core.Event{}) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -104,7 +104,7 @@ func TestInsertEventFailLock(t *testing.T) { mock.ExpectRollback() eventID := fftypes.NewUUID() err := s.InsertEvent(context.Background(), &core.Event{ID: eventID, Namespace: "ns1"}) - assert.Regexp(t, "FF10345", err) + assert.Regexp(t, "FF00187", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -116,7 +116,7 @@ func TestInsertEventFailInsert(t *testing.T) { mock.ExpectRollback() eventID := fftypes.NewUUID() err := s.InsertEvent(context.Background(), &core.Event{ID: eventID, Namespace: "ns1"}) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -128,7 +128,7 @@ func TestInsertEventFailCommit(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnResult(sqlmock.NewResult(1, 1)) mock.ExpectCommit().WillReturnError(fmt.Errorf("pop")) err := s.InsertEvent(context.Background(), &core.Event{ID: eventID, Namespace: "ns1"}) - assert.Regexp(t, "FF10119", err) + assert.Regexp(t, "FF00180", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -144,15 +144,18 @@ func TestInsertEventsPreCommitMultiRowOK(t *testing.T) { mock.ExpectBegin() mock.ExpectExec("").WillReturnResult(driver.ResultNoRows) - mock.ExpectQuery("INSERT.*").WillReturnRows(sqlmock.NewRows([]string{sequenceColumn}). + mock.ExpectQuery("INSERT.*").WillReturnRows(sqlmock.NewRows([]string{s.SequenceColumn()}). AddRow(int64(1001)). 
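// Illustrative note (not part of this patch): the package-level sequenceColumn
// constant is replaced by the SequenceColumn() accessor, so the name of the
// sequence column is owned by the provider via firefly-common (the mock
// provider in these tests used "seq", as the filter tests removed later in
// this patch show). A short sketch of the read-side usage seen in GetEventByID
// above; the other column names are hypothetical.
func (s *SQLCommon) thingColumnsSketch() []string {
	cols := []string{"id", "name"}
	return append(cols, s.SequenceColumn())
}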
AddRow(int64(1002)), ) mock.ExpectCommit() - ctx, tx, autoCommit, err := s.beginOrUseTx(context.Background()) - tx.preCommitEvents = []*core.Event{ev1, ev2} + ctx, tx, autoCommit, err := s.BeginOrUseTx(context.Background()) + tx.SetPreCommitAccumulator(&eventsPCA{ + s: &s.SQLCommon, + events: []*core.Event{ev1, ev2}, + }) assert.NoError(t, err) - err = s.commitTx(ctx, tx, autoCommit) + err = s.CommitTx(ctx, tx, autoCommit) assert.NoError(t, err) assert.NoError(t, mock.ExpectationsWereMet()) s.callbacks.AssertExpectations(t) @@ -166,11 +169,14 @@ func TestInsertEventsPreCommitMultiRowFail(t *testing.T) { mock.ExpectBegin() mock.ExpectExec("").WillReturnResult(driver.ResultNoRows) mock.ExpectQuery("INSERT.*").WillReturnError(fmt.Errorf("pop")) - ctx, tx, autoCommit, err := s.beginOrUseTx(context.Background()) - tx.preCommitEvents = []*core.Event{ev1} + ctx, tx, autoCommit, err := s.BeginOrUseTx(context.Background()) + tx.SetPreCommitAccumulator(&eventsPCA{ + s: &s.SQLCommon, + events: []*core.Event{ev1}, + }) assert.NoError(t, err) - err = s.commitTx(ctx, tx, autoCommit) - assert.Regexp(t, "FF10116", err) + err = s.CommitTx(ctx, tx, autoCommit) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) s.callbacks.AssertExpectations(t) } @@ -181,11 +187,14 @@ func TestInsertEventsPreCommitSingleRowFail(t *testing.T) { mock.ExpectBegin() mock.ExpectExec("").WillReturnResult(driver.ResultNoRows) mock.ExpectExec("INSERT.*").WillReturnError(fmt.Errorf("pop")) - ctx, tx, autoCommit, err := s.beginOrUseTx(context.Background()) - tx.preCommitEvents = []*core.Event{ev1} + ctx, tx, autoCommit, err := s.BeginOrUseTx(context.Background()) + tx.SetPreCommitAccumulator(&eventsPCA{ + s: &s.SQLCommon, + events: []*core.Event{ev1}, + }) assert.NoError(t, err) - err = s.commitTx(ctx, tx, autoCommit) - assert.Regexp(t, "FF10116", err) + err = s.CommitTx(ctx, tx, autoCommit) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) s.callbacks.AssertExpectations(t) } @@ -195,7 +204,7 @@ func TestGetEventByIDSelectFail(t *testing.T) { eventID := fftypes.NewUUID() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) _, err := s.GetEventByID(context.Background(), "ns1", eventID) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -223,7 +232,7 @@ func TestGetEventsQueryFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.EventQueryFactory.NewFilter(context.Background()).Eq("id", "") _, _, err := s.GetEvents(context.Background(), "ns1", f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } diff --git a/internal/database/sqlcommon/ffi_events_sql.go b/internal/database/sqlcommon/ffi_events_sql.go index 2eeb3a6e8d..75f9841ceb 100644 --- a/internal/database/sqlcommon/ffi_events_sql.go +++ b/internal/database/sqlcommon/ffi_events_sql.go @@ -21,6 +21,7 @@ import ( "database/sql" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" @@ -48,13 +49,13 @@ var ( const ffieventsTable = "ffievents" func (s *SQLCommon) UpsertFFIEvent(ctx context.Context, event *fftypes.FFIEvent) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { 
return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) - rows, _, err := s.queryTx(ctx, ffieventsTable, tx, + rows, _, err := s.QueryTx(ctx, ffieventsTable, tx, sq.Select("id"). From(ffieventsTable). Where(sq.And{sq.Eq{"interface_id": event.Interface}, sq.Eq{"namespace": event.Namespace}, sq.Eq{"pathname": event.Pathname}}), @@ -66,7 +67,7 @@ func (s *SQLCommon) UpsertFFIEvent(ctx context.Context, event *fftypes.FFIEvent) rows.Close() if existing { - if _, err = s.updateTx(ctx, ffieventsTable, tx, + if _, err = s.UpdateTx(ctx, ffieventsTable, tx, sq.Update(ffieventsTable). Set("params", event.Params). Where(sq.And{sq.Eq{"interface_id": event.Interface}, sq.Eq{"namespace": event.Namespace}, sq.Eq{"pathname": event.Pathname}}), @@ -77,7 +78,7 @@ func (s *SQLCommon) UpsertFFIEvent(ctx context.Context, event *fftypes.FFIEvent) return err } } else { - if _, err = s.insertTx(ctx, ffieventsTable, tx, + if _, err = s.InsertTx(ctx, ffieventsTable, tx, sq.Insert(ffieventsTable). Columns(ffiEventsColumns...). Values( @@ -98,7 +99,7 @@ func (s *SQLCommon) UpsertFFIEvent(ctx context.Context, event *fftypes.FFIEvent) } } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) ffiEventResult(ctx context.Context, row *sql.Rows) (*fftypes.FFIEvent, error) { @@ -120,7 +121,7 @@ func (s *SQLCommon) ffiEventResult(ctx context.Context, row *sql.Rows) (*fftypes } func (s *SQLCommon) getFFIEventPred(ctx context.Context, desc string, pred interface{}) (*fftypes.FFIEvent, error) { - rows, _, err := s.query(ctx, ffieventsTable, + rows, _, err := s.Query(ctx, ffieventsTable, sq.Select(ffiEventsColumns...). From(ffieventsTable). Where(pred), @@ -143,14 +144,14 @@ func (s *SQLCommon) getFFIEventPred(ctx context.Context, desc string, pred inter return ci, nil } -func (s *SQLCommon) GetFFIEvents(ctx context.Context, namespace string, filter database.Filter) (events []*fftypes.FFIEvent, res *database.FilterResult, err error) { - query, fop, fi, err := s.filterSelect(ctx, "", sq.Select(ffiEventsColumns...).From(ffieventsTable), +func (s *SQLCommon) GetFFIEvents(ctx context.Context, namespace string, filter ffapi.Filter) (events []*fftypes.FFIEvent, res *ffapi.FilterResult, err error) { + query, fop, fi, err := s.FilterSelect(ctx, "", sq.Select(ffiEventsColumns...).From(ffieventsTable), filter, ffiEventFilterFieldMap, []interface{}{"sequence"}, sq.Eq{"namespace": namespace}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, ffieventsTable, query) + rows, tx, err := s.Query(ctx, ffieventsTable, query) if err != nil { return nil, nil, err } @@ -164,7 +165,7 @@ func (s *SQLCommon) GetFFIEvents(ctx context.Context, namespace string, filter d events = append(events, ci) } - return events, s.queryRes(ctx, ffieventsTable, tx, fop, fi), err + return events, s.QueryRes(ctx, ffieventsTable, tx, fop, fi), err } diff --git a/internal/database/sqlcommon/ffi_events_sql_test.go b/internal/database/sqlcommon/ffi_events_sql_test.go index 80ba6e8d9f..355bf5ce2c 100644 --- a/internal/database/sqlcommon/ffi_events_sql_test.go +++ b/internal/database/sqlcommon/ffi_events_sql_test.go @@ -94,7 +94,7 @@ func TestFFIEventDBFailBeginTransaction(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.UpsertFFIEvent(context.Background(), &fftypes.FFIEvent{}) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -116,7 
+116,7 @@ func TestFFIEventDBFailInsert(t *testing.T) { ID: fftypes.NewUUID(), } err := s.UpsertFFIEvent(context.Background(), event) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } diff --git a/internal/database/sqlcommon/ffi_methods_sql.go b/internal/database/sqlcommon/ffi_methods_sql.go index 4c341116f0..cb673dfc17 100644 --- a/internal/database/sqlcommon/ffi_methods_sql.go +++ b/internal/database/sqlcommon/ffi_methods_sql.go @@ -21,6 +21,7 @@ import ( "database/sql" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" @@ -49,13 +50,13 @@ var ( const ffimethodsTable = "ffimethods" func (s *SQLCommon) UpsertFFIMethod(ctx context.Context, method *fftypes.FFIMethod) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) - rows, _, err := s.queryTx(ctx, ffimethodsTable, tx, + rows, _, err := s.QueryTx(ctx, ffimethodsTable, tx, sq.Select("id"). From(ffimethodsTable). Where(sq.And{sq.Eq{"interface_id": method.Interface}, sq.Eq{"namespace": method.Namespace}, sq.Eq{"pathname": method.Pathname}}), @@ -67,7 +68,7 @@ func (s *SQLCommon) UpsertFFIMethod(ctx context.Context, method *fftypes.FFIMeth rows.Close() if existing { - if _, err = s.updateTx(ctx, ffimethodsTable, tx, + if _, err = s.UpdateTx(ctx, ffimethodsTable, tx, sq.Update(ffimethodsTable). Set("params", method.Params). Set("returns", method.Returns). @@ -79,7 +80,7 @@ func (s *SQLCommon) UpsertFFIMethod(ctx context.Context, method *fftypes.FFIMeth return err } } else { - if _, err = s.insertTx(ctx, ffimethodsTable, tx, + if _, err = s.InsertTx(ctx, ffimethodsTable, tx, sq.Insert(ffimethodsTable). Columns(ffiMethodsColumns...). Values( @@ -101,7 +102,7 @@ func (s *SQLCommon) UpsertFFIMethod(ctx context.Context, method *fftypes.FFIMeth } } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) ffiMethodResult(ctx context.Context, row *sql.Rows) (*fftypes.FFIMethod, error) { @@ -124,7 +125,7 @@ func (s *SQLCommon) ffiMethodResult(ctx context.Context, row *sql.Rows) (*fftype } func (s *SQLCommon) getFFIMethodPred(ctx context.Context, desc string, pred interface{}) (*fftypes.FFIMethod, error) { - rows, _, err := s.query(ctx, ffimethodsTable, + rows, _, err := s.Query(ctx, ffimethodsTable, sq.Select(ffiMethodsColumns...). From(ffimethodsTable). 
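// Illustrative sketch (not part of this patch): UpsertFFIEvent above, and the
// FFI method / FFI upserts that follow, all take the same shape with the
// exported helpers - query for the natural key inside the transaction, then
// update or insert. A condensed example of that shape, using only signatures
// visible in this patch; the "gadgets" table and its columns are hypothetical.
func (s *SQLCommon) upsertGadgetSketch(ctx context.Context, ns, name, config string) error {
	ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx)
	if err != nil {
		return err
	}
	defer s.RollbackTx(ctx, tx, autoCommit)

	// Existence check on the natural key, inside the same transaction
	rows, _, err := s.QueryTx(ctx, "gadgets", tx,
		sq.Select("id").From("gadgets").Where(sq.Eq{"namespace": ns, "name": name}))
	if err != nil {
		return err
	}
	existing := rows.Next()
	rows.Close()

	if existing {
		if _, err = s.UpdateTx(ctx, "gadgets", tx,
			sq.Update("gadgets").Set("config", config).
				Where(sq.Eq{"namespace": ns, "name": name}),
			func() { /* change-event callback */ }); err != nil {
			return err
		}
	} else {
		if _, err = s.InsertTx(ctx, "gadgets", tx,
			sq.Insert("gadgets").Columns("namespace", "name", "config").Values(ns, name, config),
			func() { /* change-event callback */ }); err != nil {
			return err
		}
	}

	return s.CommitTx(ctx, tx, autoCommit)
}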
Where(pred), @@ -147,14 +148,14 @@ func (s *SQLCommon) getFFIMethodPred(ctx context.Context, desc string, pred inte return ci, nil } -func (s *SQLCommon) GetFFIMethods(ctx context.Context, namespace string, filter database.Filter) (methods []*fftypes.FFIMethod, res *database.FilterResult, err error) { - query, fop, fi, err := s.filterSelect(ctx, "", sq.Select(ffiMethodsColumns...).From(ffimethodsTable), +func (s *SQLCommon) GetFFIMethods(ctx context.Context, namespace string, filter ffapi.Filter) (methods []*fftypes.FFIMethod, res *ffapi.FilterResult, err error) { + query, fop, fi, err := s.FilterSelect(ctx, "", sq.Select(ffiMethodsColumns...).From(ffimethodsTable), filter, ffiMethodFilterFieldMap, []interface{}{"sequence"}, sq.Eq{"namespace": namespace}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, ffimethodsTable, query) + rows, tx, err := s.Query(ctx, ffimethodsTable, query) if err != nil { return nil, nil, err } @@ -168,7 +169,7 @@ func (s *SQLCommon) GetFFIMethods(ctx context.Context, namespace string, filter methods = append(methods, ci) } - return methods, s.queryRes(ctx, ffimethodsTable, tx, fop, fi), err + return methods, s.QueryRes(ctx, ffimethodsTable, tx, fop, fi), err } diff --git a/internal/database/sqlcommon/ffi_methods_sql_test.go b/internal/database/sqlcommon/ffi_methods_sql_test.go index e0edf453ba..d51ded9b21 100644 --- a/internal/database/sqlcommon/ffi_methods_sql_test.go +++ b/internal/database/sqlcommon/ffi_methods_sql_test.go @@ -106,7 +106,7 @@ func TestFFIMethodDBFailBeginTransaction(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.UpsertFFIMethod(context.Background(), &fftypes.FFIMethod{}) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -128,7 +128,7 @@ func TestFFIMethodDBFailInsert(t *testing.T) { ID: fftypes.NewUUID(), } err := s.UpsertFFIMethod(context.Background(), event) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } diff --git a/internal/database/sqlcommon/ffi_sql.go b/internal/database/sqlcommon/ffi_sql.go index 43bb91a4e7..0d1f4ff2b4 100644 --- a/internal/database/sqlcommon/ffi_sql.go +++ b/internal/database/sqlcommon/ffi_sql.go @@ -21,6 +21,7 @@ import ( "database/sql" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" @@ -46,13 +47,13 @@ var ( const ffiTable = "ffi" func (s *SQLCommon) UpsertFFI(ctx context.Context, ffi *fftypes.FFI) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) - rows, _, err := s.queryTx(ctx, ffiTable, tx, + rows, _, err := s.QueryTx(ctx, ffiTable, tx, sq.Select("id"). From(ffiTable). Where(sq.Eq{ @@ -67,7 +68,7 @@ func (s *SQLCommon) UpsertFFI(ctx context.Context, ffi *fftypes.FFI) (err error) rows.Close() if existing { - if _, err = s.updateTx(ctx, ffiTable, tx, + if _, err = s.UpdateTx(ctx, ffiTable, tx, sq.Update(ffiTable). Set("name", ffi.Name). Set("version", ffi.Version). 
@@ -80,7 +81,7 @@ func (s *SQLCommon) UpsertFFI(ctx context.Context, ffi *fftypes.FFI) (err error) return err } } else { - if _, err = s.insertTx(ctx, ffiTable, tx, + if _, err = s.InsertTx(ctx, ffiTable, tx, sq.Insert(ffiTable). Columns(ffiColumns...). Values( @@ -99,7 +100,7 @@ func (s *SQLCommon) UpsertFFI(ctx context.Context, ffi *fftypes.FFI) (err error) } } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) ffiResult(ctx context.Context, row *sql.Rows) (*fftypes.FFI, error) { @@ -119,7 +120,7 @@ func (s *SQLCommon) ffiResult(ctx context.Context, row *sql.Rows) (*fftypes.FFI, } func (s *SQLCommon) getFFIPred(ctx context.Context, desc string, pred interface{}) (*fftypes.FFI, error) { - rows, _, err := s.query(ctx, ffiTable, + rows, _, err := s.Query(ctx, ffiTable, sq.Select(ffiColumns...). From(ffiTable). Where(pred), @@ -142,15 +143,15 @@ func (s *SQLCommon) getFFIPred(ctx context.Context, desc string, pred interface{ return ffi, nil } -func (s *SQLCommon) GetFFIs(ctx context.Context, namespace string, filter database.Filter) (ffis []*fftypes.FFI, res *database.FilterResult, err error) { +func (s *SQLCommon) GetFFIs(ctx context.Context, namespace string, filter ffapi.Filter) (ffis []*fftypes.FFI, res *ffapi.FilterResult, err error) { - query, fop, fi, err := s.filterSelect(ctx, "", sq.Select(ffiColumns...).From(ffiTable), + query, fop, fi, err := s.FilterSelect(ctx, "", sq.Select(ffiColumns...).From(ffiTable), filter, ffiFilterFieldMap, []interface{}{"sequence"}, sq.Eq{"namespace": namespace}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, ffiTable, query) + rows, tx, err := s.Query(ctx, ffiTable, query) if err != nil { return nil, nil, err } @@ -165,7 +166,7 @@ func (s *SQLCommon) GetFFIs(ctx context.Context, namespace string, filter databa ffis = append(ffis, cd) } - return ffis, s.queryRes(ctx, ffiTable, tx, fop, fi), err + return ffis, s.QueryRes(ctx, ffiTable, tx, fop, fi), err } diff --git a/internal/database/sqlcommon/ffi_sql_test.go b/internal/database/sqlcommon/ffi_sql_test.go index a7338dbabd..e623d8435e 100644 --- a/internal/database/sqlcommon/ffi_sql_test.go +++ b/internal/database/sqlcommon/ffi_sql_test.go @@ -105,7 +105,7 @@ func TestFFIDBFailBeginTransaction(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.UpsertFFI(context.Background(), &fftypes.FFI{}) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -127,7 +127,7 @@ func TestFFIDBFailInsert(t *testing.T) { ID: fftypes.NewUUID(), } err := s.UpsertFFI(context.Background(), ffi) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } diff --git a/internal/database/sqlcommon/filter_sql.go b/internal/database/sqlcommon/filter_sql.go deleted file mode 100644 index 3f04a4a497..0000000000 --- a/internal/database/sqlcommon/filter_sql.go +++ /dev/null @@ -1,234 +0,0 @@ -// Copyright © 2022 Kaleido, Inc. -// -// SPDX-License-Identifier: Apache-2.0 -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package sqlcommon - -import ( - "context" - "fmt" - "strings" - - sq "github.com/Masterminds/squirrel" - "github.com/hyperledger/firefly-common/pkg/i18n" - "github.com/hyperledger/firefly/internal/coremsgs" - "github.com/hyperledger/firefly/pkg/database" -) - -func (s *SQLCommon) filterSelect(ctx context.Context, tableName string, sel sq.SelectBuilder, filter database.Filter, typeMap map[string]string, defaultSort []interface{}, preconditions ...sq.Sqlizer) (sq.SelectBuilder, sq.Sqlizer, *database.FilterInfo, error) { - fi, err := filter.Finalize() - if err != nil { - return sel, nil, nil, err - } - if len(fi.Sort) == 0 { - for _, s := range defaultSort { - switch v := s.(type) { - case string: - fi.Sort = append(fi.Sort, &database.SortField{Field: v, Descending: true}) - case *database.SortField: - fi.Sort = append(fi.Sort, v) - default: - panic(fmt.Sprintf("unknown sort type: %v", v)) - } - } - } - fop, err := s.filterSelectFinalized(ctx, tableName, fi, typeMap, preconditions...) - sel = sel.Where(fop) - sort := make([]string, len(fi.Sort)) - var sortString string - for i, sf := range fi.Sort { - direction := "" - if sf.Descending { - direction = " DESC" - } - nulls := "" - if sf.Nulls == database.NullsFirst { - nulls = " NULLS FIRST" - } else if sf.Nulls == database.NullsLast { - nulls = " NULLS LAST" - } - sort[i] = fmt.Sprintf("%s%s%s", s.mapField(tableName, sf.Field, typeMap), direction, nulls) - } - sortString = strings.Join(sort, ", ") - sel = sel.OrderBy(sortString) - if err == nil { - if fi.Skip > 0 { - sel = sel.Offset(fi.Skip) - } - if fi.Limit > 0 { - sel = sel.Limit(fi.Limit) - } - } - return sel, fop, fi, err -} - -func (s *SQLCommon) filterSelectFinalized(ctx context.Context, tableName string, fi *database.FilterInfo, tm map[string]string, preconditions ...sq.Sqlizer) (sq.Sqlizer, error) { - fop, err := s.filterOp(ctx, tableName, fi, tm) - if err != nil { - return nil, err - } - if len(preconditions) > 0 { - and := make(sq.And, len(preconditions)+1) - copy(and, preconditions) - and[len(preconditions)] = fop - fop = and - } - return fop, nil -} - -func (s *SQLCommon) buildUpdate(sel sq.UpdateBuilder, update database.Update, typeMap map[string]string) (sq.UpdateBuilder, error) { - ui, err := update.Finalize() - if err != nil { - return sel, err - } - for _, so := range ui.SetOperations { - - sel = sel.Set(s.mapField("", so.Field, typeMap), so.Value) - } - return sel, nil -} - -func (s *SQLCommon) filterUpdate(ctx context.Context, update sq.UpdateBuilder, filter database.Filter, typeMap map[string]string) (sq.UpdateBuilder, error) { - fi, err := filter.Finalize() - var fop sq.Sqlizer - if err == nil { - fop, err = s.filterOp(ctx, "", fi, typeMap) - } - if err != nil { - return update, err - } - return update.Where(fop), nil -} - -func (s *SQLCommon) escapeLike(value database.FieldSerialization) string { - v, _ := value.Value() - vs, _ := v.(string) - vs = strings.ReplaceAll(vs, "[", "[[]") - vs = strings.ReplaceAll(vs, "%", "[%]") - vs = strings.ReplaceAll(vs, "_", "[_]") - return vs -} - -func (s *SQLCommon) mapField(tableName, fieldName string, tm 
map[string]string) string { - if fieldName == "sequence" { - if tableName == "" { - return sequenceColumn - } - return fmt.Sprintf("%s.seq", tableName) - } - var field = fieldName - if tm != nil { - if mf, ok := tm[fieldName]; ok { - field = mf - } - } - if tableName != "" { - field = fmt.Sprintf("%s.%s", tableName, field) - } - return field -} - -// newILike uses ILIKE if supported by DB, otherwise the "lower" approach -func (s *SQLCommon) newILike(field, value string) sq.Sqlizer { - if s.features.UseILIKE { - return sq.ILike{field: value} - } - return sq.Like{fmt.Sprintf("lower(%s)", field): strings.ToLower(value)} -} - -// newNotILike uses ILIKE if supported by DB, otherwise the "lower" approach -func (s *SQLCommon) newNotILike(field, value string) sq.Sqlizer { - if s.features.UseILIKE { - return sq.NotILike{field: value} - } - return sq.NotLike{fmt.Sprintf("lower(%s)", field): strings.ToLower(value)} -} - -func (s *SQLCommon) filterOp(ctx context.Context, tableName string, op *database.FilterInfo, tm map[string]string) (sq.Sqlizer, error) { - switch op.Op { - case database.FilterOpOr: - return s.filterOr(ctx, tableName, op, tm) - case database.FilterOpAnd: - return s.filterAnd(ctx, tableName, op, tm) - case database.FilterOpEq: - return sq.Eq{s.mapField(tableName, op.Field, tm): op.Value}, nil - case database.FilterOpIEq: - return s.newILike(s.mapField(tableName, op.Field, tm), s.escapeLike(op.Value)), nil - case database.FilterOpIn: - return sq.Eq{s.mapField(tableName, op.Field, tm): op.Values}, nil - case database.FilterOpNeq: - return sq.NotEq{s.mapField(tableName, op.Field, tm): op.Value}, nil - case database.FilterOpNIeq: - return s.newNotILike(s.mapField(tableName, op.Field, tm), s.escapeLike(op.Value)), nil - case database.FilterOpNotIn: - return sq.NotEq{s.mapField(tableName, op.Field, tm): op.Values}, nil - case database.FilterOpCont: - return sq.Like{s.mapField(tableName, op.Field, tm): fmt.Sprintf("%%%s%%", s.escapeLike(op.Value))}, nil - case database.FilterOpNotCont: - return sq.NotLike{s.mapField(tableName, op.Field, tm): fmt.Sprintf("%%%s%%", s.escapeLike(op.Value))}, nil - case database.FilterOpICont: - return s.newILike(s.mapField(tableName, op.Field, tm), fmt.Sprintf("%%%s%%", s.escapeLike(op.Value))), nil - case database.FilterOpNotICont: - return s.newNotILike(s.mapField(tableName, op.Field, tm), fmt.Sprintf("%s%%", s.escapeLike(op.Value))), nil - case database.FilterOpStartsWith: - return sq.Like{s.mapField(tableName, op.Field, tm): fmt.Sprintf("%s%%", s.escapeLike(op.Value))}, nil - case database.FilterOpNotStartsWith: - return sq.NotLike{s.mapField(tableName, op.Field, tm): fmt.Sprintf("%s%%", s.escapeLike(op.Value))}, nil - case database.FilterOpIStartsWith: - return s.newILike(s.mapField(tableName, op.Field, tm), fmt.Sprintf("%s%%", s.escapeLike(op.Value))), nil - case database.FilterOpNotIStartsWith: - return s.newNotILike(s.mapField(tableName, op.Field, tm), fmt.Sprintf("%s%%", s.escapeLike(op.Value))), nil - case database.FilterOpEndsWith: - return sq.Like{s.mapField(tableName, op.Field, tm): fmt.Sprintf("%%%s", s.escapeLike(op.Value))}, nil - case database.FilterOpNotEndsWith: - return sq.NotLike{s.mapField(tableName, op.Field, tm): fmt.Sprintf("%%%s", s.escapeLike(op.Value))}, nil - case database.FilterOpIEndsWith: - return s.newILike(s.mapField(tableName, op.Field, tm), fmt.Sprintf("%%%s", s.escapeLike(op.Value))), nil - case database.FilterOpNotIEndsWith: - return s.newNotILike(s.mapField(tableName, op.Field, tm), fmt.Sprintf("%%%s", 
s.escapeLike(op.Value))), nil - case database.FilterOpGt: - return sq.Gt{s.mapField(tableName, op.Field, tm): op.Value}, nil - case database.FilterOpGte: - return sq.GtOrEq{s.mapField(tableName, op.Field, tm): op.Value}, nil - case database.FilterOpLt: - return sq.Lt{s.mapField(tableName, op.Field, tm): op.Value}, nil - case database.FilterOpLte: - return sq.LtOrEq{s.mapField(tableName, op.Field, tm): op.Value}, nil - default: - return nil, i18n.NewError(ctx, coremsgs.MsgUnsupportedSQLOpInFilter, op.Op) - } -} - -func (s *SQLCommon) filterOr(ctx context.Context, tableName string, op *database.FilterInfo, tm map[string]string) (sq.Sqlizer, error) { - var err error - or := make(sq.Or, len(op.Children)) - for i, c := range op.Children { - if or[i], err = s.filterOp(ctx, tableName, c, tm); err != nil { - return nil, err - } - } - return or, nil -} - -func (s *SQLCommon) filterAnd(ctx context.Context, tableName string, op *database.FilterInfo, tm map[string]string) (sq.Sqlizer, error) { - var err error - and := make(sq.And, len(op.Children)) - for i, c := range op.Children { - if and[i], err = s.filterOp(ctx, tableName, c, tm); err != nil { - return nil, err - } - } - return and, nil -} diff --git a/internal/database/sqlcommon/filter_sql_test.go b/internal/database/sqlcommon/filter_sql_test.go deleted file mode 100644 index d75fb9a9f0..0000000000 --- a/internal/database/sqlcommon/filter_sql_test.go +++ /dev/null @@ -1,227 +0,0 @@ -// Copyright © 2021 Kaleido, Inc. -// -// SPDX-License-Identifier: Apache-2.0 -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package sqlcommon - -import ( - "context" - "database/sql/driver" - "testing" - - "github.com/Masterminds/squirrel" - "github.com/hyperledger/firefly-common/pkg/fftypes" - "github.com/hyperledger/firefly/pkg/database" - "github.com/stretchr/testify/assert" -) - -func TestSQLQueryFactory(t *testing.T) { - s, _ := newMockProvider().init() - s.individualSort = true - fb := database.MessageQueryFactory.NewFilter(context.Background()) - f := fb.And( - fb.Eq("tag", "tag1"), - fb.Or( - fb.Eq("id", "35c11cba-adff-4a4d-970a-02e3a0858dc8"), - fb.Eq("id", "caefb9d1-9fc9-4d6a-a155-514d3139adf7"), - ), - fb.Gt("sequence", "12345"), - fb.Eq("confirmed", nil), - ). - Skip(50). - Limit(25). - Sort("-id"). - Sort("tag"). - Sort("-sequence") - - sel := squirrel.Select("*").From("mytable") - sel, _, _, err := s.filterSelect(context.Background(), "", sel, f, map[string]string{ - "namespace": "ns", - }, []interface{}{"sequence"}) - assert.NoError(t, err) - - sqlFilter, args, err := sel.ToSql() - assert.NoError(t, err) - assert.Equal(t, "SELECT * FROM mytable WHERE (tag = ? AND (id = ? OR id = ?) AND seq > ? 
AND confirmed IS NULL) ORDER BY id DESC, tag, seq DESC LIMIT 25 OFFSET 50", sqlFilter) - assert.Equal(t, "tag1", args[0]) - assert.Equal(t, "35c11cba-adff-4a4d-970a-02e3a0858dc8", args[1]) - assert.Equal(t, "caefb9d1-9fc9-4d6a-a155-514d3139adf7", args[2]) - assert.Equal(t, int64(12345), args[3]) -} - -func TestSQLQueryFactoryExtraOps(t *testing.T) { - - s, _ := newMockProvider().init() - fb := database.MessageQueryFactory.NewFilter(context.Background()) - u := fftypes.MustParseUUID("4066ABDC-8BBD-4472-9D29-1A55B467F9B9") - f := fb.And( - fb.In("created", []driver.Value{1, 2, 3}), - fb.NotIn("created", []driver.Value{1, 2, 3}), - fb.Eq("id", u), - fb.In("id", []driver.Value{*u}), - fb.Neq("id", nil), - fb.Lt("created", "0"), - fb.Lte("created", "0"), - fb.Gte("created", "0"), - fb.Neq("created", "0"), - fb.Gt("sequence", 12345), - fb.Contains("topics", "abc"), - fb.NotContains("topics", "def"), - fb.IContains("topics", "ghi"), - fb.NotIContains("topics", "jkl"), - ). - Descending() - - sel := squirrel.Select("*").From("mytable AS mt") - sel, _, _, err := s.filterSelect(context.Background(), "mt", sel, f, nil, []interface{}{"sequence"}) - assert.NoError(t, err) - - sqlFilter, _, err := sel.ToSql() - assert.NoError(t, err) - assert.Equal(t, "SELECT * FROM mytable AS mt WHERE (mt.created IN (?,?,?) AND mt.created NOT IN (?,?,?) AND mt.id = ? AND mt.id IN (?) AND mt.id IS NOT NULL AND mt.created < ? AND mt.created <= ? AND mt.created >= ? AND mt.created <> ? AND mt.seq > ? AND mt.topics LIKE ? AND mt.topics NOT LIKE ? AND mt.topics ILIKE ? AND mt.topics NOT ILIKE ?) ORDER BY mt.seq DESC", sqlFilter) -} - -func TestSQLQueryFactoryEvenMoreOps(t *testing.T) { - - s, _ := newMockProvider().init() - fb := database.MessageQueryFactory.NewFilter(context.Background()) - u := fftypes.MustParseUUID("4066ABDC-8BBD-4472-9D29-1A55B467F9B9") - f := fb.And( - fb.IEq("id", u), - fb.NIeq("id", nil), - fb.StartsWith("topics", "abc"), - fb.NotStartsWith("topics", "def"), - fb.IStartsWith("topics", "ghi"), - fb.NotIStartsWith("topics", "jkl"), - fb.EndsWith("topics", "mno"), - fb.NotEndsWith("topics", "pqr"), - fb.IEndsWith("topics", "sty"), - fb.NotIEndsWith("topics", "vwx"), - ). - Descending() - - sel := squirrel.Select("*").From("mytable AS mt") - sel, _, _, err := s.filterSelect(context.Background(), "mt", sel, f, nil, []interface{}{"sequence"}) - assert.NoError(t, err) - - sqlFilter, _, err := sel.ToSql() - assert.NoError(t, err) - assert.Equal(t, "SELECT * FROM mytable AS mt WHERE (mt.id ILIKE ? AND mt.id NOT ILIKE ? AND mt.topics LIKE ? AND mt.topics NOT LIKE ? AND mt.topics ILIKE ? AND mt.topics NOT ILIKE ? AND mt.topics LIKE ? AND mt.topics NOT LIKE ? AND mt.topics ILIKE ? AND mt.topics NOT ILIKE ?) 
ORDER BY mt.seq DESC", sqlFilter) -} - -func TestSQLQueryFactoryFinalizeFail(t *testing.T) { - s, _ := newMockProvider().init() - fb := database.MessageQueryFactory.NewFilter(context.Background()) - sel := squirrel.Select("*").From("mytable") - _, _, _, err := s.filterSelect(context.Background(), "ns", sel, fb.Eq("tag", map[bool]bool{true: false}), nil, []interface{}{"sequence"}) - assert.Regexp(t, "FF00143.*tag", err) -} - -func TestSQLQueryFactoryBadOp(t *testing.T) { - - s, _ := newMockProvider().init() - _, err := s.filterSelectFinalized(context.Background(), "", &database.FilterInfo{ - Op: database.FilterOp("wrong"), - }, nil) - assert.Regexp(t, "FF10150.*wrong", err) -} - -func TestSQLQueryFactoryBadOpInOr(t *testing.T) { - - s, _ := newMockProvider().init() - _, err := s.filterSelectFinalized(context.Background(), "", &database.FilterInfo{ - Op: database.FilterOpOr, - Children: []*database.FilterInfo{ - {Op: database.FilterOp("wrong")}, - }, - }, nil) - assert.Regexp(t, "FF10150.*wrong", err) -} - -func TestSQLQueryFactoryBadOpInAnd(t *testing.T) { - - s, _ := newMockProvider().init() - _, err := s.filterSelectFinalized(context.Background(), "", &database.FilterInfo{ - Op: database.FilterOpAnd, - Children: []*database.FilterInfo{ - {Op: database.FilterOp("wrong")}, - }, - }, nil) - assert.Regexp(t, "FF10150.*wrong", err) -} - -func TestSQLQueryFactoryDefaultSort(t *testing.T) { - - s, _ := newMockProvider().init() - sel := squirrel.Select("*").From("mytable") - fb := database.MessageQueryFactory.NewFilter(context.Background()) - f := fb.And( - fb.Eq("tag", "tag1"), - ) - sel, _, _, err := s.filterSelect(context.Background(), "", sel, f, nil, []interface{}{ - &database.SortField{ - Field: "sequence", - Descending: true, - Nulls: database.NullsLast, - }, - }) - assert.NoError(t, err) - - sqlFilter, args, err := sel.ToSql() - assert.NoError(t, err) - assert.Equal(t, "SELECT * FROM mytable WHERE (tag = ?) 
ORDER BY seq DESC NULLS LAST", sqlFilter) - assert.Equal(t, "tag1", args[0]) -} - -func TestSQLQueryFactoryDefaultSortBadType(t *testing.T) { - - s, _ := newMockProvider().init() - sel := squirrel.Select("*").From("mytable") - fb := database.MessageQueryFactory.NewFilter(context.Background()) - f := fb.And( - fb.Eq("tag", "tag1"), - ) - assert.PanicsWithValue(t, "unknown sort type: 100", func() { - s.filterSelect(context.Background(), "", sel, f, nil, []interface{}{100}) - }) -} - -func TestILIKE(t *testing.T) { - s, _ := newMockProvider().init() - - s.features.UseILIKE = true - q := s.newILike("test", "value") - sqlString, _, _ := q.ToSql() - assert.Regexp(t, "ILIKE", sqlString) - - s.features.UseILIKE = false - q = s.newILike("test", "value") - sqlString, _, _ = q.ToSql() - assert.Regexp(t, "lower\\(test\\)", sqlString) -} - -func TestNotILIKE(t *testing.T) { - s, _ := newMockProvider().init() - - s.features.UseILIKE = true - q := s.newNotILike("test", "value") - sqlString, _, _ := q.ToSql() - assert.Regexp(t, "ILIKE", sqlString) - - s.features.UseILIKE = false - q = s.newNotILike("test", "value") - sqlString, _, _ = q.ToSql() - assert.Regexp(t, "lower\\(test\\)", sqlString) -} diff --git a/internal/database/sqlcommon/group_sql.go b/internal/database/sqlcommon/group_sql.go index 2917648e81..8309129a12 100644 --- a/internal/database/sqlcommon/group_sql.go +++ b/internal/database/sqlcommon/group_sql.go @@ -21,6 +21,8 @@ import ( "database/sql" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/dbsql" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" @@ -46,11 +48,11 @@ var ( const groupsTable = "groups" func (s *SQLCommon) UpsertGroup(ctx context.Context, group *core.Group, optimization database.UpsertOptimization) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) // We use an upsert optimization here for performance, but also to account for the situation where two threads // try to perform an insert concurrently and ensure a non-failure outcome. @@ -66,7 +68,7 @@ func (s *SQLCommon) UpsertGroup(ctx context.Context, group *core.Group, optimiza existing := false if !optimized { // Do a select within the transaction to determine if the UUID already exists - groupRows, _, err := s.queryTx(ctx, groupsTable, tx, + groupRows, _, err := s.QueryTx(ctx, groupsTable, tx, sq.Select("hash"). From(groupsTable). Where(sq.Eq{"hash": group.Hash, "namespace_local": group.LocalNamespace}), @@ -97,12 +99,12 @@ func (s *SQLCommon) UpsertGroup(ctx context.Context, group *core.Group, optimiza } } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } -func (s *SQLCommon) attemptGroupUpdate(ctx context.Context, tx *txWrapper, group *core.Group) (int64, error) { +func (s *SQLCommon) attemptGroupUpdate(ctx context.Context, tx *dbsql.TXWrapper, group *core.Group) (int64, error) { // Update the group - return s.updateTx(ctx, groupsTable, tx, + return s.UpdateTx(ctx, groupsTable, tx, sq.Update(groupsTable). Set("message_id", group.Message). Set("name", group.Name). 
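// Illustrative note (not part of this patch): the filter/update SQL builders
// and their tests deleted above now live in firefly-common (dbsql/ffapi),
// reached through the exported FilterSelect/BuildUpdate calls used throughout
// this patch. The caller-side shape appears unchanged; this sketch is adapted
// from the deleted TestSQLQueryFactory, assuming the factory chaining carries
// over to the ffapi types as the updated tests in this patch suggest, and that
// GetMessages keeps the (ctx, namespace, filter) shape used by the other
// getters here. The SQL shown is what the deleted test asserted before the move.
func (s *SQLCommon) recentTaggedMessagesSketch(ctx context.Context, ns string) error {
	fb := database.MessageQueryFactory.NewFilter(ctx)
	filter := fb.And(
		fb.Eq("tag", "tag1"),
		fb.Gt("sequence", "12345"),
	).Skip(50).Limit(25).Sort("-sequence")

	// Via FilterSelect this produced SQL of the form:
	//   SELECT ... WHERE (tag = ? AND seq > ?) ORDER BY seq DESC LIMIT 25 OFFSET 50
	_, _, err := s.GetMessages(ctx, ns, filter)
	return err
}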
@@ -115,8 +117,8 @@ func (s *SQLCommon) attemptGroupUpdate(ctx context.Context, tx *txWrapper, group ) } -func (s *SQLCommon) attemptGroupInsert(ctx context.Context, tx *txWrapper, group *core.Group, requestConflictEmptyResult bool) error { - _, err := s.insertTxExt(ctx, groupsTable, tx, +func (s *SQLCommon) attemptGroupInsert(ctx context.Context, tx *dbsql.TXWrapper, group *core.Group, requestConflictEmptyResult bool) error { + _, err := s.InsertTxExt(ctx, groupsTable, tx, sq.Insert(groupsTable). Columns(groupColumns...). Values( @@ -135,10 +137,10 @@ func (s *SQLCommon) attemptGroupInsert(ctx context.Context, tx *txWrapper, group return err } -func (s *SQLCommon) updateMembers(ctx context.Context, tx *txWrapper, group *core.Group, existing bool) error { +func (s *SQLCommon) updateMembers(ctx context.Context, tx *dbsql.TXWrapper, group *core.Group, existing bool) error { if existing { - if err := s.deleteTx(ctx, groupsTable, tx, + if err := s.DeleteTx(ctx, groupsTable, tx, sq.Delete("members"). Where(sq.And{ sq.Eq{"group_hash": group.Hash}, @@ -157,7 +159,7 @@ func (s *SQLCommon) updateMembers(ctx context.Context, tx *txWrapper, group *cor if requiredMember.Node == nil { return i18n.NewError(ctx, i18n.MsgEmptyMemberNode, requiredIdx) } - if _, err := s.insertTx(ctx, groupsTable, tx, + if _, err := s.InsertTx(ctx, groupsTable, tx, sq.Insert("members"). Columns( "group_hash", @@ -190,7 +192,7 @@ func (s *SQLCommon) loadMembers(ctx context.Context, groups []*core.Group) error } } - members, _, err := s.query(ctx, groupsTable, + members, _, err := s.Query(ctx, groupsTable, sq.Select( "group_hash", "identity", @@ -247,7 +249,7 @@ func (s *SQLCommon) groupResult(ctx context.Context, row *sql.Rows) (*core.Group func (s *SQLCommon) GetGroupByHash(ctx context.Context, namespace string, hash *fftypes.Bytes32) (group *core.Group, err error) { - rows, _, err := s.query(ctx, groupsTable, + rows, _, err := s.Query(ctx, groupsTable, sq.Select(groupColumns...). From(groupsTable). 
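// Illustrative sketch (not part of this patch): helpers such as
// attemptGroupUpdate, attemptGroupInsert and updateMembers now take the
// exported *dbsql.TXWrapper rather than the old private *txWrapper, so the
// group tests further down stop hand-building &txWrapper{sqlTX: tx} and obtain
// the wrapper from BeginOrUseTx, exactly as production code does. A minimal
// example of that test shape (the test name and "gadgets" table are
// hypothetical; imports are those already in group_sql_test.go plus squirrel).
func TestTXWrapperHelperSketch(t *testing.T) {
	s, mock := newMockProvider().init()
	mock.ExpectBegin()
	mock.ExpectExec("INSERT .*").WillReturnError(fmt.Errorf("pop"))

	// The wrapper comes from the same exported API the production code uses
	ctx, tx, _, err := s.BeginOrUseTx(context.Background())
	assert.NoError(t, err)

	_, err = s.InsertTx(ctx, "gadgets", tx,
		sq.Insert("gadgets").Columns("name").Values("g1"),
		func() { /* change-event callback */ })
	assert.Regexp(t, "FF00177", err) // generic insert-failure code from firefly-common
	assert.NoError(t, mock.ExpectationsWereMet())
}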
Where(sq.Eq{"hash": hash, "namespace_local": namespace}), @@ -275,14 +277,14 @@ func (s *SQLCommon) GetGroupByHash(ctx context.Context, namespace string, hash * return group, nil } -func (s *SQLCommon) GetGroups(ctx context.Context, namespace string, filter database.Filter) (group []*core.Group, res *database.FilterResult, err error) { - query, fop, fi, err := s.filterSelect(ctx, "", sq.Select(groupColumns...).From(groupsTable), +func (s *SQLCommon) GetGroups(ctx context.Context, namespace string, filter ffapi.Filter) (group []*core.Group, res *ffapi.FilterResult, err error) { + query, fop, fi, err := s.FilterSelect(ctx, "", sq.Select(groupColumns...).From(groupsTable), filter, groupFilterFieldMap, []interface{}{"sequence"}, sq.Eq{"namespace_local": namespace}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, groupsTable, query) + rows, tx, err := s.Query(ctx, groupsTable, query) if err != nil { return nil, nil, err } @@ -304,5 +306,5 @@ func (s *SQLCommon) GetGroups(ctx context.Context, namespace string, filter data } } - return groups, s.queryRes(ctx, groupsTable, tx, fop, fi), err + return groups, s.QueryRes(ctx, groupsTable, tx, fop, fi), err } diff --git a/internal/database/sqlcommon/group_sql_test.go b/internal/database/sqlcommon/group_sql_test.go index ee01168f17..44948b462a 100644 --- a/internal/database/sqlcommon/group_sql_test.go +++ b/internal/database/sqlcommon/group_sql_test.go @@ -120,7 +120,7 @@ func TestUpsertGroupFailBegin(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.UpsertGroup(context.Background(), &core.Group{}, database.UpsertOptimizationSkip) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -131,7 +131,7 @@ func TestUpsertGroupFailSelect(t *testing.T) { mock.ExpectRollback() groupID := fftypes.NewRandB32() err := s.UpsertGroup(context.Background(), &core.Group{Hash: groupID}, database.UpsertOptimizationSkip) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -143,7 +143,7 @@ func TestUpsertGroupFailInsert(t *testing.T) { mock.ExpectRollback() groupID := fftypes.NewRandB32() err := s.UpsertGroup(context.Background(), &core.Group{Hash: groupID}, database.UpsertOptimizationSkip) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -155,7 +155,7 @@ func TestUpsertGroupFailUpdate(t *testing.T) { mock.ExpectExec("UPDATE .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpsertGroup(context.Background(), &core.Group{Hash: groupID}, database.UpsertOptimizationSkip) - assert.Regexp(t, "FF10117", err) + assert.Regexp(t, "FF00178", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -175,7 +175,7 @@ func TestUpsertGroupFailMembers(t *testing.T) { }, }, }, database.UpsertOptimizationSkip) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -187,7 +187,7 @@ func TestUpsertGroupFailCommit(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnResult(sqlmock.NewResult(1, 1)) mock.ExpectCommit().WillReturnError(fmt.Errorf("pop")) err := s.UpsertGroup(context.Background(), &core.Group{Hash: groupID}, database.UpsertOptimizationSkip) - assert.Regexp(t, "FF10119", err) + assert.Regexp(t, "FF00180", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -196,14 +196,15 @@ func 
TestUpdateMembersRecreateFail(t *testing.T) { groupID := fftypes.NewRandB32() mock.ExpectBegin() mock.ExpectExec("DELETE .*").WillReturnError(fmt.Errorf("pop")) - tx, _ := s.db.Begin() - err := s.updateMembers(context.Background(), &txWrapper{sqlTX: tx}, &core.Group{ + ctx, tx, _, err := s.BeginOrUseTx(context.Background()) + assert.NoError(t, err) + err = s.updateMembers(ctx, tx, &core.Group{ Hash: groupID, GroupIdentity: core.GroupIdentity{ Members: core.Members{{Node: fftypes.NewUUID()}}, }, }, true) - assert.Regexp(t, "FF10118", err) + assert.Regexp(t, "FF00179", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -211,8 +212,9 @@ func TestUpdateMembersMissingOrg(t *testing.T) { s, mock := newMockProvider().init() groupID := fftypes.NewRandB32() mock.ExpectBegin() - tx, _ := s.db.Begin() - err := s.updateMembers(context.Background(), &txWrapper{sqlTX: tx}, &core.Group{ + ctx, tx, _, err := s.BeginOrUseTx(context.Background()) + assert.NoError(t, err) + err = s.updateMembers(ctx, tx, &core.Group{ Hash: groupID, GroupIdentity: core.GroupIdentity{ Members: core.Members{{Node: fftypes.NewUUID()}}, @@ -226,8 +228,9 @@ func TestUpdateMembersMissingNode(t *testing.T) { s, mock := newMockProvider().init() groupID := fftypes.NewRandB32() mock.ExpectBegin() - tx, _ := s.db.Begin() - err := s.updateMembers(context.Background(), &txWrapper{sqlTX: tx}, &core.Group{ + ctx, tx, _, err := s.BeginOrUseTx(context.Background()) + assert.NoError(t, err) + err = s.updateMembers(ctx, tx, &core.Group{ Hash: groupID, GroupIdentity: core.GroupIdentity{ Members: core.Members{{Identity: "0x12345"}}, @@ -241,12 +244,13 @@ func TestUpdateGroupDataDeleteFail(t *testing.T) { s, mock := newMockProvider().init() groupID := fftypes.NewRandB32() mock.ExpectBegin() - tx, _ := s.db.Begin() + ctx, tx, _, err := s.BeginOrUseTx(context.Background()) + assert.NoError(t, err) mock.ExpectExec("DELETE .*").WillReturnError(fmt.Errorf("pop")) - err := s.updateMembers(context.Background(), &txWrapper{sqlTX: tx}, &core.Group{ + err = s.updateMembers(ctx, tx, &core.Group{ Hash: groupID, }, true) - assert.Regexp(t, "FF10118", err) + assert.Regexp(t, "FF00179", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -254,15 +258,16 @@ func TestUpdateGroupDataAddFail(t *testing.T) { s, mock := newMockProvider().init() groupID := fftypes.NewRandB32() mock.ExpectBegin() - tx, _ := s.db.Begin() + ctx, tx, _, err := s.BeginOrUseTx(context.Background()) + assert.NoError(t, err) mock.ExpectExec("INSERT .*").WillReturnError(fmt.Errorf("pop")) - err := s.updateMembers(context.Background(), &txWrapper{sqlTX: tx}, &core.Group{ + err = s.updateMembers(ctx, tx, &core.Group{ Hash: groupID, GroupIdentity: core.GroupIdentity{ Members: core.Members{{Identity: "0x12345", Node: fftypes.NewUUID()}}, }, }, false) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -271,7 +276,7 @@ func TestLoadMembersQueryFail(t *testing.T) { groupID := fftypes.NewRandB32() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) err := s.loadMembers(context.Background(), []*core.Group{{Hash: groupID}}) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -300,7 +305,7 @@ func TestGetGroupByIDSelectFail(t *testing.T) { groupID := fftypes.NewRandB32() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) _, err := s.GetGroupByHash(context.Background(), "ns1", groupID) - assert.Regexp(t, "FF10115", err) + 
assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -330,7 +335,7 @@ func TestGetGroupByIDLoadMembersFail(t *testing.T) { AddRow(nil, "ns1", "ns1", "name1", fftypes.NewRandB32(), fftypes.Now())) mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) _, err := s.GetGroupByHash(context.Background(), "ns1", groupID) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -346,7 +351,7 @@ func TestGetGroupsQueryFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.GroupQueryFactory.NewFilter(context.Background()).Eq("hash", "") _, _, err := s.GetGroups(context.Background(), "ns1", f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -366,6 +371,6 @@ func TestGetGroupsLoadMembersFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.GroupQueryFactory.NewFilter(context.Background()).Gt("created", "0") _, _, err := s.GetGroups(context.Background(), "ns1", f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } diff --git a/internal/database/sqlcommon/identity_sql.go b/internal/database/sqlcommon/identity_sql.go index 84b8aff658..0ddf43838e 100644 --- a/internal/database/sqlcommon/identity_sql.go +++ b/internal/database/sqlcommon/identity_sql.go @@ -21,6 +21,8 @@ import ( "database/sql" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/dbsql" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" @@ -56,9 +58,9 @@ var ( const identitiesTable = "identities" -func (s *SQLCommon) attemptIdentityUpdate(ctx context.Context, tx *txWrapper, identity *core.Identity) (int64, error) { +func (s *SQLCommon) attemptIdentityUpdate(ctx context.Context, tx *dbsql.TXWrapper, identity *core.Identity) (int64, error) { identity.Updated = fftypes.Now() - return s.updateTx(ctx, identitiesTable, tx, + return s.UpdateTx(ctx, identitiesTable, tx, sq.Update(identitiesTable). Set("did", identity.DID). Set("parent", identity.Parent). @@ -79,10 +81,10 @@ func (s *SQLCommon) attemptIdentityUpdate(ctx context.Context, tx *txWrapper, id }) } -func (s *SQLCommon) attemptIdentityInsert(ctx context.Context, tx *txWrapper, identity *core.Identity, requestConflictEmptyResult bool) (err error) { +func (s *SQLCommon) attemptIdentityInsert(ctx context.Context, tx *dbsql.TXWrapper, identity *core.Identity, requestConflictEmptyResult bool) (err error) { identity.Created = fftypes.Now() identity.Updated = identity.Created - _, err = s.insertTxExt(ctx, identitiesTable, tx, + _, err = s.InsertTxExt(ctx, identitiesTable, tx, sq.Insert(identitiesTable). Columns(identityColumns...). 
Values( @@ -107,11 +109,11 @@ func (s *SQLCommon) attemptIdentityInsert(ctx context.Context, tx *txWrapper, id } func (s *SQLCommon) UpsertIdentity(ctx context.Context, identity *core.Identity, optimization database.UpsertOptimization) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) optimized := false if optimization == database.UpsertOptimizationNew { @@ -124,7 +126,7 @@ func (s *SQLCommon) UpsertIdentity(ctx context.Context, identity *core.Identity, if !optimized { // Do a select within the transaction to detemine if the UUID already exists - msgRows, _, err := s.queryTx(ctx, identitiesTable, tx, + msgRows, _, err := s.QueryTx(ctx, identitiesTable, tx, sq.Select("id"). From(identitiesTable). Where(sq.Eq{"id": identity.ID, "namespace": identity.Namespace}), @@ -147,7 +149,7 @@ func (s *SQLCommon) UpsertIdentity(ctx context.Context, identity *core.Identity, } } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) identityResult(ctx context.Context, row *sql.Rows) (*core.Identity, error) { @@ -175,7 +177,7 @@ func (s *SQLCommon) identityResult(ctx context.Context, row *sql.Rows) (*core.Id func (s *SQLCommon) getIdentityPred(ctx context.Context, desc string, pred interface{}) (identity *core.Identity, err error) { - rows, _, err := s.query(ctx, identitiesTable, + rows, _, err := s.Query(ctx, identitiesTable, sq.Select(identityColumns...). From(identitiesTable). Where(pred), @@ -205,14 +207,14 @@ func (s *SQLCommon) GetIdentityByID(ctx context.Context, namespace string, id *f return s.getIdentityPred(ctx, id.String(), sq.Eq{"id": id, "namespace": namespace}) } -func (s *SQLCommon) GetIdentities(ctx context.Context, namespace string, filter database.Filter) (identities []*core.Identity, fr *database.FilterResult, err error) { +func (s *SQLCommon) GetIdentities(ctx context.Context, namespace string, filter ffapi.Filter) (identities []*core.Identity, fr *ffapi.FilterResult, err error) { - query, fop, fi, err := s.filterSelect(ctx, "", sq.Select(identityColumns...).From(identitiesTable), filter, identityFilterFieldMap, []interface{}{"sequence"}, sq.Eq{"namespace": namespace}) + query, fop, fi, err := s.FilterSelect(ctx, "", sq.Select(identityColumns...).From(identitiesTable), filter, identityFilterFieldMap, []interface{}{"sequence"}, sq.Eq{"namespace": namespace}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, identitiesTable, query) + rows, tx, err := s.Query(ctx, identitiesTable, query) if err != nil { return nil, nil, err } @@ -227,6 +229,6 @@ func (s *SQLCommon) GetIdentities(ctx context.Context, namespace string, filter identities = append(identities, d) } - return identities, s.queryRes(ctx, identitiesTable, tx, fop, fi), err + return identities, s.QueryRes(ctx, identitiesTable, tx, fop, fi), err } diff --git a/internal/database/sqlcommon/identity_sql_test.go b/internal/database/sqlcommon/identity_sql_test.go index 01f84137a8..75d7bef336 100644 --- a/internal/database/sqlcommon/identity_sql_test.go +++ b/internal/database/sqlcommon/identity_sql_test.go @@ -122,7 +122,7 @@ func TestUpsertIdentityFailBegin(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.UpsertIdentity(context.Background(), &core.Identity{}, database.UpsertOptimizationSkip) - assert.Regexp(t, "FF10114", err) + 
assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -136,7 +136,7 @@ func TestUpsertIdentityFailSelect(t *testing.T) { ID: fftypes.NewUUID(), }, }, database.UpsertOptimizationSkip) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -151,7 +151,7 @@ func TestUpsertIdentityFailInsert(t *testing.T) { ID: fftypes.NewUUID(), }, }, database.UpsertOptimizationSkip) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -167,7 +167,7 @@ func TestUpsertIdentityFailUpdate(t *testing.T) { ID: fftypes.NewUUID(), }, }, database.UpsertOptimizationSkip) - assert.Regexp(t, "FF10117", err) + assert.Regexp(t, "FF00178", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -182,7 +182,7 @@ func TestUpsertIdentityFailCommit(t *testing.T) { ID: fftypes.NewUUID(), }, }, database.UpsertOptimizationSkip) - assert.Regexp(t, "FF10119", err) + assert.Regexp(t, "FF00180", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -190,7 +190,7 @@ func TestGetIdentityByIDSelectFail(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) _, err := s.GetIdentityByID(context.Background(), "ns1", fftypes.NewUUID()) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -198,7 +198,7 @@ func TestGetIdentityByNameSelectFail(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) _, err := s.GetIdentityByName(context.Background(), core.IdentityTypeOrg, "ff_system", "org1") - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -206,7 +206,7 @@ func TestGetIdentityByIdentitySelectFail(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) _, err := s.GetIdentityByDID(context.Background(), "ns1", "did:firefly:org/org1") - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -232,7 +232,7 @@ func TestGetIdentityQueryFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.IdentityQueryFactory.NewFilter(context.Background()).Eq("did", "") _, _, err := s.GetIdentities(context.Background(), "ns1", f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } diff --git a/internal/database/sqlcommon/message_sql.go b/internal/database/sqlcommon/message_sql.go index bb5d7448e3..a3e97659e5 100644 --- a/internal/database/sqlcommon/message_sql.go +++ b/internal/database/sqlcommon/message_sql.go @@ -22,6 +22,8 @@ import ( "fmt" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/dbsql" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" @@ -64,8 +66,8 @@ var ( const messagesTable = "messages" const messagesDataJoinTable = "messages_data" -func (s *SQLCommon) attemptMessageUpdate(ctx context.Context, tx *txWrapper, message *core.Message) (int64, error) { - return s.updateTx(ctx, messagesTable, tx, +func (s *SQLCommon) attemptMessageUpdate(ctx context.Context, tx *dbsql.TXWrapper, message *core.Message) (int64, error) { + 
return s.UpdateTx(ctx, messagesTable, tx, sq.Update(messagesTable). Set("cid", message.Header.CID). Set("mtype", string(message.Header.Type)). @@ -118,8 +120,8 @@ func (s *SQLCommon) setMessageInsertValues(query sq.InsertBuilder, message *core ) } -func (s *SQLCommon) attemptMessageInsert(ctx context.Context, tx *txWrapper, message *core.Message, requestConflictEmptyResult bool) (err error) { - message.Sequence, err = s.insertTxExt(ctx, messagesTable, tx, +func (s *SQLCommon) attemptMessageInsert(ctx context.Context, tx *dbsql.TXWrapper, message *core.Message, requestConflictEmptyResult bool) (err error) { + message.Sequence, err = s.InsertTxExt(ctx, messagesTable, tx, s.setMessageInsertValues(sq.Insert(messagesTable).Columns(msgColumns...), message), func() { s.callbacks.OrderedUUIDCollectionNSEvent(database.CollectionMessages, core.ChangeEventTypeCreated, message.LocalNamespace, message.Header.ID, message.Sequence) @@ -128,11 +130,11 @@ func (s *SQLCommon) attemptMessageInsert(ctx context.Context, tx *txWrapper, mes } func (s *SQLCommon) UpsertMessage(ctx context.Context, message *core.Message, optimization database.UpsertOptimization, hooks ...database.PostCompletionHook) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) // This is a performance critical function, as we stream data into the database for every message, in every batch. // @@ -151,8 +153,8 @@ func (s *SQLCommon) UpsertMessage(ctx context.Context, message *core.Message, op if !optimized { // Do a select within the transaction to detemine if the UUID already exists - msgRows, _, err := s.queryTx(ctx, messagesTable, tx, - sq.Select("hash", sequenceColumn). + msgRows, _, err := s.QueryTx(ctx, messagesTable, tx, + sq.Select("hash", s.SequenceColumn()). From(messagesTable). Where(sq.Eq{"id": message.Header.ID, "namespace_local": message.LocalNamespace}), ) @@ -195,19 +197,19 @@ func (s *SQLCommon) UpsertMessage(ctx context.Context, message *core.Message, op } for _, hook := range hooks { - s.postCommitEvent(tx, hook) + tx.AddPostCommitHook(hook) } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) InsertMessages(ctx context.Context, messages []*core.Message, hooks ...database.PostCompletionHook) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) if s.features.MultiRowInsert { msgQuery := sq.Insert(messagesTable).Columns(msgColumns...) 
@@ -229,7 +231,7 @@ func (s *SQLCommon) InsertMessages(ctx context.Context, messages []*core.Message sequences := make([]int64, len(messages)) // Use a single multi-row insert for the messages - err := s.insertTxRows(ctx, messagesTable, tx, msgQuery, func() { + err := s.InsertTxRows(ctx, messagesTable, tx, msgQuery, func() { for i, message := range messages { message.Sequence = sequences[i] s.callbacks.OrderedUUIDCollectionNSEvent(database.CollectionMessages, core.ChangeEventTypeCreated, message.LocalNamespace, message.Header.ID, message.Sequence) @@ -242,7 +244,7 @@ func (s *SQLCommon) InsertMessages(ctx context.Context, messages []*core.Message // Use a single multi-row insert for the data refs if dataRefCount > 0 { dataRefSeqs := make([]int64, dataRefCount) - err = s.insertTxRows(ctx, messagesDataJoinTable, tx, dataRefQuery, nil, dataRefSeqs, false) + err = s.InsertTxRows(ctx, messagesDataJoinTable, tx, dataRefQuery, nil, dataRefSeqs, false) if err != nil { return err } @@ -262,22 +264,22 @@ func (s *SQLCommon) InsertMessages(ctx context.Context, messages []*core.Message } for _, hook := range hooks { - s.postCommitEvent(tx, hook) + tx.AddPostCommitHook(hook) } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } // In SQL update+bump is a delete+insert within a TX func (s *SQLCommon) ReplaceMessage(ctx context.Context, message *core.Message) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) - if err := s.deleteTx(ctx, messagesTable, tx, + if err := s.DeleteTx(ctx, messagesTable, tx, sq.Delete(messagesTable). Where(sq.And{ sq.Eq{"id": message.Header.ID, "namespace_local": message.LocalNamespace}, @@ -294,14 +296,14 @@ func (s *SQLCommon) ReplaceMessage(ctx context.Context, message *core.Message) ( // Note there is no call to updateMessageDataRefs as the data refs are not allowed to change, // and are correlated by UUID (not sequence) - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } -func (s *SQLCommon) updateMessageDataRefs(ctx context.Context, tx *txWrapper, message *core.Message, recreateDatarefs bool) error { +func (s *SQLCommon) updateMessageDataRefs(ctx context.Context, tx *dbsql.TXWrapper, message *core.Message, recreateDatarefs bool) error { if recreateDatarefs { // Delete all the existing references, to replace them with new ones below - if err := s.deleteTx(ctx, messagesDataJoinTable, tx, + if err := s.DeleteTx(ctx, messagesDataJoinTable, tx, sq.Delete(messagesDataJoinTable). Where(sq.And{ sq.Eq{"message_id": message.Header.ID, "namespace": message.LocalNamespace}, @@ -320,7 +322,7 @@ func (s *SQLCommon) updateMessageDataRefs(ctx context.Context, tx *txWrapper, me return i18n.NewError(ctx, coremsgs.MsgMissingDataHashIndex, msgDataRefIDx) } // Add the linkage - if _, err := s.insertTx(ctx, messagesDataJoinTable, tx, + if _, err := s.InsertTx(ctx, messagesDataJoinTable, tx, sq.Insert(messagesDataJoinTable). 
Columns( "namespace", @@ -360,7 +362,7 @@ func (s *SQLCommon) loadDataRefs(ctx context.Context, namespace string, msgs []* } } - existingRefs, _, err := s.query(ctx, messagesDataJoinTable, + existingRefs, _, err := s.Query(ctx, messagesDataJoinTable, sq.Select( "message_id", "data_id", @@ -437,8 +439,8 @@ func (s *SQLCommon) msgResult(ctx context.Context, row *sql.Rows) (*core.Message func (s *SQLCommon) GetMessageByID(ctx context.Context, namespace string, id *fftypes.UUID) (message *core.Message, err error) { cols := append([]string{}, msgColumns...) - cols = append(cols, sequenceColumn) - rows, _, err := s.query(ctx, messagesTable, + cols = append(cols, s.SequenceColumn()) + rows, _, err := s.Query(ctx, messagesTable, sq.Select(cols...). From(messagesTable). Where(sq.Eq{"id": id, "namespace_local": namespace}), @@ -466,12 +468,12 @@ func (s *SQLCommon) GetMessageByID(ctx context.Context, namespace string, id *ff return msg, nil } -func (s *SQLCommon) getMessagesQuery(ctx context.Context, namespace string, query sq.SelectBuilder, fop sq.Sqlizer, fi *database.FilterInfo, allowCount bool) (message []*core.Message, fr *database.FilterResult, err error) { +func (s *SQLCommon) getMessagesQuery(ctx context.Context, namespace string, query sq.SelectBuilder, fop sq.Sqlizer, fi *ffapi.FilterInfo, allowCount bool) (message []*core.Message, fr *ffapi.FilterResult, err error) { if fi.Count && !allowCount { return nil, nil, i18n.NewError(ctx, coremsgs.MsgFilterCountNotSupported) } - rows, tx, err := s.query(ctx, messagesTable, query) + rows, tx, err := s.Query(ctx, messagesTable, query) if err != nil { return nil, nil, err } @@ -492,20 +494,20 @@ func (s *SQLCommon) getMessagesQuery(ctx context.Context, namespace string, quer return nil, nil, err } } - return msgs, s.queryRes(ctx, messagesTable, tx, fop, fi), err + return msgs, s.QueryRes(ctx, messagesTable, tx, fop, fi), err } -func (s *SQLCommon) GetMessageIDs(ctx context.Context, namespace string, filter database.Filter) (ids []*core.IDAndSequence, err error) { - query, _, _, err := s.filterSelect(ctx, "", sq.Select("id", sequenceColumn).From(messagesTable), filter, msgFilterFieldMap, +func (s *SQLCommon) GetMessageIDs(ctx context.Context, namespace string, filter ffapi.Filter) (ids []*core.IDAndSequence, err error) { + query, _, _, err := s.FilterSelect(ctx, "", sq.Select("id", s.SequenceColumn()).From(messagesTable), filter, msgFilterFieldMap, []interface{}{ - &database.SortField{Field: "confirmed", Descending: true, Nulls: database.NullsFirst}, + &ffapi.SortField{Field: "confirmed", Descending: true, Nulls: ffapi.NullsFirst}, "created", }, sq.Eq{"namespace_local": namespace}) if err != nil { return nil, err } - rows, _, err := s.query(ctx, messagesTable, query) + rows, _, err := s.Query(ctx, messagesTable, query) if err != nil { return nil, err } @@ -535,7 +537,7 @@ func (s *SQLCommon) GetBatchIDsForMessages(ctx context.Context, namespace string } func (s *SQLCommon) queryBatchIDs(ctx context.Context, query sq.SelectBuilder) (batchIDs []*fftypes.UUID, err error) { - rows, _, err := s.query(ctx, messagesTable, query) + rows, _, err := s.Query(ctx, messagesTable, query) if err != nil { return nil, err } @@ -556,13 +558,13 @@ func (s *SQLCommon) queryBatchIDs(ctx context.Context, query sq.SelectBuilder) ( return batchIDs, nil } -func (s *SQLCommon) GetMessages(ctx context.Context, namespace string, filter database.Filter) (message []*core.Message, fr *database.FilterResult, err error) { +func (s *SQLCommon) GetMessages(ctx context.Context, 
namespace string, filter ffapi.Filter) (message []*core.Message, fr *ffapi.FilterResult, err error) { cols := append([]string{}, msgColumns...) - cols = append(cols, sequenceColumn) - query, fop, fi, err := s.filterSelect(ctx, "", sq.Select(cols...).From(messagesTable), filter, msgFilterFieldMap, + cols = append(cols, s.SequenceColumn()) + query, fop, fi, err := s.FilterSelect(ctx, "", sq.Select(cols...).From(messagesTable), filter, msgFilterFieldMap, []interface{}{ - &database.SortField{Field: "confirmed", Descending: true, Nulls: database.NullsFirst}, - &database.SortField{Field: "created", Descending: true}, + &ffapi.SortField{Field: "confirmed", Descending: true, Nulls: ffapi.NullsFirst}, + &ffapi.SortField{Field: "created", Descending: true}, }, sq.Eq{"namespace_local": namespace}) if err != nil { return nil, nil, err @@ -570,13 +572,13 @@ func (s *SQLCommon) GetMessages(ctx context.Context, namespace string, filter da return s.getMessagesQuery(ctx, namespace, query, fop, fi, true) } -func (s *SQLCommon) GetMessagesForData(ctx context.Context, namespace string, dataID *fftypes.UUID, filter database.Filter) (message []*core.Message, fr *database.FilterResult, err error) { +func (s *SQLCommon) GetMessagesForData(ctx context.Context, namespace string, dataID *fftypes.UUID, filter ffapi.Filter) (message []*core.Message, fr *ffapi.FilterResult, err error) { cols := make([]string, len(msgColumns)+1) for i, col := range msgColumns { cols[i] = fmt.Sprintf("m.%s", col) } cols[len(msgColumns)] = "m.seq" - query, fop, fi, err := s.filterSelect( + query, fop, fi, err := s.FilterSelect( ctx, "m", sq.Select(cols...).From("messages_data AS md"), filter, msgFilterFieldMap, []interface{}{"sequence"}, sq.Eq{"md.data_id": dataID, "md.namespace": namespace}) @@ -588,32 +590,32 @@ func (s *SQLCommon) GetMessagesForData(ctx context.Context, namespace string, da return s.getMessagesQuery(ctx, namespace, query, fop, fi, false) } -func (s *SQLCommon) UpdateMessage(ctx context.Context, namespace string, msgid *fftypes.UUID, update database.Update) (err error) { +func (s *SQLCommon) UpdateMessage(ctx context.Context, namespace string, msgid *fftypes.UUID, update ffapi.Update) (err error) { return s.UpdateMessages(ctx, namespace, database.MessageQueryFactory.NewFilter(ctx).Eq("id", msgid), update) } -func (s *SQLCommon) UpdateMessages(ctx context.Context, namespace string, filter database.Filter, update database.Update) (err error) { +func (s *SQLCommon) UpdateMessages(ctx context.Context, namespace string, filter ffapi.Filter, update ffapi.Update) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) - query, err := s.buildUpdate(sq.Update(messagesTable).Where(sq.Eq{"namespace_local": namespace}), update, msgFilterFieldMap) + query, err := s.BuildUpdate(sq.Update(messagesTable).Where(sq.Eq{"namespace_local": namespace}), update, msgFilterFieldMap) if err != nil { return err } - query, err = s.filterUpdate(ctx, query, filter, msgFilterFieldMap) + query, err = s.FilterUpdate(ctx, query, filter, msgFilterFieldMap) if err != nil { return err } - _, err = s.updateTx(ctx, messagesTable, tx, query, nil /* no change events filter based update */) + _, err = s.UpdateTx(ctx, messagesTable, tx, query, nil /* no change events filter based update */) if err != nil { return err } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, 
autoCommit) } diff --git a/internal/database/sqlcommon/message_sql_test.go b/internal/database/sqlcommon/message_sql_test.go index 31fd1a06c2..19ffa8e21a 100644 --- a/internal/database/sqlcommon/message_sql_test.go +++ b/internal/database/sqlcommon/message_sql_test.go @@ -236,7 +236,7 @@ func TestUpsertMessageFailBegin(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.UpsertMessage(context.Background(), &core.Message{}, database.UpsertOptimizationSkip) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -247,7 +247,7 @@ func TestUpsertMessageFailSelect(t *testing.T) { mock.ExpectRollback() msgID := fftypes.NewUUID() err := s.UpsertMessage(context.Background(), &core.Message{Header: core.MessageHeader{ID: msgID}}, database.UpsertOptimizationSkip) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -259,7 +259,7 @@ func TestUpsertMessageFailInsert(t *testing.T) { mock.ExpectRollback() msgID := fftypes.NewUUID() err := s.UpsertMessage(context.Background(), &core.Message{Header: core.MessageHeader{ID: msgID}}, database.UpsertOptimizationSkip) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -271,7 +271,7 @@ func TestUpsertMessageFailUpdate(t *testing.T) { mock.ExpectExec("UPDATE .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpsertMessage(context.Background(), &core.Message{Header: core.MessageHeader{ID: msgID}}, database.UpsertOptimizationSkip) - assert.Regexp(t, "FF10117", err) + assert.Regexp(t, "FF00178", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -284,7 +284,7 @@ func TestUpsertMessageFailUpdateRefs(t *testing.T) { mock.ExpectExec("DELETE .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpsertMessage(context.Background(), &core.Message{Header: core.MessageHeader{ID: msgID}}, database.UpsertOptimizationSkip) - assert.Regexp(t, "FF10118", err) + assert.Regexp(t, "FF00179", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -296,7 +296,7 @@ func TestUpsertMessageFailCommit(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnResult(sqlmock.NewResult(1, 1)) mock.ExpectCommit().WillReturnError(fmt.Errorf("pop")) err := s.UpsertMessage(context.Background(), &core.Message{Header: core.MessageHeader{ID: msgID}}, database.UpsertOptimizationSkip) - assert.Regexp(t, "FF10119", err) + assert.Regexp(t, "FF00180", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -304,7 +304,7 @@ func TestInsertMessagesBeginFail(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.InsertMessages(context.Background(), []*core.Message{}) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) s.callbacks.AssertExpectations(t) } @@ -320,11 +320,11 @@ func TestInsertMessagesMultiRowOK(t *testing.T) { s.callbacks.On("OrderedUUIDCollectionNSEvent", database.CollectionMessages, core.ChangeEventTypeCreated, "ns1", msg2.Header.ID, int64(1002)) mock.ExpectBegin() - mock.ExpectQuery("INSERT.*messages").WillReturnRows(sqlmock.NewRows([]string{sequenceColumn}). + mock.ExpectQuery("INSERT.*messages").WillReturnRows(sqlmock.NewRows([]string{s.SequenceColumn()}). AddRow(int64(1001)). 
AddRow(int64(1002)), ) - mock.ExpectQuery("INSERT.*messages_data").WillReturnRows(sqlmock.NewRows([]string{sequenceColumn}). + mock.ExpectQuery("INSERT.*messages_data").WillReturnRows(sqlmock.NewRows([]string{s.SequenceColumn()}). AddRow(int64(1003)). AddRow(int64(1004)), ) @@ -347,10 +347,10 @@ func TestInsertMessagesMultiRowDataRefsFail(t *testing.T) { msg1 := &core.Message{Header: core.MessageHeader{ID: fftypes.NewUUID(), Namespace: "ns1"}, Data: core.DataRefs{{ID: fftypes.NewUUID()}}} mock.ExpectBegin() - mock.ExpectQuery("INSERT.*messages").WillReturnRows(sqlmock.NewRows([]string{sequenceColumn}).AddRow(int64(1001))) + mock.ExpectQuery("INSERT.*messages").WillReturnRows(sqlmock.NewRows([]string{s.SequenceColumn()}).AddRow(int64(1001))) mock.ExpectQuery("INSERT.*messages_data").WillReturnError(fmt.Errorf("pop")) err := s.InsertMessages(context.Background(), []*core.Message{msg1}) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) s.callbacks.AssertExpectations(t) } @@ -363,7 +363,7 @@ func TestInsertMessagesMultiRowFail(t *testing.T) { mock.ExpectBegin() mock.ExpectQuery("INSERT.*").WillReturnError(fmt.Errorf("pop")) err := s.InsertMessages(context.Background(), []*core.Message{msg1}) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) s.callbacks.AssertExpectations(t) } @@ -374,7 +374,7 @@ func TestInsertMessagesSingleRowFail(t *testing.T) { mock.ExpectBegin() mock.ExpectExec("INSERT.*").WillReturnError(fmt.Errorf("pop")) err := s.InsertMessages(context.Background(), []*core.Message{msg1}) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) s.callbacks.AssertExpectations(t) } @@ -386,7 +386,7 @@ func TestInsertMessagesSingleRowFailDataRefs(t *testing.T) { mock.ExpectExec("INSERT.*messages").WillReturnResult(sqlmock.NewResult(1, 1)) mock.ExpectExec("INSERT.*messages_data").WillReturnError(fmt.Errorf("pop")) err := s.InsertMessages(context.Background(), []*core.Message{msg1}) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) s.callbacks.AssertExpectations(t) } @@ -396,7 +396,7 @@ func TestReplaceMessageFailBegin(t *testing.T) { mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) msgID := fftypes.NewUUID() err := s.ReplaceMessage(context.Background(), &core.Message{Header: core.MessageHeader{ID: msgID}}) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -407,7 +407,7 @@ func TestReplaceMessageFailDelete(t *testing.T) { mock.ExpectRollback() msgID := fftypes.NewUUID() err := s.ReplaceMessage(context.Background(), &core.Message{Header: core.MessageHeader{ID: msgID}}) - assert.Regexp(t, "FF10118", err) + assert.Regexp(t, "FF00179", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -419,7 +419,7 @@ func TestReplaceMessageFailInsert(t *testing.T) { mock.ExpectRollback() msgID := fftypes.NewUUID() err := s.ReplaceMessage(context.Background(), &core.Message{Header: core.MessageHeader{ID: msgID}}) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -427,8 +427,9 @@ func TestUpdateMessageDataRefsNilID(t *testing.T) { s, mock := newMockProvider().init() msgID := fftypes.NewUUID() mock.ExpectBegin() - tx, _ := s.db.Begin() - err := s.updateMessageDataRefs(context.Background(), &txWrapper{sqlTX: tx}, 
&core.Message{ + ctx, tx, _, err := s.BeginOrUseTx(context.Background()) + assert.NoError(t, err) + err = s.updateMessageDataRefs(ctx, tx, &core.Message{ Header: core.MessageHeader{ID: msgID}, Data: []*core.DataRef{{ID: nil}}, }, false) @@ -440,8 +441,9 @@ func TestUpdateMessageDataRefsNilHash(t *testing.T) { s, mock := newMockProvider().init() msgID := fftypes.NewUUID() mock.ExpectBegin() - tx, _ := s.db.Begin() - err := s.updateMessageDataRefs(context.Background(), &txWrapper{sqlTX: tx}, &core.Message{ + ctx, tx, _, err := s.BeginOrUseTx(context.Background()) + assert.NoError(t, err) + err = s.updateMessageDataRefs(ctx, tx, &core.Message{ Header: core.MessageHeader{ID: msgID}, Data: []*core.DataRef{{ID: fftypes.NewUUID()}}, }, false) @@ -453,12 +455,13 @@ func TestUpdateMessageDataDeleteFail(t *testing.T) { s, mock := newMockProvider().init() msgID := fftypes.NewUUID() mock.ExpectBegin() - tx, _ := s.db.Begin() + ctx, tx, _, err := s.BeginOrUseTx(context.Background()) + assert.NoError(t, err) mock.ExpectExec("DELETE .*").WillReturnError(fmt.Errorf("pop")) - err := s.updateMessageDataRefs(context.Background(), &txWrapper{sqlTX: tx}, &core.Message{ + err = s.updateMessageDataRefs(ctx, tx, &core.Message{ Header: core.MessageHeader{ID: msgID}, }, true) - assert.Regexp(t, "FF10118", err) + assert.Regexp(t, "FF00179", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -468,13 +471,14 @@ func TestUpdateMessageDataAddFail(t *testing.T) { dataID := fftypes.NewUUID() dataHash := fftypes.NewRandB32() mock.ExpectBegin() - tx, _ := s.db.Begin() + ctx, tx, _, err := s.BeginOrUseTx(context.Background()) + assert.NoError(t, err) mock.ExpectExec("INSERT .*").WillReturnError(fmt.Errorf("pop")) - err := s.updateMessageDataRefs(context.Background(), &txWrapper{sqlTX: tx}, &core.Message{ + err = s.updateMessageDataRefs(ctx, tx, &core.Message{ Header: core.MessageHeader{ID: msgID}, Data: []*core.DataRef{{ID: dataID, Hash: dataHash}}, }, false) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -487,7 +491,7 @@ func TestLoadMessageDataRefsQueryFail(t *testing.T) { Header: core.MessageHeader{ID: msgID}, }, }) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -520,7 +524,7 @@ func TestGetMessageByIDSelectFail(t *testing.T) { msgID := fftypes.NewUUID() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) _, err := s.GetMessageByID(context.Background(), "ns1", msgID) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -553,7 +557,7 @@ func TestGetMessageByIDLoadRefsFail(t *testing.T) { AddRow(msgID.String(), nil, core.MessageTypeBroadcast, "author1", "0x12345", 0, "ns1", "ns1", "t1", "c1", nil, b32.String(), b32.String(), b32.String(), "confirmed", 0, "pin", nil, "bob", 0)) mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) _, err := s.GetMessageByID(context.Background(), "ns1", msgID) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -569,7 +573,7 @@ func TestGetMessagesQueryFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.MessageQueryFactory.NewFilter(context.Background()).Eq("id", "") _, _, err := s.GetMessages(context.Background(), "ns1", f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, 
mock.ExpectationsWereMet()) } @@ -601,7 +605,7 @@ func TestGetMessagesLoadRefsFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.MessageQueryFactory.NewFilter(context.Background()).Gt("confirmed", "0") _, _, err := s.GetMessages(context.Background(), "ns1", f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -610,7 +614,7 @@ func TestMessageUpdateBeginFail(t *testing.T) { mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) u := database.MessageQueryFactory.NewUpdate(context.Background()).Set("id", "anything") err := s.UpdateMessage(context.Background(), "ns1", fftypes.NewUUID(), u) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) } func TestGetMessageIDsQueryFail(t *testing.T) { @@ -618,7 +622,7 @@ func TestGetMessageIDsQueryFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.MessageQueryFactory.NewFilter(context.Background()).Eq("id", "") _, err := s.GetMessageIDs(context.Background(), "ns1", f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -663,7 +667,7 @@ func TestMessageUpdateFail(t *testing.T) { mock.ExpectRollback() u := database.MessageQueryFactory.NewUpdate(context.Background()).Set("group", fftypes.NewRandB32()) err := s.UpdateMessage(context.Background(), "ns1", fftypes.NewUUID(), u) - assert.Regexp(t, "FF10117", err) + assert.Regexp(t, "FF00178", err) } func TestGetBatchIDsForMessagesSelectFail(t *testing.T) { @@ -671,7 +675,7 @@ func TestGetBatchIDsForMessagesSelectFail(t *testing.T) { msgID := fftypes.NewUUID() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) _, err := s.GetBatchIDsForMessages(context.Background(), "ns1", []*fftypes.UUID{msgID}) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } diff --git a/internal/database/sqlcommon/namespace_sql.go b/internal/database/sqlcommon/namespace_sql.go index 2e7a7b73ee..3ad5b9c989 100644 --- a/internal/database/sqlcommon/namespace_sql.go +++ b/internal/database/sqlcommon/namespace_sql.go @@ -40,16 +40,16 @@ var ( const namespacesTable = "namespaces" func (s *SQLCommon) UpsertNamespace(ctx context.Context, namespace *core.Namespace, allowExisting bool) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) existing := false if allowExisting { // Do a select within the transaction to determine if the UUID already exists - namespaceRows, _, err := s.queryTx(ctx, namespacesTable, tx, + namespaceRows, _, err := s.QueryTx(ctx, namespacesTable, tx, sq.Select("seq"). From(namespacesTable). Where(sq.Eq{"name": namespace.Name}), @@ -63,7 +63,7 @@ func (s *SQLCommon) UpsertNamespace(ctx context.Context, namespace *core.Namespa if existing { // Update the namespace - if _, err = s.updateTx(ctx, namespacesTable, tx, + if _, err = s.UpdateTx(ctx, namespacesTable, tx, sq.Update(namespacesTable). Set("remote_name", namespace.NetworkName). Set("description", namespace.Description). @@ -75,7 +75,7 @@ func (s *SQLCommon) UpsertNamespace(ctx context.Context, namespace *core.Namespa return err } } else { - if _, err = s.insertTx(ctx, namespacesTable, tx, + if _, err = s.InsertTx(ctx, namespacesTable, tx, sq.Insert(namespacesTable). 
Columns(namespaceColumns...). Values( @@ -91,7 +91,7 @@ func (s *SQLCommon) UpsertNamespace(ctx context.Context, namespace *core.Namespa } } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) namespaceResult(ctx context.Context, row *sql.Rows) (*core.Namespace, error) { @@ -110,7 +110,7 @@ func (s *SQLCommon) namespaceResult(ctx context.Context, row *sql.Rows) (*core.N } func (s *SQLCommon) getNamespaceEq(ctx context.Context, eq sq.Eq, textName string) (message *core.Namespace, err error) { - rows, _, err := s.query(ctx, namespacesTable, + rows, _, err := s.Query(ctx, namespacesTable, sq.Select(namespaceColumns...). From(namespacesTable). Where(eq), diff --git a/internal/database/sqlcommon/namespace_sql_test.go b/internal/database/sqlcommon/namespace_sql_test.go index b0928537a8..e7e5bac8d1 100644 --- a/internal/database/sqlcommon/namespace_sql_test.go +++ b/internal/database/sqlcommon/namespace_sql_test.go @@ -83,7 +83,7 @@ func TestUpsertNamespaceFailBegin(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.UpsertNamespace(context.Background(), &core.Namespace{}, true) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -93,7 +93,7 @@ func TestUpsertNamespaceFailSelect(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpsertNamespace(context.Background(), &core.Namespace{Name: "name1"}, true) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -104,7 +104,7 @@ func TestUpsertNamespaceFailInsert(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpsertNamespace(context.Background(), &core.Namespace{Name: "name1"}, true) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -116,7 +116,7 @@ func TestUpsertNamespaceFailUpdate(t *testing.T) { mock.ExpectExec("UPDATE .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpsertNamespace(context.Background(), &core.Namespace{Name: "name1"}, true) - assert.Regexp(t, "FF10117", err) + assert.Regexp(t, "FF00178", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -127,7 +127,7 @@ func TestUpsertNamespaceFailCommit(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnResult(sqlmock.NewResult(1, 1)) mock.ExpectCommit().WillReturnError(fmt.Errorf("pop")) err := s.UpsertNamespace(context.Background(), &core.Namespace{Name: "name1"}, true) - assert.Regexp(t, "FF10119", err) + assert.Regexp(t, "FF00180", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -135,7 +135,7 @@ func TestGetNamespaceByIDSelectFail(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) _, err := s.GetNamespace(context.Background(), "name1") - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } diff --git a/internal/database/sqlcommon/nextpin_sql.go b/internal/database/sqlcommon/nextpin_sql.go index fa5a47ebba..ee9f1ae396 100644 --- a/internal/database/sqlcommon/nextpin_sql.go +++ b/internal/database/sqlcommon/nextpin_sql.go @@ -21,11 +21,11 @@ import ( "database/sql" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/ffapi" 
"github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly/internal/coremsgs" "github.com/hyperledger/firefly/pkg/core" - "github.com/hyperledger/firefly/pkg/database" ) var ( @@ -41,13 +41,13 @@ var ( const nextpinsTable = "nextpins" func (s *SQLCommon) InsertNextPin(ctx context.Context, nextpin *core.NextPin) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) - sequence, err := s.insertTx(ctx, nextpinsTable, tx, + sequence, err := s.InsertTx(ctx, nextpinsTable, tx, sq.Insert(nextpinsTable). Columns(nextpinColumns...). Values( @@ -64,7 +64,7 @@ func (s *SQLCommon) InsertNextPin(ctx context.Context, nextpin *core.NextPin) (e } nextpin.Sequence = sequence - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) nextpinResult(ctx context.Context, row *sql.Rows) (*core.NextPin, error) { @@ -86,9 +86,9 @@ func (s *SQLCommon) nextpinResult(ctx context.Context, row *sql.Rows) (*core.Nex func (s *SQLCommon) GetNextPinsForContext(ctx context.Context, namespace string, context *fftypes.Bytes32) (message []*core.NextPin, err error) { cols := append([]string{}, nextpinColumns...) - cols = append(cols, sequenceColumn) + cols = append(cols, s.SequenceColumn()) - rows, _, err := s.query(ctx, nextpinsTable, sq.Select(cols...).From(nextpinsTable). + rows, _, err := s.Query(ctx, nextpinsTable, sq.Select(cols...).From(nextpinsTable). Where(sq.And{ sq.Eq{"context": context}, sq.Or{sq.Eq{"namespace": namespace}, sq.Eq{"namespace": nil}}, // old entries will have NULL for namespace @@ -111,25 +111,25 @@ func (s *SQLCommon) GetNextPinsForContext(ctx context.Context, namespace string, } -func (s *SQLCommon) UpdateNextPin(ctx context.Context, namespace string, sequence int64, update database.Update) (err error) { +func (s *SQLCommon) UpdateNextPin(ctx context.Context, namespace string, sequence int64, update ffapi.Update) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) - query, err := s.buildUpdate(sq.Update(nextpinsTable), update, pinFilterFieldMap) + query, err := s.BuildUpdate(sq.Update(nextpinsTable), update, pinFilterFieldMap) if err != nil { return err } query = query.Set("namespace", namespace) // always populate namespace (to migrate the table over time) - query = query.Where(sq.Eq{sequenceColumn: sequence}) + query = query.Where(sq.Eq{s.SequenceColumn(): sequence}) - _, err = s.updateTx(ctx, nextpinsTable, tx, query, nil /* no change events for next pins */) + _, err = s.UpdateTx(ctx, nextpinsTable, tx, query, nil /* no change events for next pins */) if err != nil { return err } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } diff --git a/internal/database/sqlcommon/nextpin_sql_test.go b/internal/database/sqlcommon/nextpin_sql_test.go index f05d909e00..5f5ba1d330 100644 --- a/internal/database/sqlcommon/nextpin_sql_test.go +++ b/internal/database/sqlcommon/nextpin_sql_test.go @@ -79,7 +79,7 @@ func TestUpsertNextPinFailBegin(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.InsertNextPin(context.Background(), &core.NextPin{}) - 
assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -89,7 +89,7 @@ func TestUpsertNextPinFailInsert(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.InsertNextPin(context.Background(), &core.NextPin{Context: fftypes.NewRandB32()}) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -99,7 +99,7 @@ func TestUpsertNextPinFailCommit(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnResult(sqlmock.NewResult(1, 1)) mock.ExpectCommit().WillReturnError(fmt.Errorf("pop")) err := s.InsertNextPin(context.Background(), &core.NextPin{Context: fftypes.NewRandB32()}) - assert.Regexp(t, "FF10119", err) + assert.Regexp(t, "FF00180", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -107,7 +107,7 @@ func TestGetNextPinQueryFail(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) _, err := s.GetNextPinsForContext(context.Background(), "ns", fftypes.NewRandB32()) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -124,7 +124,7 @@ func TestNextPinUpdateBeginFail(t *testing.T) { mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) u := database.NextPinQueryFactory.NewUpdate(context.Background()).Set("context", "anything") err := s.UpdateNextPin(context.Background(), "ns", 12345, u) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) } func TestNextPinUpdateBuildQueryFail(t *testing.T) { @@ -142,5 +142,5 @@ func TestNextPinUpdateFail(t *testing.T) { mock.ExpectRollback() u := database.NextPinQueryFactory.NewUpdate(context.Background()).Set("context", fftypes.NewRandB32()) err := s.UpdateNextPin(context.Background(), "ns", 12345, u) - assert.Regexp(t, "FF10117", err) + assert.Regexp(t, "FF00178", err) } diff --git a/internal/database/sqlcommon/nonce_sql.go b/internal/database/sqlcommon/nonce_sql.go index 8b9a42437b..e42efca3fb 100644 --- a/internal/database/sqlcommon/nonce_sql.go +++ b/internal/database/sqlcommon/nonce_sql.go @@ -21,12 +21,12 @@ import ( "database/sql" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" "github.com/hyperledger/firefly/internal/coremsgs" "github.com/hyperledger/firefly/pkg/core" - "github.com/hyperledger/firefly/pkg/database" ) var ( @@ -40,14 +40,14 @@ var ( const noncesTable = "nonces" func (s *SQLCommon) UpdateNonce(ctx context.Context, nonce *core.Nonce) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) // Update the nonce - if _, err = s.updateTx(ctx, noncesTable, tx, + if _, err = s.UpdateTx(ctx, noncesTable, tx, sq.Update(noncesTable). Set("nonce", nonce.Nonce). 
Where(sq.Eq{"hash": nonce.Hash}), @@ -56,18 +56,18 @@ func (s *SQLCommon) UpdateNonce(ctx context.Context, nonce *core.Nonce) (err err return err } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) InsertNonce(ctx context.Context, nonce *core.Nonce) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) // Insert the nonce - if _, err = s.insertTx(ctx, noncesTable, tx, + if _, err = s.InsertTx(ctx, noncesTable, tx, sq.Insert(noncesTable). Columns(nonceColumns...). Values( @@ -79,7 +79,7 @@ func (s *SQLCommon) InsertNonce(ctx context.Context, nonce *core.Nonce) (err err return err } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) nonceResult(ctx context.Context, row *sql.Rows) (*core.Nonce, error) { @@ -96,7 +96,7 @@ func (s *SQLCommon) nonceResult(ctx context.Context, row *sql.Rows) (*core.Nonce func (s *SQLCommon) GetNonce(ctx context.Context, hash *fftypes.Bytes32) (message *core.Nonce, err error) { - rows, _, err := s.query(ctx, noncesTable, + rows, _, err := s.Query(ctx, noncesTable, sq.Select(nonceColumns...). From(noncesTable). Where(sq.Eq{"hash": hash}), @@ -119,14 +119,14 @@ func (s *SQLCommon) GetNonce(ctx context.Context, hash *fftypes.Bytes32) (messag return nonce, nil } -func (s *SQLCommon) GetNonces(ctx context.Context, filter database.Filter) (message []*core.Nonce, fr *database.FilterResult, err error) { +func (s *SQLCommon) GetNonces(ctx context.Context, filter ffapi.Filter) (message []*core.Nonce, fr *ffapi.FilterResult, err error) { - query, fop, fi, err := s.filterSelect(ctx, "", sq.Select(nonceColumns...).From(noncesTable), filter, nonceFilterFieldMap, []interface{}{"sequence"}) + query, fop, fi, err := s.FilterSelect(ctx, "", sq.Select(nonceColumns...).From(noncesTable), filter, nonceFilterFieldMap, []interface{}{"sequence"}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, noncesTable, query) + rows, tx, err := s.Query(ctx, noncesTable, query) if err != nil { return nil, nil, err } @@ -141,24 +141,24 @@ func (s *SQLCommon) GetNonces(ctx context.Context, filter database.Filter) (mess nonce = append(nonce, d) } - return nonce, s.queryRes(ctx, noncesTable, tx, fop, fi), err + return nonce, s.QueryRes(ctx, noncesTable, tx, fop, fi), err } func (s *SQLCommon) DeleteNonce(ctx context.Context, hash *fftypes.Bytes32) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) - err = s.deleteTx(ctx, noncesTable, tx, sq.Delete(noncesTable).Where(sq.Eq{ + err = s.DeleteTx(ctx, noncesTable, tx, sq.Delete(noncesTable).Where(sq.Eq{ "hash": hash, }), nil /* no change events for nonces */) if err != nil { return err } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } diff --git a/internal/database/sqlcommon/nonce_sql_test.go b/internal/database/sqlcommon/nonce_sql_test.go index 9f6cd4bbdf..e24543d4ff 100644 --- a/internal/database/sqlcommon/nonce_sql_test.go +++ b/internal/database/sqlcommon/nonce_sql_test.go @@ -96,7 +96,7 @@ func TestInsertNonceFailBegin(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := 
s.InsertNonce(context.Background(), &core.Nonce{}) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -106,7 +106,7 @@ func TestInsertNonceFailInsert(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.InsertNonce(context.Background(), &core.Nonce{Hash: fftypes.NewRandB32()}) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -114,7 +114,7 @@ func TestUpdateNonceFailBegin(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.UpdateNonce(context.Background(), &core.Nonce{}) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -124,7 +124,7 @@ func TestUpdateNonceFailUpdate(t *testing.T) { mock.ExpectExec("UPDATE .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpdateNonce(context.Background(), &core.Nonce{Hash: fftypes.NewRandB32()}) - assert.Regexp(t, "FF10117", err) + assert.Regexp(t, "FF00178", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -132,7 +132,7 @@ func TestGetNonceSelectFail(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) _, err := s.GetNonce(context.Background(), fftypes.NewRandB32()) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -158,7 +158,7 @@ func TestGetNonceQueryFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.NonceQueryFactory.NewFilter(context.Background()).Eq("hash", "") _, _, err := s.GetNonces(context.Background(), f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -182,7 +182,7 @@ func TestNonceDeleteBeginFail(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.DeleteNonce(context.Background(), fftypes.NewRandB32()) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) } func TestNonceDeleteFail(t *testing.T) { @@ -191,5 +191,5 @@ func TestNonceDeleteFail(t *testing.T) { mock.ExpectExec("DELETE .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.DeleteNonce(context.Background(), fftypes.NewRandB32()) - assert.Regexp(t, "FF10118", err) + assert.Regexp(t, "FF00179", err) } diff --git a/internal/database/sqlcommon/offset_sql.go b/internal/database/sqlcommon/offset_sql.go index a194e50b9d..d84a3cb312 100644 --- a/internal/database/sqlcommon/offset_sql.go +++ b/internal/database/sqlcommon/offset_sql.go @@ -21,11 +21,11 @@ import ( "database/sql" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" "github.com/hyperledger/firefly/internal/coremsgs" "github.com/hyperledger/firefly/pkg/core" - "github.com/hyperledger/firefly/pkg/database" ) var ( @@ -42,17 +42,17 @@ var ( const offsetsTable = "offsets" func (s *SQLCommon) UpsertOffset(ctx context.Context, offset *core.Offset, allowExisting bool) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) existing := 
false if allowExisting { // Do a select within the transaction to detemine if the UUID already exists - offsetRows, _, err := s.queryTx(ctx, offsetsTable, tx, - sq.Select(sequenceColumn). + offsetRows, _, err := s.QueryTx(ctx, offsetsTable, tx, + sq.Select(s.SequenceColumn()). From(offsetsTable). Where( sq.Eq{"otype": offset.Type, @@ -75,18 +75,18 @@ func (s *SQLCommon) UpsertOffset(ctx context.Context, offset *core.Offset, allow if existing { // Update the offset - if _, err = s.updateTx(ctx, offsetsTable, tx, + if _, err = s.UpdateTx(ctx, offsetsTable, tx, sq.Update(offsetsTable). Set("otype", string(offset.Type)). Set("name", offset.Name). Set("current", offset.Current). - Where(sq.Eq{sequenceColumn: offset.RowID}), + Where(sq.Eq{s.SequenceColumn(): offset.RowID}), nil, // offsets do not have events ); err != nil { return err } } else { - if offset.RowID, err = s.insertTx(ctx, offsetsTable, tx, + if offset.RowID, err = s.InsertTx(ctx, offsetsTable, tx, sq.Insert(offsetsTable). Columns(offsetColumns...). Values( @@ -100,7 +100,7 @@ func (s *SQLCommon) UpsertOffset(ctx context.Context, offset *core.Offset, allow } } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) offsetResult(ctx context.Context, row *sql.Rows) (*core.Offset, error) { @@ -109,7 +109,7 @@ func (s *SQLCommon) offsetResult(ctx context.Context, row *sql.Rows) (*core.Offs &offset.Type, &offset.Name, &offset.Current, - &offset.RowID, // must include sequenceColumn in colum list + &offset.RowID, // must include s.SequenceColumn() in colum list ) if err != nil { return nil, i18n.WrapError(ctx, err, coremsgs.MsgDBReadErr, offsetsTable) @@ -120,8 +120,8 @@ func (s *SQLCommon) offsetResult(ctx context.Context, row *sql.Rows) (*core.Offs func (s *SQLCommon) GetOffset(ctx context.Context, t core.OffsetType, name string) (message *core.Offset, err error) { cols := append([]string{}, offsetColumns...) - cols = append(cols, sequenceColumn) - rows, _, err := s.query(ctx, offsetsTable, + cols = append(cols, s.SequenceColumn()) + rows, _, err := s.Query(ctx, offsetsTable, sq.Select(cols...). From(offsetsTable). Where(sq.Eq{ @@ -147,16 +147,16 @@ func (s *SQLCommon) GetOffset(ctx context.Context, t core.OffsetType, name strin return offset, nil } -func (s *SQLCommon) GetOffsets(ctx context.Context, filter database.Filter) (message []*core.Offset, fr *database.FilterResult, err error) { +func (s *SQLCommon) GetOffsets(ctx context.Context, filter ffapi.Filter) (message []*core.Offset, fr *ffapi.FilterResult, err error) { cols := append([]string{}, offsetColumns...) 
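The write-path changes in these stores all follow one pattern: the helpers that used to be unexported on SQLCommon (beginOrUseTx, queryTx, insertTx, updateTx, deleteTx, commitTx, rollbackTx) are replaced by the exported equivalents SQLCommon now inherits from firefly-common, so each function keeps its exact shape and only the casing and origin of the helpers change. A minimal sketch of that shape, assuming it sits in package sqlcommon alongside the stores above; the "widgets" table, its columns and the insertWidget name are illustrative and not part of this changeset:

    // Assumed imports: sq "github.com/Masterminds/squirrel" and
    // "github.com/hyperledger/firefly-common/pkg/fftypes" (both already used in these files).
    func (s *SQLCommon) insertWidget(ctx context.Context, id *fftypes.UUID, name string) error {
        ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx)
        if err != nil {
            return err
        }
        // Safe to defer: a no-op once CommitTx has completed, as with the old private helper
        defer s.RollbackTx(ctx, tx, autoCommit)

        if _, err = s.InsertTx(ctx, "widgets", tx,
            sq.Insert("widgets").Columns("id", "name").Values(id, name),
            nil, // no change event emitted in this sketch
        ); err != nil {
            return err
        }
        return s.CommitTx(ctx, tx, autoCommit)
    }

If a transaction is already on the context, BeginOrUseTx hands it back with autoCommit set, so the deferred rollback and the final commit become no-ops and the caller's outer transaction stays in control, exactly as before the move.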
- cols = append(cols, sequenceColumn) - query, fop, fi, err := s.filterSelect(ctx, "", sq.Select(cols...).From(offsetsTable), filter, offsetFilterFieldMap, []interface{}{"sequence"}) + cols = append(cols, s.SequenceColumn()) + query, fop, fi, err := s.FilterSelect(ctx, "", sq.Select(cols...).From(offsetsTable), filter, offsetFilterFieldMap, []interface{}{"sequence"}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, offsetsTable, query) + rows, tx, err := s.Query(ctx, offsetsTable, query) if err != nil { return nil, nil, err } @@ -171,52 +171,52 @@ func (s *SQLCommon) GetOffsets(ctx context.Context, filter database.Filter) (mes offset = append(offset, d) } - return offset, s.queryRes(ctx, offsetsTable, tx, fop, fi), err + return offset, s.QueryRes(ctx, offsetsTable, tx, fop, fi), err } -func (s *SQLCommon) UpdateOffset(ctx context.Context, rowID int64, update database.Update) (err error) { +func (s *SQLCommon) UpdateOffset(ctx context.Context, rowID int64, update ffapi.Update) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) - query, err := s.buildUpdate(sq.Update(offsetsTable), update, offsetFilterFieldMap) + query, err := s.BuildUpdate(sq.Update(offsetsTable), update, offsetFilterFieldMap) if err != nil { return err } - query = query.Where(sq.Eq{sequenceColumn: rowID}) + query = query.Where(sq.Eq{s.SequenceColumn(): rowID}) - _, err = s.updateTx(ctx, offsetsTable, tx, query, nil /* offsets do not have change events */) + _, err = s.UpdateTx(ctx, offsetsTable, tx, query, nil /* offsets do not have change events */) if err != nil { return err } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) DeleteOffset(ctx context.Context, t core.OffsetType, name string) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) offset, err := s.GetOffset(ctx, t, name) if err != nil { return err } if offset != nil { - err = s.deleteTx(ctx, offsetsTable, tx, sq.Delete(offsetsTable).Where(sq.Eq{ - sequenceColumn: offset.RowID, + err = s.DeleteTx(ctx, offsetsTable, tx, sq.Delete(offsetsTable).Where(sq.Eq{ + s.SequenceColumn(): offset.RowID, }), nil /* offsets do not have change events */) if err != nil { return err } } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } diff --git a/internal/database/sqlcommon/offset_sql_test.go b/internal/database/sqlcommon/offset_sql_test.go index 9dcbe2ca9b..38a19b342f 100644 --- a/internal/database/sqlcommon/offset_sql_test.go +++ b/internal/database/sqlcommon/offset_sql_test.go @@ -121,7 +121,7 @@ func TestUpsertOffsetFailBegin(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.UpsertOffset(context.Background(), &core.Offset{}, true) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -131,7 +131,7 @@ func TestUpsertOffsetFailSelect(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpsertOffset(context.Background(), &core.Offset{Name: "name1"}, true) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) 
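The assertion updates running through these _test.go files are the other visible effect of the move: the generic begin/query/insert/update/delete/commit failures are now raised by firefly-common rather than by FireFly's own coremsgs, so FF10114 becomes FF00175 (begin), FF10115 becomes FF00176 (query), FF10116 becomes FF00177 (insert), FF10117 becomes FF00178 (update), FF10118 becomes FF00179 (delete) and FF10119 becomes FF00180 (commit). A sketch of the updated expectation style, reusing the hypothetical insertWidget sketched earlier, so the table and helper are illustrative while the mock setup mirrors the real tests in this patch:

    func TestInsertWidgetFailBegin(t *testing.T) {
        s, mock := newMockProvider().init()
        mock.ExpectBegin().WillReturnError(fmt.Errorf("pop"))
        err := s.insertWidget(context.Background(), fftypes.NewUUID(), "w1")
        // Begin failures now surface the firefly-common code, not the old FF10114
        assert.Regexp(t, "FF00175", err)
        assert.NoError(t, mock.ExpectationsWereMet())
    }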
assert.NoError(t, mock.ExpectationsWereMet()) } @@ -152,30 +152,30 @@ func TestUpsertOffsetFailInsert(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpsertOffset(context.Background(), &core.Offset{Name: "name1"}, true) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } func TestUpsertOffsetFailUpdate(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin() - mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{sequenceColumn}). + mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{s.SequenceColumn()}). AddRow(int64(12345))) mock.ExpectExec("UPDATE .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpsertOffset(context.Background(), &core.Offset{Name: "name1"}, true) - assert.Regexp(t, "FF10117", err) + assert.Regexp(t, "FF00178", err) assert.NoError(t, mock.ExpectationsWereMet()) } func TestUpsertOffsetFailCommit(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin() - mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{sequenceColumn})) + mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{s.SequenceColumn()})) mock.ExpectExec("INSERT .*").WillReturnResult(sqlmock.NewResult(1, 1)) mock.ExpectCommit().WillReturnError(fmt.Errorf("pop")) err := s.UpsertOffset(context.Background(), &core.Offset{Name: "name1"}, true) - assert.Regexp(t, "FF10119", err) + assert.Regexp(t, "FF00180", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -183,7 +183,7 @@ func TestGetOffsetByIDSelectFail(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) _, err := s.GetOffset(context.Background(), core.OffsetTypeBatch, "name1") - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -209,7 +209,7 @@ func TestGetOffsetQueryFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.OffsetQueryFactory.NewFilter(context.Background()).Eq("type", "") _, _, err := s.GetOffsets(context.Background(), f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -234,7 +234,7 @@ func TestOffsetUpdateBeginFail(t *testing.T) { mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) u := database.OffsetQueryFactory.NewUpdate(context.Background()).Set("name", "anything") err := s.UpdateOffset(context.Background(), 12345, u) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) } func TestOffsetUpdateBuildQueryFail(t *testing.T) { @@ -252,14 +252,14 @@ func TestOffsetUpdateFail(t *testing.T) { mock.ExpectRollback() u := database.OffsetQueryFactory.NewUpdate(context.Background()).Set("name", fftypes.NewUUID()) err := s.UpdateOffset(context.Background(), 12345, u) - assert.Regexp(t, "FF10117", err) + assert.Regexp(t, "FF00178", err) } func TestOffsetDeleteBeginFail(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.DeleteOffset(context.Background(), core.OffsetTypeSubscription, "sub1") - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) } func TestOffsetDeleteSelectFailFail(t *testing.T) { @@ -268,18 +268,18 @@ func TestOffsetDeleteSelectFailFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := 
s.DeleteOffset(context.Background(), core.OffsetTypeSubscription, "sub1") - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) } func TestOffsetDeleteFail(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin() - cols := append(append([]string{}, offsetColumns...), sequenceColumn) + cols := append(append([]string{}, offsetColumns...), s.SequenceColumn()) mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows(cols).AddRow( core.OffsetTypeSubscription, "sub1", int64(12345), int64(12345), )) mock.ExpectExec("DELETE .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.DeleteOffset(context.Background(), core.OffsetTypeSubscription, "sub1") - assert.Regexp(t, "FF10118", err) + assert.Regexp(t, "FF00179", err) } diff --git a/internal/database/sqlcommon/operation_sql.go b/internal/database/sqlcommon/operation_sql.go index 7c1ea42a18..82eba5ed23 100644 --- a/internal/database/sqlcommon/operation_sql.go +++ b/internal/database/sqlcommon/operation_sql.go @@ -21,6 +21,7 @@ import ( "database/sql" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" @@ -55,13 +56,13 @@ var ( const operationsTable = "operations" func (s *SQLCommon) InsertOperation(ctx context.Context, operation *core.Operation, hooks ...database.PostCompletionHook) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) - if _, err = s.insertTx(ctx, operationsTable, tx, + if _, err = s.InsertTx(ctx, operationsTable, tx, sq.Insert(operationsTable). Columns(opColumns...). Values( @@ -88,7 +89,7 @@ func (s *SQLCommon) InsertOperation(ctx context.Context, operation *core.Operati return err } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) opResult(ctx context.Context, row *sql.Rows) (*core.Operation, error) { @@ -115,7 +116,7 @@ func (s *SQLCommon) opResult(ctx context.Context, row *sql.Rows) (*core.Operatio func (s *SQLCommon) GetOperationByID(ctx context.Context, namespace string, id *fftypes.UUID) (operation *core.Operation, err error) { - rows, _, err := s.query(ctx, operationsTable, + rows, _, err := s.Query(ctx, operationsTable, sq.Select(opColumns...). From(operationsTable). 
Where(sq.Eq{"id": id, "namespace": namespace}), @@ -138,14 +139,14 @@ func (s *SQLCommon) GetOperationByID(ctx context.Context, namespace string, id * return op, nil } -func (s *SQLCommon) GetOperations(ctx context.Context, namespace string, filter database.Filter) (operation []*core.Operation, fr *database.FilterResult, err error) { +func (s *SQLCommon) GetOperations(ctx context.Context, namespace string, filter ffapi.Filter) (operation []*core.Operation, fr *ffapi.FilterResult, err error) { - query, fop, fi, err := s.filterSelect(ctx, "", sq.Select(opColumns...).From(operationsTable), filter, opFilterFieldMap, []interface{}{"sequence"}, sq.Eq{"namespace": namespace}) + query, fop, fi, err := s.FilterSelect(ctx, "", sq.Select(opColumns...).From(operationsTable), filter, opFilterFieldMap, []interface{}{"sequence"}, sq.Eq{"namespace": namespace}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, operationsTable, query) + rows, tx, err := s.Query(ctx, operationsTable, query) if err != nil { return nil, nil, err } @@ -160,18 +161,18 @@ func (s *SQLCommon) GetOperations(ctx context.Context, namespace string, filter ops = append(ops, op) } - return ops, s.queryRes(ctx, operationsTable, tx, fop, fi), err + return ops, s.QueryRes(ctx, operationsTable, tx, fop, fi), err } -func (s *SQLCommon) UpdateOperation(ctx context.Context, ns string, id *fftypes.UUID, update database.Update) (err error) { +func (s *SQLCommon) UpdateOperation(ctx context.Context, ns string, id *fftypes.UUID, update ffapi.Update) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) - query, err := s.buildUpdate(sq.Update(operationsTable), update, opFilterFieldMap) + query, err := s.BuildUpdate(sq.Update(operationsTable), update, opFilterFieldMap) if err != nil { return err } @@ -181,7 +182,7 @@ func (s *SQLCommon) UpdateOperation(ctx context.Context, ns string, id *fftypes. sq.Eq{"namespace": ns}, }) - ra, err := s.updateTx(ctx, operationsTable, tx, query, func() { + ra, err := s.UpdateTx(ctx, operationsTable, tx, query, func() { s.callbacks.UUIDCollectionNSEvent(database.CollectionOperations, core.ChangeEventTypeUpdated, ns, id) }) if err != nil { @@ -191,5 +192,5 @@ func (s *SQLCommon) UpdateOperation(ctx context.Context, ns string, id *fftypes. 
return i18n.NewError(ctx, coremsgs.Msg404NoResult) } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } diff --git a/internal/database/sqlcommon/operation_sql_test.go b/internal/database/sqlcommon/operation_sql_test.go index 7b88210ca0..57584ddaef 100644 --- a/internal/database/sqlcommon/operation_sql_test.go +++ b/internal/database/sqlcommon/operation_sql_test.go @@ -124,7 +124,7 @@ func TestInsertOperationFailBegin(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.InsertOperation(context.Background(), &core.Operation{}) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -135,7 +135,7 @@ func TestInsertOperationFailInsert(t *testing.T) { mock.ExpectRollback() operationID := fftypes.NewUUID() err := s.InsertOperation(context.Background(), &core.Operation{ID: operationID}) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -146,7 +146,7 @@ func TestInsertOperationFailCommit(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnResult(sqlmock.NewResult(1, 1)) mock.ExpectCommit().WillReturnError(fmt.Errorf("pop")) err := s.InsertOperation(context.Background(), &core.Operation{ID: operationID}) - assert.Regexp(t, "FF10119", err) + assert.Regexp(t, "FF00180", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -155,7 +155,7 @@ func TestGetOperationByIDSelectFail(t *testing.T) { operationID := fftypes.NewUUID() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) _, err := s.GetOperationByID(context.Background(), "ns1", operationID) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -183,7 +183,7 @@ func TestGetOperationsQueryFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.OperationQueryFactory.NewFilter(context.Background()).Eq("id", "") _, _, err := s.GetOperations(context.Background(), "ns1", f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -208,7 +208,7 @@ func TestOperationUpdateBeginFail(t *testing.T) { mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) u := database.OperationQueryFactory.NewUpdate(context.Background()).Set("id", fftypes.NewUUID()) err := s.UpdateOperation(context.Background(), "ns1", fftypes.NewUUID(), u) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) } func TestOperationUpdateBuildQueryFail(t *testing.T) { @@ -226,5 +226,5 @@ func TestOperationUpdateFail(t *testing.T) { mock.ExpectRollback() u := database.OperationQueryFactory.NewUpdate(context.Background()).Set("id", fftypes.NewUUID()) err := s.UpdateOperation(context.Background(), "ns1", fftypes.NewUUID(), u) - assert.Regexp(t, "FF10117", err) + assert.Regexp(t, "FF00178", err) } diff --git a/internal/database/sqlcommon/pin_sql.go b/internal/database/sqlcommon/pin_sql.go index a900a759ca..e0815f8bac 100644 --- a/internal/database/sqlcommon/pin_sql.go +++ b/internal/database/sqlcommon/pin_sql.go @@ -21,6 +21,8 @@ import ( "database/sql" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/dbsql" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" "github.com/hyperledger/firefly/internal/coremsgs" @@ -50,15 +52,15 @@ var ( const 
pinsTable = "pins" func (s *SQLCommon) UpsertPin(ctx context.Context, pin *core.Pin) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) // Do a select within the transaction to detemine if the UUID already exists - pinRows, tx, err := s.queryTx(ctx, pinsTable, tx, - sq.Select(sequenceColumn, "masked", "dispatched"). + pinRows, tx, err := s.QueryTx(ctx, pinsTable, tx, + sq.Select(s.SequenceColumn(), "masked", "dispatched"). From(pinsTable). Where(sq.Eq{ "hash": pin.Hash, @@ -87,11 +89,11 @@ func (s *SQLCommon) UpsertPin(ctx context.Context, pin *core.Pin) (err error) { } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } -func (s *SQLCommon) attemptPinInsert(ctx context.Context, tx *txWrapper, pin *core.Pin) (err error) { - pin.Sequence, err = s.insertTx(ctx, pinsTable, tx, +func (s *SQLCommon) attemptPinInsert(ctx context.Context, tx *dbsql.TXWrapper, pin *core.Pin) (err error) { + pin.Sequence, err = s.InsertTx(ctx, pinsTable, tx, s.setPinInsertValues(sq.Insert(pinsTable).Columns(pinColumns...), pin), func() { log.L(ctx).Debugf("Triggering creation event for pin %d", pin.Sequence) @@ -116,11 +118,11 @@ func (s *SQLCommon) setPinInsertValues(query sq.InsertBuilder, pin *core.Pin) sq } func (s *SQLCommon) InsertPins(ctx context.Context, pins []*core.Pin) error { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) if s.features.MultiRowInsert { query := sq.Insert(pinsTable).Columns(pinColumns...) @@ -128,7 +130,7 @@ func (s *SQLCommon) InsertPins(ctx context.Context, pins []*core.Pin) error { query = s.setPinInsertValues(query, pin) } sequences := make([]int64, len(pins)) - err := s.insertTxRows(ctx, pinsTable, tx, query, func() { + err := s.InsertTxRows(ctx, pinsTable, tx, query, func() { for i, pin := range pins { pin.Sequence = sequences[i] s.callbacks.OrderedCollectionNSEvent(database.CollectionPins, core.ChangeEventTypeCreated, pin.Namespace, pin.Sequence) @@ -146,7 +148,7 @@ func (s *SQLCommon) InsertPins(ctx context.Context, pins []*core.Pin) error { } } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) pinResult(ctx context.Context, row *sql.Rows) (*core.Pin, error) { @@ -169,18 +171,18 @@ func (s *SQLCommon) pinResult(ctx context.Context, row *sql.Rows) (*core.Pin, er return &pin, nil } -func (s *SQLCommon) GetPins(ctx context.Context, namespace string, filter database.Filter) (message []*core.Pin, fr *database.FilterResult, err error) { +func (s *SQLCommon) GetPins(ctx context.Context, namespace string, filter ffapi.Filter) (message []*core.Pin, fr *ffapi.FilterResult, err error) { cols := append([]string{}, pinColumns...) 
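The read path is reworked in the same mechanical way, as the GetPins hunk continues below: plugin methods take ffapi.Filter instead of database.Filter, queries are built and executed through the exported FilterSelect, Query and QueryRes helpers, and the sequence column name now comes from the provider via SequenceColumn() rather than the old package-level sequenceColumn constant (the mock and SQLite test providers later in this patch both return "seq"). A minimal sketch of that shape; the "widgets" table and getWidgetNames are illustrative, not part of this changeset:

    func (s *SQLCommon) getWidgetNames(ctx context.Context, namespace string, filter ffapi.Filter) ([]string, *ffapi.FilterResult, error) {
        // Only "name" plus the provider's sequence column are selected, to keep the scan trivial;
        // an empty field map is assumed to be acceptable when no column aliases are needed.
        query, fop, fi, err := s.FilterSelect(ctx, "", sq.Select("name", s.SequenceColumn()).From("widgets"),
            filter, map[string]string{}, []interface{}{"sequence"}, sq.Eq{"namespace": namespace})
        if err != nil {
            return nil, nil, err
        }
        rows, tx, err := s.Query(ctx, "widgets", query)
        if err != nil {
            return nil, nil, err
        }
        defer rows.Close()

        names := []string{}
        for rows.Next() {
            var name string
            var seq int64
            if err := rows.Scan(&name, &seq); err != nil {
                return nil, nil, err
            }
            names = append(names, name)
        }
        return names, s.QueryRes(ctx, "widgets", tx, fop, fi), nil
    }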
- cols = append(cols, sequenceColumn) - query, fop, fi, err := s.filterSelect( + cols = append(cols, s.SequenceColumn()) + query, fop, fi, err := s.FilterSelect( ctx, "", sq.Select(cols...).From(pinsTable), filter, pinFilterFieldMap, []interface{}{"sequence"}, sq.Eq{"namespace": namespace}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, pinsTable, query) + rows, tx, err := s.Query(ctx, pinsTable, query) if err != nil { return nil, nil, err } @@ -195,32 +197,32 @@ func (s *SQLCommon) GetPins(ctx context.Context, namespace string, filter databa pin = append(pin, d) } - return pin, s.queryRes(ctx, pinsTable, tx, fop, fi), err + return pin, s.QueryRes(ctx, pinsTable, tx, fop, fi), err } -func (s *SQLCommon) UpdatePins(ctx context.Context, namespace string, filter database.Filter, update database.Update) (err error) { +func (s *SQLCommon) UpdatePins(ctx context.Context, namespace string, filter ffapi.Filter, update ffapi.Update) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) - query, err := s.buildUpdate(sq.Update(pinsTable).Where(sq.Eq{"namespace": namespace}), update, pinFilterFieldMap) + query, err := s.BuildUpdate(sq.Update(pinsTable).Where(sq.Eq{"namespace": namespace}), update, pinFilterFieldMap) if err != nil { return err } - query, err = s.filterUpdate(ctx, query, filter, pinFilterFieldMap) + query, err = s.FilterUpdate(ctx, query, filter, pinFilterFieldMap) if err != nil { return err } - _, err = s.updateTx(ctx, pinsTable, tx, query, nil /* no change events filter based update */) + _, err = s.UpdateTx(ctx, pinsTable, tx, query, nil /* no change events filter based update */) if err != nil { return err } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } diff --git a/internal/database/sqlcommon/pin_sql_test.go b/internal/database/sqlcommon/pin_sql_test.go index e291d7bfb2..321042b5b8 100644 --- a/internal/database/sqlcommon/pin_sql_test.go +++ b/internal/database/sqlcommon/pin_sql_test.go @@ -90,7 +90,7 @@ func TestUpsertPinFailBegin(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.UpsertPin(context.Background(), &core.Pin{}) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -101,7 +101,7 @@ func TestUpsertPinFailInsert(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpsertPin(context.Background(), &core.Pin{Hash: fftypes.NewRandB32()}) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -111,7 +111,7 @@ func TestUpsertPinFailSelect(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpsertPin(context.Background(), &core.Pin{Hash: fftypes.NewRandB32()}) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -132,7 +132,7 @@ func TestUpsertPinFailCommit(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnResult(sqlmock.NewResult(1, 1)) mock.ExpectCommit().WillReturnError(fmt.Errorf("pop")) err := s.UpsertPin(context.Background(), &core.Pin{Hash: fftypes.NewRandB32()}) - assert.Regexp(t, "FF10119", err) + assert.Regexp(t, "FF00180", err) assert.NoError(t, 
mock.ExpectationsWereMet()) } @@ -140,7 +140,7 @@ func TestInsertPinsBeginFail(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.InsertPins(context.Background(), []*core.Pin{}) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) s.callbacks.AssertExpectations(t) } @@ -156,7 +156,7 @@ func TestInsertPinsMultiRowOK(t *testing.T) { s.callbacks.On("OrderedCollectionNSEvent", database.CollectionPins, core.ChangeEventTypeCreated, "ns1", int64(1002)) mock.ExpectBegin() - mock.ExpectQuery("INSERT.*").WillReturnRows(sqlmock.NewRows([]string{sequenceColumn}). + mock.ExpectQuery("INSERT.*").WillReturnRows(sqlmock.NewRows([]string{s.SequenceColumn()}). AddRow(int64(1001)). AddRow(int64(1002)), ) @@ -175,7 +175,7 @@ func TestInsertPinsMultiRowFail(t *testing.T) { mock.ExpectBegin() mock.ExpectQuery("INSERT.*").WillReturnError(fmt.Errorf("pop")) err := s.InsertPins(context.Background(), []*core.Pin{pin1}) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) s.callbacks.AssertExpectations(t) } @@ -186,7 +186,7 @@ func TestInsertPinsSingleRowFail(t *testing.T) { mock.ExpectBegin() mock.ExpectExec("INSERT.*").WillReturnError(fmt.Errorf("pop")) err := s.InsertPins(context.Background(), []*core.Pin{pin1}) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) s.callbacks.AssertExpectations(t) } @@ -196,7 +196,7 @@ func TestGetPinQueryFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.PinQueryFactory.NewFilter(context.Background()).Eq("hash", "") _, _, err := s.GetPins(context.Background(), "ns", f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -221,7 +221,7 @@ func TestUpdatePinsBeginFail(t *testing.T) { mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) ctx := context.Background() err := s.UpdatePins(ctx, "ns1", database.PinQueryFactory.NewFilter(ctx).Eq("sequence", 1), database.PinQueryFactory.NewUpdate(ctx).Set("dispatched", true)) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) } func TestUpdatePinsUpdateFail(t *testing.T) { @@ -231,7 +231,7 @@ func TestUpdatePinsUpdateFail(t *testing.T) { mock.ExpectRollback() ctx := context.Background() err := s.UpdatePins(ctx, "ns1", database.PinQueryFactory.NewFilter(ctx).Eq("sequence", 1), database.PinQueryFactory.NewUpdate(ctx).Set("dispatched", true)) - assert.Regexp(t, "FF10117", err) + assert.Regexp(t, "FF00178", err) } func TestUpdatePinsBadFilter(t *testing.T) { diff --git a/internal/database/sqlcommon/provider.go b/internal/database/sqlcommon/provider.go deleted file mode 100644 index 1bdfeaa27c..0000000000 --- a/internal/database/sqlcommon/provider.go +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright © 2022 Kaleido, Inc. -// -// SPDX-License-Identifier: Apache-2.0 -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -package sqlcommon - -import ( - "database/sql" - - sq "github.com/Masterminds/squirrel" - migratedb "github.com/golang-migrate/migrate/v4/database" -) - -const ( - sequenceColumn = "seq" -) - -type SQLFeatures struct { - UseILIKE bool - MultiRowInsert bool - PlaceholderFormat sq.PlaceholderFormat - AcquireLock func(lockName string) string -} - -func DefaultSQLProviderFeatures() SQLFeatures { - return SQLFeatures{ - UseILIKE: false, - MultiRowInsert: false, - PlaceholderFormat: sq.Dollar, - } -} - -// Provider defines the interface an individual provider muse implement to customize the SQLCommon implementation -type Provider interface { - - // Name is the name of the database driver - Name() string - - // MigrationDir is the subdirectory for migrations - MigrationsDir() string - - // Open creates the DB instances - Open(url string) (*sql.DB, error) - - // GetDriver returns the driver implementation - GetMigrationDriver(*sql.DB) (migratedb.Driver, error) - - // Features returns database specific configuration switches - Features() SQLFeatures - - // ApplyInsertQueryCustomizations updates the INSERT query for returning the Sequence, and returns whether it needs to be run as a query to return the Sequence field - ApplyInsertQueryCustomizations(insert sq.InsertBuilder, requestConflictEmptyResult bool) (updatedInsert sq.InsertBuilder, runAsQuery bool) -} diff --git a/internal/database/sqlcommon/provider_mock_test.go b/internal/database/sqlcommon/provider_mock_test.go index b507af5db3..b3cdf51761 100644 --- a/internal/database/sqlcommon/provider_mock_test.go +++ b/internal/database/sqlcommon/provider_mock_test.go @@ -25,6 +25,7 @@ import ( sq "github.com/Masterminds/squirrel" migratedb "github.com/golang-migrate/migrate/v4/database" "github.com/hyperledger/firefly-common/pkg/config" + "github.com/hyperledger/firefly-common/pkg/dbsql" "github.com/hyperledger/firefly/internal/coreconfig" "github.com/hyperledger/firefly/mocks/databasemocks" "github.com/hyperledger/firefly/pkg/database" @@ -72,12 +73,16 @@ func (mp *mockProvider) Name() string { return "mockdb" } +func (mp *mockProvider) SequenceColumn() string { + return "seq" +} + func (mp *mockProvider) MigrationsDir() string { return mp.Name() } -func (psql *mockProvider) Features() SQLFeatures { - features := DefaultSQLProviderFeatures() +func (psql *mockProvider) Features() dbsql.SQLFeatures { + features := dbsql.DefaultSQLProviderFeatures() features.UseILIKE = true features.AcquireLock = func(lockName string) string { return fmt.Sprintf(``, lockName) diff --git a/internal/database/sqlcommon/provider_sqlitego_test.go b/internal/database/sqlcommon/provider_sqlitego_test.go index d57e7f02ad..78fee5016c 100644 --- a/internal/database/sqlcommon/provider_sqlitego_test.go +++ b/internal/database/sqlcommon/provider_sqlitego_test.go @@ -27,6 +27,7 @@ import ( migratedb "github.com/golang-migrate/migrate/v4/database" "github.com/golang-migrate/migrate/v4/database/sqlite3" "github.com/hyperledger/firefly-common/pkg/config" + "github.com/hyperledger/firefly-common/pkg/dbsql" "github.com/hyperledger/firefly/mocks/databasemocks" "github.com/hyperledger/firefly/pkg/database" "github.com/stretchr/testify/assert" @@ -81,8 +82,12 @@ func (tp *sqliteGoTestProvider) MigrationsDir() string { return "sqlite" } -func (psql *sqliteGoTestProvider) Features() SQLFeatures { - features := DefaultSQLProviderFeatures() +func (mp *sqliteGoTestProvider) 
SequenceColumn() string { + return "seq" +} + +func (psql *sqliteGoTestProvider) Features() dbsql.SQLFeatures { + features := dbsql.DefaultSQLProviderFeatures() features.PlaceholderFormat = sq.Dollar features.UseILIKE = false // Not supported return features diff --git a/internal/database/sqlcommon/sqlcommon.go b/internal/database/sqlcommon/sqlcommon.go index 318ca1b645..1105604b8f 100644 --- a/internal/database/sqlcommon/sqlcommon.go +++ b/internal/database/sqlcommon/sqlcommon.go @@ -18,30 +18,22 @@ package sqlcommon import ( "context" - "database/sql" - "fmt" - sq "github.com/Masterminds/squirrel" - "github.com/golang-migrate/migrate/v4" "github.com/hyperledger/firefly-common/pkg/config" + "github.com/hyperledger/firefly-common/pkg/dbsql" "github.com/hyperledger/firefly-common/pkg/fftypes" - "github.com/hyperledger/firefly-common/pkg/i18n" - "github.com/hyperledger/firefly-common/pkg/log" - "github.com/hyperledger/firefly/internal/coremsgs" "github.com/hyperledger/firefly/pkg/core" "github.com/hyperledger/firefly/pkg/database" - "github.com/sirupsen/logrus" // Import migrate file source _ "github.com/golang-migrate/migrate/v4/source/file" ) type SQLCommon struct { - db *sql.DB + dbsql.Database capabilities *database.Capabilities callbacks callbacks - provider Provider - features SQLFeatures + features dbsql.SQLFeatures } type callbacks struct { @@ -84,55 +76,9 @@ func (cb *callbacks) HashCollectionNSEvent(resType database.HashCollectionNS, ev } } -type txContextKey struct{} - -type txWrapper struct { - sqlTX *sql.Tx - preCommitEvents []*core.Event - postCommit []func() -} - -func (s *SQLCommon) Init(ctx context.Context, provider Provider, config config.Section, capabilities *database.Capabilities) (err error) { +func (s *SQLCommon) Init(ctx context.Context, provider dbsql.Provider, config config.Section, capabilities *database.Capabilities) (err error) { s.capabilities = capabilities - s.provider = provider - if s.provider != nil { - s.features = s.provider.Features() - } - if s.provider == nil || s.features.PlaceholderFormat == nil { - log.L(ctx).Errorf("Invalid SQL options from provider '%T'", s.provider) - return i18n.NewError(ctx, coremsgs.MsgDBInitFailed) - } - - if config.GetString(SQLConfDatasourceURL) == "" { - return i18n.NewError(ctx, coremsgs.MsgMissingPluginConfig, "url", fmt.Sprintf("database.%s", s.provider.Name())) - } - - if s.db, err = provider.Open(config.GetString(SQLConfDatasourceURL)); err != nil { - return i18n.WrapError(ctx, err, coremsgs.MsgDBInitFailed) - } - connLimit := config.GetInt(SQLConfMaxConnections) - if connLimit > 0 { - s.db.SetMaxOpenConns(connLimit) - s.db.SetConnMaxIdleTime(config.GetDuration(SQLConfMaxConnIdleTime)) - maxIdleConns := config.GetInt(SQLConfMaxIdleConns) - if maxIdleConns <= 0 { - // By default we rely on the idle time, rather than a maximum number of conns to leave open - maxIdleConns = connLimit - } - s.db.SetMaxIdleConns(maxIdleConns) - s.db.SetConnMaxLifetime(config.GetDuration(SQLConfMaxConnLifetime)) - } - if connLimit > 1 { - capabilities.Concurrency = true - } - - if config.GetBool(SQLConfMigrationsAuto) { - if err = s.applyDBMigrations(ctx, config, provider); err != nil { - return i18n.WrapError(ctx, err, coremsgs.MsgDBMigrationFailed) - } - } - - return nil + return s.Database.Init(ctx, provider, config) } func (s *SQLCommon) SetHandler(namespace string, handler database.Callbacks) { @@ -143,349 +89,3 @@ func (s *SQLCommon) SetHandler(namespace string, handler database.Callbacks) { } func (s *SQLCommon) Capabilities() 
*database.Capabilities { return s.capabilities } - -func (s *SQLCommon) RunAsGroup(ctx context.Context, fn func(ctx context.Context) error) error { - if tx := getTXFromContext(ctx); tx != nil { - // transaction already exists - just continue using it - return fn(ctx) - } - - ctx, tx, _, err := s.beginOrUseTx(ctx) - if err != nil { - return err - } - defer s.rollbackTx(ctx, tx, false /* we _are_ the auto-committer */) - - if err = fn(ctx); err != nil { - return err - } - - return s.commitTx(ctx, tx, false /* we _are_ the auto-committer */) -} - -func (s *SQLCommon) applyDBMigrations(ctx context.Context, config config.Section, provider Provider) error { - driver, err := provider.GetMigrationDriver(s.db) - if err == nil { - var m *migrate.Migrate - m, err = migrate.NewWithDatabaseInstance( - "file://"+config.GetString(SQLConfMigrationsDirectory), - provider.MigrationsDir(), driver) - if err == nil { - err = m.Up() - } - } - if err != nil && err != migrate.ErrNoChange { - return i18n.WrapError(ctx, err, coremsgs.MsgDBMigrationFailed) - } - return nil -} - -func getTXFromContext(ctx context.Context) *txWrapper { - ctxKey := txContextKey{} - txi := ctx.Value(ctxKey) - if txi != nil { - if tx, ok := txi.(*txWrapper); ok { - return tx - } - } - return nil -} - -func (s *SQLCommon) beginOrUseTx(ctx context.Context) (ctx1 context.Context, tx *txWrapper, autoCommit bool, err error) { - - tx = getTXFromContext(ctx) - if tx != nil { - // There is s transaction on the context already. - // return existing with auto-commit flag, to prevent early commit - return ctx, tx, true, nil - } - - l := log.L(ctx).WithField("dbtx", fftypes.ShortID()) - ctx1 = log.WithLogger(ctx, l) - l.Debugf("SQL-> begin") - sqlTX, err := s.db.Begin() - if err != nil { - return ctx1, nil, false, i18n.WrapError(ctx1, err, coremsgs.MsgDBBeginFailed) - } - tx = &txWrapper{ - sqlTX: sqlTX, - } - ctx1 = context.WithValue(ctx1, txContextKey{}, tx) - l.Debugf("SQL<- begin") - return ctx1, tx, false, err -} - -func (s *SQLCommon) queryTx(ctx context.Context, table string, tx *txWrapper, q sq.SelectBuilder) (*sql.Rows, *txWrapper, error) { - if tx == nil { - // If there is a transaction in the context, we should use it to provide consistency - // in the read operations (read after insert for example). - tx = getTXFromContext(ctx) - } - - l := log.L(ctx) - sqlQuery, args, err := q.PlaceholderFormat(s.features.PlaceholderFormat).ToSql() - if err != nil { - return nil, tx, i18n.WrapError(ctx, err, coremsgs.MsgDBQueryBuildFailed) - } - l.Debugf(`SQL-> query %s`, table) - l.Tracef(`SQL-> query: %s (args: %+v)`, sqlQuery, args) - var rows *sql.Rows - if tx != nil { - rows, err = tx.sqlTX.QueryContext(ctx, sqlQuery, args...) - } else { - rows, err = s.db.QueryContext(ctx, sqlQuery, args...) - } - if err != nil { - l.Errorf(`SQL query failed: %s sql=[ %s ]`, err, sqlQuery) - return nil, tx, i18n.WrapError(ctx, err, coremsgs.MsgDBQueryFailed) - } - l.Debugf(`SQL<- query %s`, table) - return rows, tx, nil -} - -func (s *SQLCommon) query(ctx context.Context, table string, q sq.SelectBuilder) (*sql.Rows, *txWrapper, error) { - return s.queryTx(ctx, table, nil, q) -} - -func (s *SQLCommon) countQuery(ctx context.Context, table string, tx *txWrapper, fop sq.Sqlizer, countExpr string) (count int64, err error) { - count = -1 - l := log.L(ctx) - if tx == nil { - // If there is a transaction in the context, we should use it to provide consistency - // in the read operations (read after insert for example). 
- tx = getTXFromContext(ctx) - } - if countExpr == "" { - countExpr = "*" - } - q := sq.Select(fmt.Sprintf("COUNT(%s)", countExpr)).From(table).Where(fop) - sqlQuery, args, err := q.PlaceholderFormat(s.features.PlaceholderFormat).ToSql() - if err != nil { - return count, i18n.WrapError(ctx, err, coremsgs.MsgDBQueryBuildFailed) - } - l.Debugf(`SQL-> count query %s`, table) - l.Tracef(`SQL-> count query: %s (args: %+v)`, sqlQuery, args) - var rows *sql.Rows - if tx != nil { - rows, err = tx.sqlTX.QueryContext(ctx, sqlQuery, args...) - } else { - rows, err = s.db.QueryContext(ctx, sqlQuery, args...) - } - if err != nil { - l.Errorf(`SQL count query failed: %s sql=[ %s ]`, err, sqlQuery) - return count, i18n.WrapError(ctx, err, coremsgs.MsgDBQueryFailed) - } - defer rows.Close() - if rows.Next() { - if err = rows.Scan(&count); err != nil { - return count, i18n.WrapError(ctx, err, coremsgs.MsgDBReadErr, table) - } - } - l.Debugf(`SQL<- count query %s: %d`, table, count) - return count, nil -} - -func (s *SQLCommon) queryRes(ctx context.Context, table string, tx *txWrapper, fop sq.Sqlizer, fi *database.FilterInfo) *database.FilterResult { - fr := &database.FilterResult{} - if fi.Count { - count, err := s.countQuery(ctx, table, tx, fop, fi.CountExpr) - if err != nil { - // Log, but continue - log.L(ctx).Warnf("Unable to return count for query: %s", err) - } - fr.TotalCount = &count // could be -1 if the count extract fails - we still return the result - } - return fr -} - -func (s *SQLCommon) insertTx(ctx context.Context, table string, tx *txWrapper, q sq.InsertBuilder, postCommit func()) (int64, error) { - return s.insertTxExt(ctx, table, tx, q, postCommit, false) -} - -func (s *SQLCommon) insertTxExt(ctx context.Context, table string, tx *txWrapper, q sq.InsertBuilder, postCommit func(), requestConflictEmptyResult bool) (int64, error) { - sequences := []int64{-1} - err := s.insertTxRows(ctx, table, tx, q, postCommit, sequences, requestConflictEmptyResult) - return sequences[0], err -} - -func (s *SQLCommon) insertTxRows(ctx context.Context, table string, tx *txWrapper, q sq.InsertBuilder, postCommit func(), sequences []int64, requestConflictEmptyResult bool) error { - l := log.L(ctx) - q, useQuery := s.provider.ApplyInsertQueryCustomizations(q, requestConflictEmptyResult) - - sqlQuery, args, err := q.PlaceholderFormat(s.features.PlaceholderFormat).ToSql() - if err != nil { - return i18n.WrapError(ctx, err, coremsgs.MsgDBQueryBuildFailed) - } - l.Debugf(`SQL-> insert %s`, table) - l.Tracef(`SQL-> insert query: %s (args: %+v)`, sqlQuery, args) - if useQuery { - result, err := tx.sqlTX.QueryContext(ctx, sqlQuery, args...) - for i := 0; i < len(sequences) && err == nil; i++ { - if result.Next() { - err = result.Scan(&sequences[i]) - } else { - err = i18n.NewError(ctx, coremsgs.MsgDBNoSequence, i+1) - } - } - if result != nil { - result.Close() - } - if err != nil { - level := logrus.DebugLevel - if !requestConflictEmptyResult { - level = logrus.ErrorLevel - } - l.Logf(level, `SQL insert failed (conflictEmptyRequested=%t): %s sql=[ %s ]: %s`, requestConflictEmptyResult, err, sqlQuery, err) - return i18n.WrapError(ctx, err, coremsgs.MsgDBInsertFailed) - } - } else { - if len(sequences) > 1 { - return i18n.WrapError(ctx, err, coremsgs.MsgDBMultiRowConfigError) - } - res, err := tx.sqlTX.ExecContext(ctx, sqlQuery, args...) 
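The deletions running through this file are the heart of the patch: the transaction wrapper, query execution, count queries, insert/update/delete helpers, pre- and post-commit event hooks, connection-pool setup and migration handling all move into firefly-common's dbsql package, and SQLCommon picks them up by embedding dbsql.Database, with its Init simply delegating (as the earlier hunk in this file shows). Callers that group several writes into one database transaction keep the same contract; a sketch, assuming RunAsGroup keeps its previous signature as a method promoted from the embedded dbsql.Database:

    // nonce1 and nonce2 are illustrative values; both inserts share one transaction
    // because each InsertNonce call finds the group's transaction on the context.
    func insertNoncePair(ctx context.Context, s *SQLCommon, nonce1, nonce2 *core.Nonce) error {
        return s.RunAsGroup(ctx, func(ctx context.Context) error {
            if err := s.InsertNonce(ctx, nonce1); err != nil {
                return err
            }
            return s.InsertNonce(ctx, nonce2) // committed, or rolled back, together with the first
        })
    }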
- if err != nil { - l.Errorf(`SQL insert failed: %s sql=[ %s ]: %s`, err, sqlQuery, err) - return i18n.WrapError(ctx, err, coremsgs.MsgDBInsertFailed) - } - sequences[0], _ = res.LastInsertId() - } - l.Debugf(`SQL<- insert %s sequences=%v`, table, sequences) - - if postCommit != nil { - s.postCommitEvent(tx, postCommit) - } - return nil -} - -func (s *SQLCommon) deleteTx(ctx context.Context, table string, tx *txWrapper, q sq.DeleteBuilder, postCommit func()) error { - l := log.L(ctx) - sqlQuery, args, err := q.PlaceholderFormat(s.features.PlaceholderFormat).ToSql() - if err != nil { - return i18n.WrapError(ctx, err, coremsgs.MsgDBQueryBuildFailed) - } - l.Debugf(`SQL-> delete %s`, table) - l.Tracef(`SQL-> delete query: %s args: %+v`, sqlQuery, args) - res, err := tx.sqlTX.ExecContext(ctx, sqlQuery, args...) - if err != nil { - l.Errorf(`SQL delete failed: %s sql=[ %s ]: %s`, err, sqlQuery, err) - return i18n.WrapError(ctx, err, coremsgs.MsgDBDeleteFailed) - } - ra, _ := res.RowsAffected() - l.Debugf(`SQL<- delete %s affected=%d`, table, ra) - if ra < 1 { - return database.DeleteRecordNotFound - } - - if postCommit != nil { - s.postCommitEvent(tx, postCommit) - } - return nil -} - -func (s *SQLCommon) updateTx(ctx context.Context, table string, tx *txWrapper, q sq.UpdateBuilder, postCommit func()) (int64, error) { - l := log.L(ctx) - sqlQuery, args, err := q.PlaceholderFormat(s.features.PlaceholderFormat).ToSql() - if err != nil { - return -1, i18n.WrapError(ctx, err, coremsgs.MsgDBQueryBuildFailed) - } - l.Debugf(`SQL-> update %s`, table) - l.Tracef(`SQL-> update query: %s (args: %+v)`, sqlQuery, args) - res, err := tx.sqlTX.ExecContext(ctx, sqlQuery, args...) - if err != nil { - l.Errorf(`SQL update failed: %s sql=[ %s ]`, err, sqlQuery) - return -1, i18n.WrapError(ctx, err, coremsgs.MsgDBUpdateFailed) - } - ra, _ := res.RowsAffected() - l.Debugf(`SQL<- update %s affected=%d`, table, ra) - - if postCommit != nil { - s.postCommitEvent(tx, postCommit) - } - return ra, nil -} - -func (s *SQLCommon) postCommitEvent(tx *txWrapper, fn func()) { - tx.postCommit = append(tx.postCommit, fn) -} - -func (s *SQLCommon) addPreCommitEvent(tx *txWrapper, event *core.Event) { - tx.preCommitEvents = append(tx.preCommitEvents, event) -} - -func (s *SQLCommon) acquireLockTx(ctx context.Context, lockName string, tx *txWrapper) error { - l := log.L(ctx) - if s.features.AcquireLock != nil { - sqlQuery := s.features.AcquireLock(lockName) - - l.Debugf(`SQL-> lock %s`, lockName) - _, err := tx.sqlTX.ExecContext(ctx, sqlQuery) - if err != nil { - l.Errorf(`SQL lock failed: %s sql=[ %s ]`, err, sqlQuery) - return i18n.WrapError(ctx, err, coremsgs.MsgDBLockFailed) - } - l.Debugf(`SQL<- lock %s`, lockName) - } - return nil -} - -// rollbackTx be safely called as a defer, as it is a cheap no-op if the transaction is complete -func (s *SQLCommon) rollbackTx(ctx context.Context, tx *txWrapper, autoCommit bool) { - if autoCommit { - // We're inside of a wide transaction boundary with an auto-commit - return - } - - err := tx.sqlTX.Rollback() - if err == nil { - log.L(ctx).Warnf("SQL! 
transaction rollback") - } - if err != nil && err != sql.ErrTxDone { - log.L(ctx).Errorf(`SQL rollback failed: %s`, err) - } -} - -func (s *SQLCommon) commitTx(ctx context.Context, tx *txWrapper, autoCommit bool) error { - if autoCommit { - // We're inside of a wide transaction boundary with an auto-commit - return nil - } - l := log.L(ctx) - - // Only at this stage do we write to the special events Database table, so we know - // regardless of the higher level logic, the events are always written at this point - // at the end of the transaction - if len(tx.preCommitEvents) > 0 { - if err := s.insertEventsPreCommit(ctx, tx, tx.preCommitEvents); err != nil { - s.rollbackTx(ctx, tx, false) - return err - } - } - - l.Debugf(`SQL-> commit`) - err := tx.sqlTX.Commit() - if err != nil { - l.Errorf(`SQL commit failed: %s`, err) - return i18n.WrapError(ctx, err, coremsgs.MsgDBCommitFailed) - } - l.Debugf(`SQL<- commit`) - - // Emit any post commit events (these aren't currently allowed to cause errors) - for i, pce := range tx.postCommit { - l.Tracef(`-> post commit event %d`, i) - pce() - l.Tracef(`<- post commit event %d`, i) - } - - return nil -} - -func (s *SQLCommon) DB() *sql.DB { - return s.db -} - -func (s *SQLCommon) Close() { - if s.db != nil { - err := s.db.Close() - log.L(context.Background()).Debugf("Database closed (err=%v)", err) - } -} diff --git a/internal/database/sqlcommon/sqlcommon_test.go b/internal/database/sqlcommon/sqlcommon_test.go index 9fe69d94ee..9d909043fe 100644 --- a/internal/database/sqlcommon/sqlcommon_test.go +++ b/internal/database/sqlcommon/sqlcommon_test.go @@ -18,14 +18,11 @@ package sqlcommon import ( "context" - "database/sql/driver" "fmt" "testing" - "github.com/DATA-DOG/go-sqlmock" sq "github.com/Masterminds/squirrel" "github.com/golang-migrate/migrate/v4" - "github.com/hyperledger/firefly-common/pkg/config" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly/mocks/databasemocks" "github.com/hyperledger/firefly/pkg/core" @@ -33,55 +30,13 @@ import ( "github.com/stretchr/testify/assert" ) -func TestInitSQLCommonMissingURL(t *testing.T) { - conf := config.RootSection("unittest.db") - conf.AddKnownKey("url", "") - s := &SQLCommon{} - tp := &sqliteGoTestProvider{ - t: t, - callbacks: &databasemocks.Callbacks{}, - capabilities: &database.Capabilities{}, - config: conf, - } - s.InitConfig(tp, conf) - err := s.Init(context.Background(), tp, conf, nil) - assert.Regexp(t, "FF10138.*url", err) -} - -func TestInitSQLCommon(t *testing.T) { - s, cleanup := newSQLiteTestProvider(t) - defer cleanup() - assert.NotNil(t, s.Capabilities()) - assert.NotNil(t, s.DB()) -} - -func TestInitSQLCommonMissingOptions(t *testing.T) { - s := &SQLCommon{} - err := s.Init(context.Background(), nil, nil, nil) - assert.Regexp(t, "FF10112", err) -} - -func TestInitSQLCommonOpenFailed(t *testing.T) { - mp := newMockProvider() - mp.openError = fmt.Errorf("pop") - err := mp.SQLCommon.Init(context.Background(), mp, mp.config, mp.capabilities) - assert.Regexp(t, "FF10112.*pop", err) -} - -func TestInitSQLCommonMigrationOpenFailed(t *testing.T) { - mp := newMockProvider() - mp.config.Set(SQLConfMigrationsAuto, true) - mp.getMigrationDriverError = fmt.Errorf("pop") - err := mp.SQLCommon.Init(context.Background(), mp, mp.config, mp.capabilities) - assert.Regexp(t, "FF10163.*pop", err) -} - func TestMigrationUpDown(t *testing.T) { tp, cleanup := newSQLiteTestProvider(t) defer cleanup() - driver, err := tp.GetMigrationDriver(tp.db) + driver, err := 
tp.GetMigrationDriver(tp.DB()) assert.NoError(t, err) + assert.NotNil(t, tp.Capabilities()) var m *migrate.Migrate m, err = migrate.NewWithDatabaseInstance( "file://../../../db/migrations/sqlite", @@ -91,160 +46,6 @@ func TestMigrationUpDown(t *testing.T) { assert.NoError(t, err) } -func TestQueryTxBadSQL(t *testing.T) { - tp, cleanup := newSQLiteTestProvider(t) - defer cleanup() - _, _, err := tp.queryTx(context.Background(), "table1", nil, sq.SelectBuilder{}) - assert.Regexp(t, "FF10113", err) -} - -func TestInsertTxPostgreSQLReturnedSyntax(t *testing.T) { - s, mdb := newMockProvider().init() - mdb.ExpectBegin() - mdb.ExpectQuery("INSERT.*").WillReturnRows(sqlmock.NewRows([]string{sequenceColumn}).AddRow(12345)) - ctx, tx, _, err := s.beginOrUseTx(context.Background()) - assert.NoError(t, err) - s.fakePSQLInsert = true - sb := sq.Insert("table").Columns("col1").Values(("val1")) - sequence, err := s.insertTx(ctx, "table1", tx, sb, nil) - assert.NoError(t, err) - assert.Equal(t, int64(12345), sequence) -} - -func TestInsertTxPostgreSQLReturnedSyntaxFail(t *testing.T) { - s, mdb := newMockProvider().init() - mdb.ExpectBegin() - mdb.ExpectQuery("INSERT.*").WillReturnError(fmt.Errorf("pop")) - ctx, tx, _, err := s.beginOrUseTx(context.Background()) - assert.NoError(t, err) - s.fakePSQLInsert = true - sb := sq.Insert("table").Columns("col1").Values(("val1")) - _, err = s.insertTx(ctx, "table1", tx, sb, nil) - assert.Regexp(t, "FF10116", err) -} - -func TestInsertTxBadSQL(t *testing.T) { - s, _ := newMockProvider().init() - _, err := s.insertTx(context.Background(), "table1", nil, sq.InsertBuilder{}, nil) - assert.Regexp(t, "FF10113", err) -} - -func TestUpdateTxBadSQL(t *testing.T) { - s, _ := newMockProvider().init() - _, err := s.updateTx(context.Background(), "table1", nil, sq.UpdateBuilder{}, nil) - assert.Regexp(t, "FF10113", err) -} - -func TestDeleteTxBadSQL(t *testing.T) { - s, _ := newMockProvider().init() - err := s.deleteTx(context.Background(), "table1", nil, sq.DeleteBuilder{}, nil) - assert.Regexp(t, "FF10113", err) -} - -func TestDeleteTxZeroRowsAffected(t *testing.T) { - s, mdb := newMockProvider().init() - mdb.ExpectBegin() - mdb.ExpectExec("DELETE.*").WillReturnResult(driver.ResultNoRows) - ctx, tx, _, err := s.beginOrUseTx(context.Background()) - assert.NoError(t, err) - s.fakePSQLInsert = true - sb := sq.Delete("table") - err = s.deleteTx(ctx, "table1", tx, sb, nil) - assert.Regexp(t, "FF10109", err) -} - -func TestRunAsGroup(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectBegin() - mock.ExpectExec("INSERT.*").WillReturnResult(driver.ResultNoRows) - mock.ExpectExec("INSERT.*").WillReturnResult(driver.ResultNoRows) - mock.ExpectQuery("SELECT.*").WillReturnRows(sqlmock.NewRows([]string{"id"})) - mock.ExpectCommit() - - err := s.RunAsGroup(context.Background(), func(ctx context.Context) (err error) { - // First insert - ctx, tx, ac, err := s.beginOrUseTx(ctx) - assert.NoError(t, err) - _, err = s.insertTx(ctx, "table1", tx, sq.Insert("test").Columns("test").Values("test"), nil) - assert.NoError(t, err) - err = s.commitTx(ctx, tx, ac) - assert.NoError(t, err) - - // Second insert - ctx, tx, ac, err = s.beginOrUseTx(ctx) - assert.NoError(t, err) - _, err = s.insertTx(ctx, "table1", tx, sq.Insert("test").Columns("test").Values("test"), nil) - assert.NoError(t, err) - err = s.commitTx(ctx, tx, ac) - assert.NoError(t, err) - - // Query, not specifying a transaction - _, _, err = s.query(ctx, "table1", sq.Select("test").From("test")) - assert.NoError(t, err) - - 
// Nested call - err = s.RunAsGroup(ctx, func(ctx2 context.Context) error { - assert.Equal(t, ctx, ctx2) - return nil - }) - assert.NoError(t, err) - - return - }) - - assert.NoError(t, mock.ExpectationsWereMet()) - assert.NoError(t, err) -} - -func TestRunAsGroupBeginFail(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) - err := s.RunAsGroup(context.Background(), func(ctx context.Context) (err error) { - return - }) - assert.NoError(t, mock.ExpectationsWereMet()) - assert.Regexp(t, "FF10114", err) -} - -func TestRunAsGroupFunctionFails(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectBegin() - mock.ExpectExec("INSERT.*").WillReturnResult(driver.ResultNoRows) - mock.ExpectRollback() - err := s.RunAsGroup(context.Background(), func(ctx context.Context) (err error) { - ctx, tx, ac, err := s.beginOrUseTx(ctx) - assert.NoError(t, err) - _, err = s.insertTx(ctx, "table1", tx, sq.Insert("test").Columns("test").Values("test"), nil) - assert.NoError(t, err) - s.rollbackTx(ctx, tx, ac) // won't actually rollback - assert.NoError(t, err) - - return fmt.Errorf("pop") - }) - assert.NoError(t, mock.ExpectationsWereMet()) - assert.Regexp(t, "pop", err) -} - -func TestRunAsGroupCommitFail(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectBegin() - mock.ExpectCommit().WillReturnError(fmt.Errorf("pop")) - err := s.RunAsGroup(context.Background(), func(ctx context.Context) (err error) { - return - }) - assert.NoError(t, mock.ExpectationsWereMet()) - assert.Regexp(t, "FF10119", err) -} - -func TestRollbackFail(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectBegin() - tx, _ := s.db.Begin() - mock.ExpectRollback().WillReturnError(fmt.Errorf("pop")) - s.rollbackTx(context.Background(), &txWrapper{sqlTX: tx}, false) - assert.NoError(t, mock.ExpectationsWereMet()) -} - func TestTXConcurrency(t *testing.T) { s, cleanup := newSQLiteTestProvider(t) defer cleanup() @@ -258,7 +59,7 @@ func TestTXConcurrency(t *testing.T) { // test should be included. 
// (additional refactor required - see https://github.com/hyperledger/firefly/issues/119) - _, err := s.db.Exec(` + _, err := s.DB().Exec(` CREATE TABLE testconc ( seq INTEGER PRIMARY KEY AUTOINCREMENT, val VARCHAR(256) ); `) assert.NoError(t, err) @@ -267,13 +68,13 @@ func TestTXConcurrency(t *testing.T) { return func() { defer close(done) for i := 0; i < 5; i++ { - ctx, tx, ac, err := s.beginOrUseTx(context.Background()) + ctx, tx, ac, err := s.BeginOrUseTx(context.Background()) assert.NoError(t, err) val := fmt.Sprintf("%s/%d", name, i) - sequence, err := s.insertTx(ctx, "table1", tx, sq.Insert("testconc").Columns("val").Values(val), nil) + sequence, err := s.InsertTx(ctx, "table1", tx, sq.Insert("testconc").Columns("val").Values(val), nil) assert.NoError(t, err) t.Logf("%s = %d", val, sequence) - err = s.commitTx(ctx, tx, ac) + err = s.CommitTx(ctx, tx, ac) assert.NoError(t, err) } } @@ -289,68 +90,6 @@ func TestTXConcurrency(t *testing.T) { } } -func TestCountQueryBadSQL(t *testing.T) { - s, _ := newMockProvider().init() - _, err := s.countQuery(context.Background(), "table1", nil, sq.Insert("wrong"), "") - assert.Regexp(t, "FF10113", err) -} - -func TestCountQueryQueryFailed(t *testing.T) { - s, mdb := newMockProvider().init() - mdb.ExpectQuery("^SELECT COUNT\\(\\*\\)").WillReturnError(fmt.Errorf("pop")) - _, err := s.countQuery(context.Background(), "table1", nil, sq.Eq{"col1": "val1"}, "") - assert.Regexp(t, "FF10115.*pop", err) -} - -func TestCountQueryScanFailTx(t *testing.T) { - s, mdb := newMockProvider().init() - mdb.ExpectBegin() - mdb.ExpectQuery("^SELECT COUNT\\(\\*\\)").WillReturnRows(sqlmock.NewRows([]string{"col1"}).AddRow("not a number")) - ctx, tx, _, err := s.beginOrUseTx(context.Background()) - assert.NoError(t, err) - _, err = s.countQuery(ctx, "table1", tx, sq.Eq{"col1": "val1"}, "") - assert.Regexp(t, "FF10121", err) -} - -func TestCountQueryWithExpr(t *testing.T) { - s, mdb := newMockProvider().init() - mdb.ExpectQuery("^SELECT COUNT\\(DISTINCT key\\)").WillReturnRows(sqlmock.NewRows([]string{"col1"}).AddRow(10)) - _, err := s.countQuery(context.Background(), "table1", nil, sq.Eq{"col1": "val1"}, "DISTINCT key") - assert.NoError(t, err) - assert.NoError(t, mdb.ExpectationsWereMet()) -} - -func TestQueryResSwallowError(t *testing.T) { - s, _ := newMockProvider().init() - res := s.queryRes(context.Background(), "table1", nil, sq.Insert("wrong"), &database.FilterInfo{ - Count: true, - }) - assert.Equal(t, int64(-1), *res.TotalCount) -} - -func TestInsertTxRowsBadConfig(t *testing.T) { - s, mdb := newMockProvider().init() - mdb.ExpectBegin() - ctx, tx, _, err := s.beginOrUseTx(context.Background()) - assert.NoError(t, err) - s.fakePSQLInsert = false - sb := sq.Insert("table").Columns("col1").Values(("val1")) - err = s.insertTxRows(ctx, "table1", tx, sb, nil, []int64{1, 2}, false) - assert.Regexp(t, "FF10374", err) -} - -func TestInsertTxRowsIncompleteReturn(t *testing.T) { - s, mdb := newMockProvider().init() - mdb.ExpectBegin() - mdb.ExpectQuery("INSERT.*").WillReturnRows(sqlmock.NewRows([]string{sequenceColumn}).AddRow(int64(1001))) - ctx, tx, _, err := s.beginOrUseTx(context.Background()) - assert.NoError(t, err) - s.fakePSQLInsert = true - sb := sq.Insert("table").Columns("col1").Values(("val1")) - err = s.insertTxRows(ctx, "table1", tx, sb, nil, []int64{1, 2}, false) - assert.Regexp(t, "FF10116", err) -} - func TestNamespaceCallbacks(t *testing.T) { tcb := &databasemocks.Callbacks{} cb := callbacks{ diff --git a/internal/database/sqlcommon/subscription_sql.go 
b/internal/database/sqlcommon/subscription_sql.go index cd21de9ba9..2e887f222d 100644 --- a/internal/database/sqlcommon/subscription_sql.go +++ b/internal/database/sqlcommon/subscription_sql.go @@ -22,6 +22,7 @@ import ( "fmt" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" @@ -47,16 +48,16 @@ var ( const subscriptionsTable = "subscriptions" func (s *SQLCommon) UpsertSubscription(ctx context.Context, subscription *core.Subscription, allowExisting bool) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) existing := false if allowExisting { // Do a select within the transaction to detemine if the UUID already exists - subscriptionRows, _, err := s.queryTx(ctx, subscriptionsTable, tx, + subscriptionRows, _, err := s.QueryTx(ctx, subscriptionsTable, tx, sq.Select("id"). From(subscriptionsTable). Where(sq.Eq{ @@ -85,7 +86,7 @@ func (s *SQLCommon) UpsertSubscription(ctx context.Context, subscription *core.S if existing { // Update the subscription - if _, err = s.updateTx(ctx, subscriptionsTable, tx, + if _, err = s.UpdateTx(ctx, subscriptionsTable, tx, sq.Update(subscriptionsTable). // Note we do not update ID Set("name", subscription.Name). @@ -109,7 +110,7 @@ func (s *SQLCommon) UpsertSubscription(ctx context.Context, subscription *core.S subscription.ID = fftypes.NewUUID() } - if _, err = s.insertTx(ctx, subscriptionsTable, tx, + if _, err = s.InsertTx(ctx, subscriptionsTable, tx, sq.Insert(subscriptionsTable). Columns(subscriptionColumns...). Values( @@ -131,7 +132,7 @@ func (s *SQLCommon) UpsertSubscription(ctx context.Context, subscription *core.S } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) subscriptionResult(ctx context.Context, row *sql.Rows) (*core.Subscription, error) { @@ -154,7 +155,7 @@ func (s *SQLCommon) subscriptionResult(ctx context.Context, row *sql.Rows) (*cor func (s *SQLCommon) getSubscriptionEq(ctx context.Context, eq sq.Eq, textName string) (message *core.Subscription, err error) { - rows, _, err := s.query(ctx, subscriptionsTable, + rows, _, err := s.Query(ctx, subscriptionsTable, sq.Select(subscriptionColumns...). From(subscriptionsTable). 
Where(eq), @@ -185,16 +186,16 @@ func (s *SQLCommon) GetSubscriptionByName(ctx context.Context, namespace, name s return s.getSubscriptionEq(ctx, sq.Eq{"namespace": namespace, "name": name}, fmt.Sprintf("%s:%s", namespace, name)) } -func (s *SQLCommon) GetSubscriptions(ctx context.Context, namespace string, filter database.Filter) (message []*core.Subscription, fr *database.FilterResult, err error) { +func (s *SQLCommon) GetSubscriptions(ctx context.Context, namespace string, filter ffapi.Filter) (message []*core.Subscription, fr *ffapi.FilterResult, err error) { - query, fop, fi, err := s.filterSelect( + query, fop, fi, err := s.FilterSelect( ctx, "", sq.Select(subscriptionColumns...).From(subscriptionsTable), filter, subscriptionFilterFieldMap, []interface{}{"sequence"}, sq.Eq{"namespace": namespace}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, subscriptionsTable, query) + rows, tx, err := s.Query(ctx, subscriptionsTable, query) if err != nil { return nil, nil, err } @@ -209,17 +210,17 @@ func (s *SQLCommon) GetSubscriptions(ctx context.Context, namespace string, filt subscription = append(subscription, d) } - return subscription, s.queryRes(ctx, subscriptionsTable, tx, fop, fi), err + return subscription, s.QueryRes(ctx, subscriptionsTable, tx, fop, fi), err } -func (s *SQLCommon) UpdateSubscription(ctx context.Context, namespace, name string, update database.Update) (err error) { +func (s *SQLCommon) UpdateSubscription(ctx context.Context, namespace, name string, update ffapi.Update) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) subscription, err := s.GetSubscriptionByName(ctx, namespace, name) if err != nil { @@ -229,13 +230,13 @@ func (s *SQLCommon) UpdateSubscription(ctx context.Context, namespace, name stri return i18n.NewError(ctx, coremsgs.Msg404NoResult) } - query, err := s.buildUpdate(sq.Update(subscriptionsTable), update, subscriptionFilterFieldMap) + query, err := s.BuildUpdate(sq.Update(subscriptionsTable), update, subscriptionFilterFieldMap) if err != nil { return err } query = query.Where(sq.Eq{"id": subscription.ID}) - _, err = s.updateTx(ctx, subscriptionsTable, tx, query, + _, err = s.UpdateTx(ctx, subscriptionsTable, tx, query, func() { s.callbacks.UUIDCollectionNSEvent(database.CollectionSubscriptions, core.ChangeEventTypeUpdated, subscription.Namespace, subscription.ID) }) @@ -243,20 +244,20 @@ func (s *SQLCommon) UpdateSubscription(ctx context.Context, namespace, name stri return err } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) DeleteSubscriptionByID(ctx context.Context, namespace string, id *fftypes.UUID) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) subscription, err := s.GetSubscriptionByID(ctx, namespace, id) if err == nil && subscription != nil { - err = s.deleteTx(ctx, subscriptionsTable, tx, sq.Delete(subscriptionsTable).Where(sq.Eq{ + err = s.DeleteTx(ctx, subscriptionsTable, tx, sq.Delete(subscriptionsTable).Where(sq.Eq{ "id": id, }), func() { @@ -267,5 +268,5 @@ func (s *SQLCommon) DeleteSubscriptionByID(ctx context.Context, namespace string } } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, 
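Every collection's list query in this patch now follows the same shape as GetSubscriptions above: the exported FilterSelect turns an ffapi.Filter plus the per-table field map into a SELECT, Query executes it, and QueryRes resolves any count or pagination information the filter requested. A condensed sketch of that shape follows, reusing the subscription columns and row scanner shown above; getSubscriptionsSketch is an illustrative name, not part of the patch.

// Condensed sketch of the shared list-query shape (getSubscriptionsSketch is
// illustrative; the helper signatures mirror the GetSubscriptions calls above).
func (s *SQLCommon) getSubscriptionsSketch(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.Subscription, *ffapi.FilterResult, error) {
	query, fop, fi, err := s.FilterSelect(ctx, "",
		sq.Select(subscriptionColumns...).From(subscriptionsTable),
		filter, subscriptionFilterFieldMap, []interface{}{"sequence"},
		sq.Eq{"namespace": namespace})
	if err != nil {
		return nil, nil, err
	}

	rows, tx, err := s.Query(ctx, subscriptionsTable, query)
	if err != nil {
		return nil, nil, err
	}
	defer rows.Close()

	subscriptions := []*core.Subscription{}
	for rows.Next() {
		d, err := s.subscriptionResult(ctx, rows)
		if err != nil {
			return nil, nil, err
		}
		subscriptions = append(subscriptions, d)
	}

	// QueryRes supplies any total-count / pagination result the filter asked for
	return subscriptions, s.QueryRes(ctx, subscriptionsTable, tx, fop, fi), err
}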
autoCommit) } diff --git a/internal/database/sqlcommon/subscription_sql_test.go b/internal/database/sqlcommon/subscription_sql_test.go index 62b3975f1b..b96db645f1 100644 --- a/internal/database/sqlcommon/subscription_sql_test.go +++ b/internal/database/sqlcommon/subscription_sql_test.go @@ -150,7 +150,7 @@ func TestUpsertSubscriptionFailBegin(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.UpsertSubscription(context.Background(), &core.Subscription{}, true) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -160,7 +160,7 @@ func TestUpsertSubscriptionFailSelect(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpsertSubscription(context.Background(), &core.Subscription{SubscriptionRef: core.SubscriptionRef{Name: "name1"}}, true) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -171,7 +171,7 @@ func TestUpsertSubscriptionFailInsert(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpsertSubscription(context.Background(), &core.Subscription{SubscriptionRef: core.SubscriptionRef{Name: "name1"}}, true) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -183,7 +183,7 @@ func TestUpsertSubscriptionFailUpdate(t *testing.T) { mock.ExpectExec("UPDATE .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpsertSubscription(context.Background(), &core.Subscription{SubscriptionRef: core.SubscriptionRef{Name: "name1"}}, true) - assert.Regexp(t, "FF10117", err) + assert.Regexp(t, "FF00178", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -194,7 +194,7 @@ func TestUpsertSubscriptionFailCommit(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnResult(sqlmock.NewResult(1, 1)) mock.ExpectCommit().WillReturnError(fmt.Errorf("pop")) err := s.UpsertSubscription(context.Background(), &core.Subscription{SubscriptionRef: core.SubscriptionRef{Name: "name1"}}, true) - assert.Regexp(t, "FF10119", err) + assert.Regexp(t, "FF00180", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -202,7 +202,7 @@ func TestGetSubscriptionByIDSelectFail(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) _, err := s.GetSubscriptionByName(context.Background(), "ns1", "name1") - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -228,7 +228,7 @@ func TestGetSubscriptionQueryFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.SubscriptionQueryFactory.NewFilter(context.Background()).Eq("name", "") _, _, err := s.GetSubscriptions(context.Background(), "ns1", f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -253,7 +253,7 @@ func TestSubscriptionUpdateBeginFail(t *testing.T) { mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) u := database.SubscriptionQueryFactory.NewUpdate(context.Background()).Set("name", "anything") err := s.UpdateSubscription(context.Background(), "ns1", "name1", u) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) } func TestSubscriptionUpdateBuildQueryFail(t *testing.T) { @@ -274,7 +274,7 @@ func 
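The assertion changes in this test file, and in the token, transaction and verifier tests below, all follow one renumbering: begin/query/insert/update/delete/commit failures are now reported by firefly-common, whose message catalogue uses the FF00xxx range, instead of FireFly's own FF10xxx range. A summary of the mapping visible in these tests follows, with the failure kinds inferred from the test names, plus one example assertion taken from TestSubscriptionUpdateBeginFail above.

// Old FireFly code -> new firefly-common code (kinds inferred from test names):
//   FF10114 -> FF00175  begin transaction failed
//   FF10115 -> FF00176  query failed
//   FF10116 -> FF00177  insert failed
//   FF10117 -> FF00178  update failed
//   FF10118 -> FF00179  delete failed
//   FF10119 -> FF00180  commit failed
mock.ExpectBegin().WillReturnError(fmt.Errorf("pop"))
u := database.SubscriptionQueryFactory.NewUpdate(context.Background()).Set("name", "anything")
err := s.UpdateSubscription(context.Background(), "ns1", "name1", u)
assert.Regexp(t, "FF00175", err) // previously asserted FF10114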
TestSubscriptionUpdateSelectFail(t *testing.T) { mock.ExpectRollback() u := database.SubscriptionQueryFactory.NewUpdate(context.Background()).Set("name", fftypes.NewUUID()) err := s.UpdateSubscription(context.Background(), "ns1", "name1", u) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) } func TestSubscriptionUpdateSelectNotFound(t *testing.T) { @@ -297,14 +297,14 @@ func TestSubscriptionUpdateFail(t *testing.T) { mock.ExpectRollback() u := database.SubscriptionQueryFactory.NewUpdate(context.Background()).Set("name", fftypes.NewUUID()) err := s.UpdateSubscription(context.Background(), "ns1", "name1", u) - assert.Regexp(t, "FF10117", err) + assert.Regexp(t, "FF00178", err) } func TestSubscriptionDeleteBeginFail(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.DeleteSubscriptionByID(context.Background(), "ns1", fftypes.NewUUID()) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) } func TestSubscriptionDeleteFail(t *testing.T) { @@ -315,5 +315,5 @@ func TestSubscriptionDeleteFail(t *testing.T) { ) mock.ExpectExec("DELETE .*").WillReturnError(fmt.Errorf("pop")) err := s.DeleteSubscriptionByID(context.Background(), "ns1", fftypes.NewUUID()) - assert.Regexp(t, "FF10118", err) + assert.Regexp(t, "FF00179", err) } diff --git a/internal/database/sqlcommon/tokenapproval_sql.go b/internal/database/sqlcommon/tokenapproval_sql.go index 67ac093653..87c2e73ec6 100644 --- a/internal/database/sqlcommon/tokenapproval_sql.go +++ b/internal/database/sqlcommon/tokenapproval_sql.go @@ -21,6 +21,7 @@ import ( "database/sql" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" @@ -64,13 +65,13 @@ var ( const tokenapprovalTable = "tokenapproval" func (s *SQLCommon) UpsertTokenApproval(ctx context.Context, approval *core.TokenApproval) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) - rows, _, err := s.queryTx(ctx, tokenapprovalTable, tx, + defer s.RollbackTx(ctx, tx, autoCommit) + rows, _, err := s.QueryTx(ctx, tokenapprovalTable, tx, sq.Select("seq"). From(tokenapprovalTable). Where(sq.Eq{ @@ -85,7 +86,7 @@ func (s *SQLCommon) UpsertTokenApproval(ctx context.Context, approval *core.Toke rows.Close() if existing { - if _, err = s.updateTx(ctx, tokenapprovalTable, tx, + if _, err = s.UpdateTx(ctx, tokenapprovalTable, tx, sq.Update(tokenapprovalTable). Set("local_id", approval.LocalID). Set("subject", approval.Subject). @@ -108,7 +109,7 @@ func (s *SQLCommon) UpsertTokenApproval(ctx context.Context, approval *core.Toke } } else { approval.Created = fftypes.Now() - if _, err = s.insertTx(ctx, tokenapprovalTable, tx, + if _, err = s.InsertTx(ctx, tokenapprovalTable, tx, sq.Insert(tokenapprovalTable). Columns(tokenApprovalColumns...). 
Values( @@ -135,7 +136,7 @@ func (s *SQLCommon) UpsertTokenApproval(ctx context.Context, approval *core.Toke return err } } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) tokenApprovalResult(ctx context.Context, row *sql.Rows) (*core.TokenApproval, error) { @@ -164,7 +165,7 @@ func (s *SQLCommon) tokenApprovalResult(ctx context.Context, row *sql.Rows) (*co } func (s *SQLCommon) getTokenApprovalPred(ctx context.Context, desc string, pred interface{}) (*core.TokenApproval, error) { - rows, _, err := s.query(ctx, tokenapprovalTable, + rows, _, err := s.Query(ctx, tokenapprovalTable, sq.Select(tokenApprovalColumns...). From(tokenapprovalTable). Where(pred), @@ -199,14 +200,14 @@ func (s *SQLCommon) GetTokenApprovalByProtocolID(ctx context.Context, namespace, }) } -func (s *SQLCommon) GetTokenApprovals(ctx context.Context, namespace string, filter database.Filter) (approvals []*core.TokenApproval, fr *database.FilterResult, err error) { - query, fop, fi, err := s.filterSelect(ctx, "", sq.Select(tokenApprovalColumns...).From(tokenapprovalTable), +func (s *SQLCommon) GetTokenApprovals(ctx context.Context, namespace string, filter ffapi.Filter) (approvals []*core.TokenApproval, fr *ffapi.FilterResult, err error) { + query, fop, fi, err := s.FilterSelect(ctx, "", sq.Select(tokenApprovalColumns...).From(tokenapprovalTable), filter, tokenApprovalFilterFieldMap, []interface{}{"seq"}, sq.Eq{"namespace": namespace}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, tokenapprovalTable, query) + rows, tx, err := s.Query(ctx, tokenapprovalTable, query) if err != nil { return nil, nil, err } @@ -221,30 +222,30 @@ func (s *SQLCommon) GetTokenApprovals(ctx context.Context, namespace string, fil approvals = append(approvals, d) } - return approvals, s.queryRes(ctx, tokenapprovalTable, tx, fop, fi), err + return approvals, s.QueryRes(ctx, tokenapprovalTable, tx, fop, fi), err } -func (s *SQLCommon) UpdateTokenApprovals(ctx context.Context, filter database.Filter, update database.Update) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) +func (s *SQLCommon) UpdateTokenApprovals(ctx context.Context, filter ffapi.Filter, update ffapi.Update) (err error) { + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) - query, err := s.buildUpdate(sq.Update(tokenapprovalTable), update, tokenApprovalFilterFieldMap) + query, err := s.BuildUpdate(sq.Update(tokenapprovalTable), update, tokenApprovalFilterFieldMap) if err != nil { return err } - query, err = s.filterUpdate(ctx, query, filter, tokenApprovalFilterFieldMap) + query, err = s.FilterUpdate(ctx, query, filter, tokenApprovalFilterFieldMap) if err != nil { return err } - _, err = s.updateTx(ctx, tokenapprovalTable, tx, query, nil /* no change events filter based update */) + _, err = s.UpdateTx(ctx, tokenapprovalTable, tx, query, nil /* no change events filter based update */) if err != nil { return err } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } diff --git a/internal/database/sqlcommon/tokenapproval_sql_test.go b/internal/database/sqlcommon/tokenapproval_sql_test.go index ba6a2e83f6..39e112808b 100644 --- a/internal/database/sqlcommon/tokenapproval_sql_test.go +++ b/internal/database/sqlcommon/tokenapproval_sql_test.go @@ -125,7 +125,7 @@ func TestUpsertApprovalFailBegin(t *testing.T) { s, mock := newMockProvider().init() 
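UpsertTokenApproval above follows the same upsert flow as the other collections: begin or join a transaction, probe for an existing row with QueryTx, then UpdateTx or InsertTx as appropriate, and finally CommitTx. A condensed sketch of that flow follows; upsertThing, the things table and its columns are illustrative only, and the helper signatures simply mirror the calls above.

// Condensed sketch of the shared upsert flow (upsertThing, "things" and its
// columns are illustrative; helper signatures mirror UpsertTokenApproval above).
func (s *SQLCommon) upsertThing(ctx context.Context, id *fftypes.UUID, name string) error {
	ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx)
	if err != nil {
		return err
	}
	defer s.RollbackTx(ctx, tx, autoCommit)

	// Probe for an existing row inside the same transaction
	rows, _, err := s.QueryTx(ctx, "things", tx,
		sq.Select("id").From("things").Where(sq.Eq{"id": id}))
	if err != nil {
		return err
	}
	existing := rows.Next()
	rows.Close()

	if existing {
		if _, err = s.UpdateTx(ctx, "things", tx,
			sq.Update("things").Set("name", name).Where(sq.Eq{"id": id}),
			nil /* optional change-event callback */); err != nil {
			return err
		}
	} else {
		if _, err = s.InsertTx(ctx, "things", tx,
			sq.Insert("things").Columns("id", "name").Values(id, name),
			nil /* optional change-event callback */); err != nil {
			return err
		}
	}
	return s.CommitTx(ctx, tx, autoCommit)
}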
mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.UpsertTokenApproval(context.Background(), &core.TokenApproval{}) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -134,7 +134,7 @@ func TestUpsertApprovalFailSelect(t *testing.T) { mock.ExpectBegin() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) err := s.UpsertTokenApproval(context.Background(), &core.TokenApproval{}) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -145,7 +145,7 @@ func TestUpsertApprovalFailInsert(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpsertTokenApproval(context.Background(), &core.TokenApproval{}) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -156,7 +156,7 @@ func TestUpsertApprovalFailUpdate(t *testing.T) { mock.ExpectExec("UPDATE .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpsertTokenApproval(context.Background(), &core.TokenApproval{}) - assert.Regexp(t, "FF10117", err) + assert.Regexp(t, "FF00178", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -167,7 +167,7 @@ func TestUpsertApprovalFailCommit(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnResult(sqlmock.NewResult(1, 1)) mock.ExpectCommit().WillReturnError(fmt.Errorf("pop")) err := s.UpsertTokenApproval(context.Background(), &core.TokenApproval{}) - assert.Regexp(t, "FF10119", err) + assert.Regexp(t, "FF00180", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -175,7 +175,7 @@ func TestGetApprovalByIDSelectFail(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) _, err := s.GetTokenApprovalByID(context.Background(), "ns1", fftypes.NewUUID()) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -201,7 +201,7 @@ func TestGetApprovalsQueryFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.TokenApprovalQueryFactory.NewFilter(context.Background()).Eq("subject", "") _, _, err := s.GetTokenApprovals(context.Background(), "ns1", f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } func TestGetApprovalsBuildQueryFail(t *testing.T) { @@ -227,7 +227,7 @@ func TestUpdateApprovalsFailBegin(t *testing.T) { f := database.TokenApprovalQueryFactory.NewFilter(context.Background()).Eq("subject", "test") u := database.TokenApprovalQueryFactory.NewUpdate(context.Background()).Set("active", false) err := s.UpdateTokenApprovals(context.Background(), f, u) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -257,5 +257,5 @@ func TestUpdateApprovalsUpdateFail(t *testing.T) { f := database.TokenApprovalQueryFactory.NewFilter(context.Background()).Eq("subject", "test") u := database.TokenApprovalQueryFactory.NewUpdate(context.Background()).Set("active", false) err := s.UpdateTokenApprovals(context.Background(), f, u) - assert.Regexp(t, "FF10117", err) + assert.Regexp(t, "FF00178", err) } diff --git a/internal/database/sqlcommon/tokenbalance_sql.go b/internal/database/sqlcommon/tokenbalance_sql.go index b35572f7e7..73945c354b 100644 --- a/internal/database/sqlcommon/tokenbalance_sql.go +++ 
b/internal/database/sqlcommon/tokenbalance_sql.go @@ -21,12 +21,13 @@ import ( "database/sql" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/dbsql" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" "github.com/hyperledger/firefly/internal/coremsgs" "github.com/hyperledger/firefly/pkg/core" - "github.com/hyperledger/firefly/pkg/database" ) const tokenbalanceTable = "tokenbalance" @@ -48,7 +49,7 @@ var ( } ) -func (s *SQLCommon) addTokenBalance(ctx context.Context, tx *txWrapper, transfer *core.TokenTransfer, key string, negate bool) error { +func (s *SQLCommon) addTokenBalance(ctx context.Context, tx *dbsql.TXWrapper, transfer *core.TokenTransfer, key string, negate bool) error { balance, err := s.GetTokenBalance(ctx, transfer.Namespace, transfer.Pool, transfer.TokenIndex, key) if err != nil { return err @@ -67,7 +68,7 @@ func (s *SQLCommon) addTokenBalance(ctx context.Context, tx *txWrapper, transfer } if balance != nil { - if _, err = s.updateTx(ctx, tokenbalanceTable, tx, + if _, err = s.UpdateTx(ctx, tokenbalanceTable, tx, sq.Update(tokenbalanceTable). Set("uri", transfer.URI). Set("balance", total). @@ -83,7 +84,7 @@ func (s *SQLCommon) addTokenBalance(ctx context.Context, tx *txWrapper, transfer return err } } else { - if _, err = s.insertTx(ctx, tokenbalanceTable, tx, + if _, err = s.InsertTx(ctx, tokenbalanceTable, tx, sq.Insert(tokenbalanceTable). Columns(tokenBalanceColumns...). Values( @@ -106,11 +107,11 @@ func (s *SQLCommon) addTokenBalance(ctx context.Context, tx *txWrapper, transfer } func (s *SQLCommon) UpdateTokenBalances(ctx context.Context, transfer *core.TokenTransfer) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) if transfer.From != "" { if err := s.addTokenBalance(ctx, tx, transfer, transfer.From, true); err != nil { @@ -123,7 +124,7 @@ func (s *SQLCommon) UpdateTokenBalances(ctx context.Context, transfer *core.Toke } } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) tokenBalanceResult(ctx context.Context, row *sql.Rows) (*core.TokenBalance, error) { @@ -145,7 +146,7 @@ func (s *SQLCommon) tokenBalanceResult(ctx context.Context, row *sql.Rows) (*cor } func (s *SQLCommon) getTokenBalancePred(ctx context.Context, desc string, pred interface{}) (*core.TokenBalance, error) { - rows, _, err := s.query(ctx, tokenbalanceTable, + rows, _, err := s.Query(ctx, tokenbalanceTable, sq.Select(tokenBalanceColumns...). From(tokenbalanceTable). 
Where(pred), @@ -178,14 +179,14 @@ func (s *SQLCommon) GetTokenBalance(ctx context.Context, namespace string, poolI }) } -func (s *SQLCommon) GetTokenBalances(ctx context.Context, namespace string, filter database.Filter) ([]*core.TokenBalance, *database.FilterResult, error) { - query, fop, fi, err := s.filterSelect(ctx, "", sq.Select(tokenBalanceColumns...).From(tokenbalanceTable), +func (s *SQLCommon) GetTokenBalances(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.TokenBalance, *ffapi.FilterResult, error) { + query, fop, fi, err := s.FilterSelect(ctx, "", sq.Select(tokenBalanceColumns...).From(tokenbalanceTable), filter, tokenBalanceFilterFieldMap, []interface{}{"seq"}, sq.Eq{"namespace": namespace}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, tokenbalanceTable, query) + rows, tx, err := s.Query(ctx, tokenbalanceTable, query) if err != nil { return nil, nil, err } @@ -200,18 +201,18 @@ func (s *SQLCommon) GetTokenBalances(ctx context.Context, namespace string, filt accounts = append(accounts, d) } - return accounts, s.queryRes(ctx, tokenbalanceTable, tx, fop, fi), err + return accounts, s.QueryRes(ctx, tokenbalanceTable, tx, fop, fi), err } -func (s *SQLCommon) GetTokenAccounts(ctx context.Context, namespace string, filter database.Filter) ([]*core.TokenAccount, *database.FilterResult, error) { - query, fop, fi, err := s.filterSelect(ctx, "", +func (s *SQLCommon) GetTokenAccounts(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.TokenAccount, *ffapi.FilterResult, error) { + query, fop, fi, err := s.FilterSelect(ctx, "", sq.Select("key", "MAX(updated) AS updated", "MAX(seq) AS seq").From(tokenbalanceTable).GroupBy("key"), filter, tokenBalanceFilterFieldMap, []interface{}{"seq"}, sq.Eq{"namespace": namespace}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, tokenbalanceTable, query) + rows, tx, err := s.Query(ctx, tokenbalanceTable, query) if err != nil { return nil, nil, err } @@ -228,18 +229,18 @@ func (s *SQLCommon) GetTokenAccounts(ctx context.Context, namespace string, filt accounts = append(accounts, &account) } - return accounts, s.queryRes(ctx, tokenbalanceTable, tx, fop, fi), err + return accounts, s.QueryRes(ctx, tokenbalanceTable, tx, fop, fi), err } -func (s *SQLCommon) GetTokenAccountPools(ctx context.Context, namespace, key string, filter database.Filter) ([]*core.TokenAccountPool, *database.FilterResult, error) { - query, fop, fi, err := s.filterSelect(ctx, "", +func (s *SQLCommon) GetTokenAccountPools(ctx context.Context, namespace, key string, filter ffapi.Filter) ([]*core.TokenAccountPool, *ffapi.FilterResult, error) { + query, fop, fi, err := s.FilterSelect(ctx, "", sq.Select("pool_id", "MAX(updated) AS updated", "MAX(seq) AS seq").From(tokenbalanceTable).GroupBy("pool_id"), filter, tokenBalanceFilterFieldMap, []interface{}{"seq"}, sq.Eq{"key": key, "namespace": namespace}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, tokenbalanceTable, query) + rows, tx, err := s.Query(ctx, tokenbalanceTable, query) if err != nil { return nil, nil, err } @@ -256,5 +257,5 @@ func (s *SQLCommon) GetTokenAccountPools(ctx context.Context, namespace, key str pools = append(pools, &pool) } - return pools, s.queryRes(ctx, tokenbalanceTable, tx, fop, fi), err + return pools, s.QueryRes(ctx, tokenbalanceTable, tx, fop, fi), err } diff --git a/internal/database/sqlcommon/tokenbalance_sql_test.go b/internal/database/sqlcommon/tokenbalance_sql_test.go index f0e7f0d026..ea4765626e 
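A related signature change runs through the internal helpers in this file and in verifier_sql.go below: functions that previously took the package-private *txWrapper now take the exported *dbsql.TXWrapper from firefly-common, so they compose with BeginOrUseTx, UpdateTx and CommitTx. A minimal sketch of the resulting shape follows; applyDelta and its WHERE predicate are illustrative only, and the real addTokenBalance reads the current row first before choosing UpdateTx or InsertTx.

// Minimal sketch: the *dbsql.TXWrapper parameter replaces the old *txWrapper
// (applyDelta and the column predicate are illustrative only).
func (s *SQLCommon) applyDelta(ctx context.Context, tx *dbsql.TXWrapper, transfer *core.TokenTransfer) error {
	_, err := s.UpdateTx(ctx, tokenbalanceTable, tx,
		sq.Update(tokenbalanceTable).
			Set("uri", transfer.URI).
			Set("updated", fftypes.Now()).
			Where(sq.Eq{"namespace": transfer.Namespace, "key": transfer.To}),
		nil /* change-event callback is optional */)
	return err
}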
100644 --- a/internal/database/sqlcommon/tokenbalance_sql_test.go +++ b/internal/database/sqlcommon/tokenbalance_sql_test.go @@ -137,7 +137,7 @@ func TestUpdateTokenBalancesFailBegin(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.UpdateTokenBalances(context.Background(), &core.TokenTransfer{}) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -146,7 +146,7 @@ func TestUpdateTokenBalancesFailSelect(t *testing.T) { mock.ExpectBegin() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) err := s.UpdateTokenBalances(context.Background(), &core.TokenTransfer{To: "0x0"}) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -157,7 +157,7 @@ func TestUpdateTokenBalancesFailInsert(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpdateTokenBalances(context.Background(), &core.TokenTransfer{From: "0x0"}) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -168,7 +168,7 @@ func TestUpdateTokenBalancesFailInsert2(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpdateTokenBalances(context.Background(), &core.TokenTransfer{To: "0x0"}) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -179,7 +179,7 @@ func TestUpdateTokenBalancesFailUpdate(t *testing.T) { mock.ExpectExec("UPDATE .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpdateTokenBalances(context.Background(), &core.TokenTransfer{To: "0x0"}) - assert.Regexp(t, "FF10117", err) + assert.Regexp(t, "FF00178", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -190,7 +190,7 @@ func TestUpdateTokenBalancesFailCommit(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnResult(sqlmock.NewResult(1, 1)) mock.ExpectCommit().WillReturnError(fmt.Errorf("pop")) err := s.UpdateTokenBalances(context.Background(), &core.TokenTransfer{To: "0x0"}) - assert.Regexp(t, "FF10119", err) + assert.Regexp(t, "FF00180", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -216,7 +216,7 @@ func TestGetTokenBalancesQueryFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.TokenBalanceQueryFactory.NewFilter(context.Background()).Eq("pool", "") _, _, err := s.GetTokenBalances(context.Background(), "ns1", f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -241,7 +241,7 @@ func TestGetTokenAccountsQueryFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.TokenBalanceQueryFactory.NewFilter(context.Background()).And() _, _, err := s.GetTokenAccounts(context.Background(), "ns1", f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -266,7 +266,7 @@ func TestGetTokenAccountPoolsQueryFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.TokenBalanceQueryFactory.NewFilter(context.Background()).And() _, _, err := s.GetTokenAccountPools(context.Background(), "ns1", "0x1", f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } diff --git 
a/internal/database/sqlcommon/tokenpool_sql.go b/internal/database/sqlcommon/tokenpool_sql.go index e0c49f2e86..b06e32d1b5 100644 --- a/internal/database/sqlcommon/tokenpool_sql.go +++ b/internal/database/sqlcommon/tokenpool_sql.go @@ -21,6 +21,7 @@ import ( "database/sql" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" @@ -57,13 +58,13 @@ var ( const tokenpoolTable = "tokenpool" func (s *SQLCommon) UpsertTokenPool(ctx context.Context, pool *core.TokenPool) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) - rows, _, err := s.queryTx(ctx, tokenpoolTable, tx, + rows, _, err := s.QueryTx(ctx, tokenpoolTable, tx, sq.Select("id"). From(tokenpoolTable). Where(sq.Eq{ @@ -88,7 +89,7 @@ func (s *SQLCommon) UpsertTokenPool(ctx context.Context, pool *core.TokenPool) ( rows.Close() if existing { - if _, err = s.updateTx(ctx, tokenpoolTable, tx, + if _, err = s.UpdateTx(ctx, tokenpoolTable, tx, sq.Update(tokenpoolTable). Set("name", pool.Name). Set("standard", pool.Standard). @@ -111,7 +112,7 @@ func (s *SQLCommon) UpsertTokenPool(ctx context.Context, pool *core.TokenPool) ( } } else { pool.Created = fftypes.Now() - if _, err = s.insertTx(ctx, tokenpoolTable, tx, + if _, err = s.InsertTx(ctx, tokenpoolTable, tx, sq.Insert(tokenpoolTable). Columns(tokenPoolColumns...). Values( @@ -139,7 +140,7 @@ func (s *SQLCommon) UpsertTokenPool(ctx context.Context, pool *core.TokenPool) ( } } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) tokenPoolResult(ctx context.Context, row *sql.Rows) (*core.TokenPool, error) { @@ -168,7 +169,7 @@ func (s *SQLCommon) tokenPoolResult(ctx context.Context, row *sql.Rows) (*core.T } func (s *SQLCommon) getTokenPoolPred(ctx context.Context, desc string, pred interface{}) (*core.TokenPool, error) { - rows, _, err := s.query(ctx, tokenpoolTable, + rows, _, err := s.Query(ctx, tokenpoolTable, sq.Select(tokenPoolColumns...). From(tokenpoolTable). 
Where(pred), @@ -207,14 +208,14 @@ func (s *SQLCommon) GetTokenPoolByLocator(ctx context.Context, namespace, connec }) } -func (s *SQLCommon) GetTokenPools(ctx context.Context, namespace string, filter database.Filter) (message []*core.TokenPool, fr *database.FilterResult, err error) { - query, fop, fi, err := s.filterSelect(ctx, "", sq.Select(tokenPoolColumns...).From("tokenpool"), +func (s *SQLCommon) GetTokenPools(ctx context.Context, namespace string, filter ffapi.Filter) (message []*core.TokenPool, fr *ffapi.FilterResult, err error) { + query, fop, fi, err := s.FilterSelect(ctx, "", sq.Select(tokenPoolColumns...).From("tokenpool"), filter, tokenPoolFilterFieldMap, []interface{}{"seq"}, sq.Eq{"namespace": namespace}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, tokenpoolTable, query) + rows, tx, err := s.Query(ctx, tokenpoolTable, query) if err != nil { return nil, nil, err } @@ -229,5 +230,5 @@ func (s *SQLCommon) GetTokenPools(ctx context.Context, namespace string, filter pools = append(pools, d) } - return pools, s.queryRes(ctx, tokenpoolTable, tx, fop, fi), err + return pools, s.QueryRes(ctx, tokenpoolTable, tx, fop, fi), err } diff --git a/internal/database/sqlcommon/tokenpool_sql_test.go b/internal/database/sqlcommon/tokenpool_sql_test.go index 9cbb14fb9c..2b08132562 100644 --- a/internal/database/sqlcommon/tokenpool_sql_test.go +++ b/internal/database/sqlcommon/tokenpool_sql_test.go @@ -126,7 +126,7 @@ func TestUpsertTokenPoolFailBegin(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.UpsertTokenPool(context.Background(), &core.TokenPool{}) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -135,7 +135,7 @@ func TestUpsertTokenPoolFailSelect(t *testing.T) { mock.ExpectBegin() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) err := s.UpsertTokenPool(context.Background(), &core.TokenPool{}) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -146,7 +146,7 @@ func TestUpsertTokenPoolFailInsert(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpsertTokenPool(context.Background(), &core.TokenPool{}) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -157,7 +157,7 @@ func TestUpsertTokenPoolFailUpdate(t *testing.T) { mock.ExpectExec("UPDATE .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpsertTokenPool(context.Background(), &core.TokenPool{}) - assert.Regexp(t, "FF10117", err) + assert.Regexp(t, "FF00178", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -168,7 +168,7 @@ func TestUpsertTokenPoolFailCommit(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnResult(sqlmock.NewResult(1, 1)) mock.ExpectCommit().WillReturnError(fmt.Errorf("pop")) err := s.UpsertTokenPool(context.Background(), &core.TokenPool{}) - assert.Regexp(t, "FF10119", err) + assert.Regexp(t, "FF00180", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -195,7 +195,7 @@ func TestGetTokenPoolByIDSelectFail(t *testing.T) { poolID := fftypes.NewUUID() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) _, err := s.GetTokenPoolByID(context.Background(), "ns1", poolID) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -223,7 +223,7 
@@ func TestGetTokenPoolsQueryFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.TokenPoolQueryFactory.NewFilter(context.Background()).Eq("id", "") _, _, err := s.GetTokenPools(context.Background(), "ns1", f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } diff --git a/internal/database/sqlcommon/tokentransfer_sql.go b/internal/database/sqlcommon/tokentransfer_sql.go index de23423656..4673c1f9a8 100644 --- a/internal/database/sqlcommon/tokentransfer_sql.go +++ b/internal/database/sqlcommon/tokentransfer_sql.go @@ -21,6 +21,7 @@ import ( "database/sql" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" @@ -69,13 +70,13 @@ var ( const tokentransferTable = "tokentransfer" func (s *SQLCommon) UpsertTokenTransfer(ctx context.Context, transfer *core.TokenTransfer) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) - rows, _, err := s.queryTx(ctx, tokentransferTable, tx, + rows, _, err := s.QueryTx(ctx, tokentransferTable, tx, sq.Select("seq"). From(tokentransferTable). Where(sq.Eq{ @@ -90,7 +91,7 @@ func (s *SQLCommon) UpsertTokenTransfer(ctx context.Context, transfer *core.Toke rows.Close() if existing { - if _, err = s.updateTx(ctx, tokentransferTable, tx, + if _, err = s.UpdateTx(ctx, tokentransferTable, tx, sq.Update(tokentransferTable). Set("type", transfer.Type). Set("local_id", transfer.LocalID). @@ -116,7 +117,7 @@ func (s *SQLCommon) UpsertTokenTransfer(ctx context.Context, transfer *core.Toke } } else { transfer.Created = fftypes.Now() - if _, err = s.insertTx(ctx, tokentransferTable, tx, + if _, err = s.InsertTx(ctx, tokentransferTable, tx, sq.Insert(tokentransferTable). Columns(tokenTransferColumns...). Values( @@ -147,7 +148,7 @@ func (s *SQLCommon) UpsertTokenTransfer(ctx context.Context, transfer *core.Toke } } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) tokenTransferResult(ctx context.Context, row *sql.Rows) (*core.TokenTransfer, error) { @@ -179,7 +180,7 @@ func (s *SQLCommon) tokenTransferResult(ctx context.Context, row *sql.Rows) (*co } func (s *SQLCommon) getTokenTransferPred(ctx context.Context, desc string, pred interface{}) (*core.TokenTransfer, error) { - rows, _, err := s.query(ctx, tokentransferTable, + rows, _, err := s.Query(ctx, tokentransferTable, sq.Select(tokenTransferColumns...). From(tokentransferTable). 
Where(pred), @@ -214,14 +215,14 @@ func (s *SQLCommon) GetTokenTransferByProtocolID(ctx context.Context, namespace, }) } -func (s *SQLCommon) GetTokenTransfers(ctx context.Context, namespace string, filter database.Filter) (message []*core.TokenTransfer, fr *database.FilterResult, err error) { - query, fop, fi, err := s.filterSelect(ctx, "", sq.Select(tokenTransferColumns...).From(tokentransferTable), +func (s *SQLCommon) GetTokenTransfers(ctx context.Context, namespace string, filter ffapi.Filter) (message []*core.TokenTransfer, fr *ffapi.FilterResult, err error) { + query, fop, fi, err := s.FilterSelect(ctx, "", sq.Select(tokenTransferColumns...).From(tokentransferTable), filter, tokenTransferFilterFieldMap, []interface{}{"seq"}, sq.Eq{"namespace": namespace}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, tokentransferTable, query) + rows, tx, err := s.Query(ctx, tokentransferTable, query) if err != nil { return nil, nil, err } @@ -236,5 +237,5 @@ func (s *SQLCommon) GetTokenTransfers(ctx context.Context, namespace string, fil transfers = append(transfers, d) } - return transfers, s.queryRes(ctx, tokentransferTable, tx, fop, fi), err + return transfers, s.QueryRes(ctx, tokentransferTable, tx, fop, fi), err } diff --git a/internal/database/sqlcommon/tokentransfer_sql_test.go b/internal/database/sqlcommon/tokentransfer_sql_test.go index 352359869c..072b7487b3 100644 --- a/internal/database/sqlcommon/tokentransfer_sql_test.go +++ b/internal/database/sqlcommon/tokentransfer_sql_test.go @@ -119,7 +119,7 @@ func TestUpsertTokenTransferFailBegin(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.UpsertTokenTransfer(context.Background(), &core.TokenTransfer{}) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -128,7 +128,7 @@ func TestUpsertTokenTransferFailSelect(t *testing.T) { mock.ExpectBegin() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) err := s.UpsertTokenTransfer(context.Background(), &core.TokenTransfer{}) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -139,7 +139,7 @@ func TestUpsertTokenTransferFailInsert(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpsertTokenTransfer(context.Background(), &core.TokenTransfer{}) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -150,7 +150,7 @@ func TestUpsertTokenTransferFailUpdate(t *testing.T) { mock.ExpectExec("UPDATE .*").WillReturnError(fmt.Errorf("pop")) mock.ExpectRollback() err := s.UpsertTokenTransfer(context.Background(), &core.TokenTransfer{}) - assert.Regexp(t, "FF10117", err) + assert.Regexp(t, "FF00178", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -161,7 +161,7 @@ func TestUpsertTokenTransferFailCommit(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnResult(sqlmock.NewResult(1, 1)) mock.ExpectCommit().WillReturnError(fmt.Errorf("pop")) err := s.UpsertTokenTransfer(context.Background(), &core.TokenTransfer{}) - assert.Regexp(t, "FF10119", err) + assert.Regexp(t, "FF00180", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -169,7 +169,7 @@ func TestGetTokenTransferByIDSelectFail(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) _, err := 
s.GetTokenTransferByID(context.Background(), "ns1", fftypes.NewUUID()) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -195,7 +195,7 @@ func TestGetTokenTransfersQueryFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.TokenTransferQueryFactory.NewFilter(context.Background()).Eq("protocolid", "") _, _, err := s.GetTokenTransfers(context.Background(), "ns1", f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } diff --git a/internal/database/sqlcommon/transaction_sql.go b/internal/database/sqlcommon/transaction_sql.go index 6038acd418..4d05045f2f 100644 --- a/internal/database/sqlcommon/transaction_sql.go +++ b/internal/database/sqlcommon/transaction_sql.go @@ -21,6 +21,7 @@ import ( "database/sql" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" @@ -48,15 +49,15 @@ var ( const transactionsTable = "transactions" func (s *SQLCommon) InsertTransaction(ctx context.Context, transaction *core.Transaction) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) transaction.Created = fftypes.Now() var seq int64 - if seq, err = s.insertTxExt(ctx, transactionsTable, tx, + if seq, err = s.InsertTxExt(ctx, transactionsTable, tx, sq.Insert(transactionsTable). Columns(transactionColumns...). Values( @@ -86,7 +87,7 @@ func (s *SQLCommon) InsertTransaction(ctx context.Context, transaction *core.Tra return err } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } func (s *SQLCommon) transactionResult(ctx context.Context, row *sql.Rows) (*core.Transaction, error) { @@ -107,7 +108,7 @@ func (s *SQLCommon) transactionResult(ctx context.Context, row *sql.Rows) (*core func (s *SQLCommon) GetTransactionByID(ctx context.Context, namespace string, id *fftypes.UUID) (message *core.Transaction, err error) { - rows, _, err := s.query(ctx, transactionsTable, + rows, _, err := s.Query(ctx, transactionsTable, sq.Select(transactionColumns...). From(transactionsTable). 
Where(sq.Eq{"id": id, "namespace": namespace}), @@ -130,14 +131,14 @@ func (s *SQLCommon) GetTransactionByID(ctx context.Context, namespace string, id return transaction, nil } -func (s *SQLCommon) GetTransactions(ctx context.Context, namespace string, filter database.Filter) (message []*core.Transaction, fr *database.FilterResult, err error) { +func (s *SQLCommon) GetTransactions(ctx context.Context, namespace string, filter ffapi.Filter) (message []*core.Transaction, fr *ffapi.FilterResult, err error) { - query, fop, fi, err := s.filterSelect(ctx, "", sq.Select(transactionColumns...).From(transactionsTable), filter, transactionFilterFieldMap, []interface{}{"sequence"}, sq.Eq{"namespace": namespace}) + query, fop, fi, err := s.FilterSelect(ctx, "", sq.Select(transactionColumns...).From(transactionsTable), filter, transactionFilterFieldMap, []interface{}{"sequence"}, sq.Eq{"namespace": namespace}) if err != nil { return nil, nil, err } - rows, tx, err := s.query(ctx, transactionsTable, query) + rows, tx, err := s.Query(ctx, transactionsTable, query) if err != nil { return nil, nil, err } @@ -152,28 +153,28 @@ func (s *SQLCommon) GetTransactions(ctx context.Context, namespace string, filte transactions = append(transactions, transaction) } - return transactions, s.queryRes(ctx, transactionsTable, tx, fop, fi), err + return transactions, s.QueryRes(ctx, transactionsTable, tx, fop, fi), err } -func (s *SQLCommon) UpdateTransaction(ctx context.Context, namespace string, id *fftypes.UUID, update database.Update) (err error) { +func (s *SQLCommon) UpdateTransaction(ctx context.Context, namespace string, id *fftypes.UUID, update ffapi.Update) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx) if err != nil { return err } - defer s.rollbackTx(ctx, tx, autoCommit) + defer s.RollbackTx(ctx, tx, autoCommit) - query, err := s.buildUpdate(sq.Update(transactionsTable), update, transactionFilterFieldMap) + query, err := s.BuildUpdate(sq.Update(transactionsTable), update, transactionFilterFieldMap) if err != nil { return err } query = query.Where(sq.Eq{"id": id, "namespace": namespace}) - _, err = s.updateTx(ctx, transactionsTable, tx, query, nil /* no change evnents for filter based updates */) + _, err = s.UpdateTx(ctx, transactionsTable, tx, query, nil /* no change evnents for filter based updates */) if err != nil { return err } - return s.commitTx(ctx, tx, autoCommit) + return s.CommitTx(ctx, tx, autoCommit) } diff --git a/internal/database/sqlcommon/transaction_sql_test.go b/internal/database/sqlcommon/transaction_sql_test.go index 1eb7a79672..4d378834bb 100644 --- a/internal/database/sqlcommon/transaction_sql_test.go +++ b/internal/database/sqlcommon/transaction_sql_test.go @@ -42,7 +42,7 @@ func TestTransactionE2EWithDB(t *testing.T) { ID: transactionID, Type: core.TransactionTypeBatchPin, Namespace: "ns1", - BlockchainIDs: core.FFStringArray{"tx1"}, + BlockchainIDs: fftypes.FFStringArray{"tx1"}, } s.callbacks.On("UUIDCollectionNSEvent", database.CollectionTransactions, core.ChangeEventTypeCreated, "ns1", transactionID, mock.Anything).Return() @@ -83,7 +83,7 @@ func TestTransactionE2EWithDB(t *testing.T) { // Update up := database.TransactionQueryFactory.NewUpdate(ctx). - Set("blockchainids", core.FFStringArray{"0x12345", "0x23456"}). + Set("blockchainids", fftypes.FFStringArray{"0x12345", "0x23456"}). 
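FFStringArray also moves with this change: it is now the fftypes.FFStringArray type from firefly-common rather than core.FFStringArray, as the BlockchainIDs field above and the Topics/Pins fields in the definitions and events tests below show. Call sites only change their import; a small illustration with placeholder values follows.

// Only the import changes: core.FFStringArray becomes fftypes.FFStringArray
// from github.com/hyperledger/firefly-common/pkg/fftypes (values illustrative).
msg := &core.Message{
	Header: core.MessageHeader{
		ID:     fftypes.NewUUID(),
		Type:   core.MessageTypeDefinition,
		Topics: fftypes.FFStringArray{"topic1"},
	},
	Pins: fftypes.FFStringArray{"pin1"},
}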
Set("idempotencykey", "testKey") err = s.UpdateTransaction(ctx, "ns1", transaction.ID, up) assert.NoError(t, err) @@ -112,7 +112,7 @@ func TestInsertTransactionFailBegin(t *testing.T) { s, mock := newMockProvider().init() mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) err := s.InsertTransaction(context.Background(), &core.Transaction{}) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -123,7 +123,7 @@ func TestInsertTransactionFailInsert(t *testing.T) { mock.ExpectRollback() transactionID := fftypes.NewUUID() err := s.InsertTransaction(context.Background(), &core.Transaction{ID: transactionID}) - assert.Regexp(t, "FF10116", err) + assert.Regexp(t, "FF00177", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -146,7 +146,7 @@ func TestInsertTransactionFailCommit(t *testing.T) { mock.ExpectExec("INSERT .*").WillReturnResult(sqlmock.NewResult(1, 1)) mock.ExpectCommit().WillReturnError(fmt.Errorf("pop")) err := s.InsertTransaction(context.Background(), &core.Transaction{ID: transactionID}) - assert.Regexp(t, "FF10119", err) + assert.Regexp(t, "FF00180", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -155,7 +155,7 @@ func TestGetTransactionByIDSelectFail(t *testing.T) { transactionID := fftypes.NewUUID() mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) _, err := s.GetTransactionByID(context.Background(), "ns1", transactionID) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -183,7 +183,7 @@ func TestGetTransactionsQueryFail(t *testing.T) { mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) f := database.TransactionQueryFactory.NewFilter(context.Background()).Eq("id", "") _, _, err := s.GetTransactions(context.Background(), "ns1", f) - assert.Regexp(t, "FF10115", err) + assert.Regexp(t, "FF00176", err) assert.NoError(t, mock.ExpectationsWereMet()) } @@ -208,7 +208,7 @@ func TestTransactionUpdateBeginFail(t *testing.T) { mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) u := database.TransactionQueryFactory.NewUpdate(context.Background()).Set("id", "anything") err := s.UpdateTransaction(context.Background(), "ns1", fftypes.NewUUID(), u) - assert.Regexp(t, "FF10114", err) + assert.Regexp(t, "FF00175", err) } func TestTransactionUpdateBuildQueryFail(t *testing.T) { @@ -226,5 +226,5 @@ func TestTransactionUpdateFail(t *testing.T) { mock.ExpectRollback() u := database.TransactionQueryFactory.NewUpdate(context.Background()).Set("id", fftypes.NewUUID()) err := s.UpdateTransaction(context.Background(), "ns1", fftypes.NewUUID(), u) - assert.Regexp(t, "FF10117", err) + assert.Regexp(t, "FF00178", err) } diff --git a/internal/database/sqlcommon/verifier_sql.go b/internal/database/sqlcommon/verifier_sql.go index d62239d024..c819b90326 100644 --- a/internal/database/sqlcommon/verifier_sql.go +++ b/internal/database/sqlcommon/verifier_sql.go @@ -21,6 +21,8 @@ import ( "database/sql" sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly-common/pkg/dbsql" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" @@ -45,8 +47,8 @@ var ( const verifiersTable = "verifiers" -func (s *SQLCommon) attemptVerifierUpdate(ctx context.Context, tx *txWrapper, verifier *core.Verifier) (int64, error) { - return s.updateTx(ctx, verifiersTable, tx, +func (s *SQLCommon) 
attemptVerifierUpdate(ctx context.Context, tx *dbsql.TXWrapper, verifier *core.Verifier) (int64, error) {
+	return s.UpdateTx(ctx, verifiersTable, tx,
 		sq.Update(verifiersTable).
 			Set("identity", verifier.Identity).
 			Set("vtype", verifier.Type).
@@ -59,9 +61,9 @@ func (s *SQLCommon) attemptVerifierUpdate(ctx context.Context, tx *txWrapper, ve
 		})
 }
-func (s *SQLCommon) attemptVerifierInsert(ctx context.Context, tx *txWrapper, verifier *core.Verifier, requestConflictEmptyResult bool) (err error) {
+func (s *SQLCommon) attemptVerifierInsert(ctx context.Context, tx *dbsql.TXWrapper, verifier *core.Verifier, requestConflictEmptyResult bool) (err error) {
 	verifier.Created = fftypes.Now()
-	_, err = s.insertTxExt(ctx, verifiersTable, tx,
+	_, err = s.InsertTxExt(ctx, verifiersTable, tx,
 		sq.Insert(verifiersTable).
 			Columns(verifierColumns...).
 			Values(
@@ -79,11 +81,11 @@ func (s *SQLCommon) attemptVerifierInsert(ctx context.Context, tx *txWrapper, ve
 }
 func (s *SQLCommon) UpsertVerifier(ctx context.Context, verifier *core.Verifier, optimization database.UpsertOptimization) (err error) {
-	ctx, tx, autoCommit, err := s.beginOrUseTx(ctx)
+	ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx)
 	if err != nil {
 		return err
 	}
-	defer s.rollbackTx(ctx, tx, autoCommit)
+	defer s.RollbackTx(ctx, tx, autoCommit)
 	optimized := false
 	if optimization == database.UpsertOptimizationNew {
@@ -96,7 +98,7 @@ func (s *SQLCommon) UpsertVerifier(ctx context.Context, verifier *core.Verifier,
 	if !optimized {
 		// Do a select within the transaction to detemine if the UUID already exists
-		msgRows, _, err := s.queryTx(ctx, verifiersTable, tx,
+		msgRows, _, err := s.QueryTx(ctx, verifiersTable, tx,
 			sq.Select("hash").
 				From(verifiersTable).
 				Where(sq.Eq{
@@ -122,7 +124,7 @@ func (s *SQLCommon) UpsertVerifier(ctx context.Context, verifier *core.Verifier,
 		}
 	}
-	return s.commitTx(ctx, tx, autoCommit)
+	return s.CommitTx(ctx, tx, autoCommit)
 }
 func (s *SQLCommon) verifierResult(ctx context.Context, row *sql.Rows) (*core.Verifier, error) {
@@ -143,7 +145,7 @@ func (s *SQLCommon) verifierResult(ctx context.Context, row *sql.Rows) (*core.Ve
 func (s *SQLCommon) getVerifierPred(ctx context.Context, desc string, pred interface{}) (verifier *core.Verifier, err error) {
-	rows, _, err := s.query(ctx, verifiersTable,
+	rows, _, err := s.Query(ctx, verifiersTable,
 		sq.Select(verifierColumns...).
 			From(verifiersTable).
 			Where(pred),
@@ -169,14 +171,14 @@ func (s *SQLCommon) GetVerifierByHash(ctx context.Context, namespace string, has
 	return s.getVerifierPred(ctx, hash.String(), sq.Eq{"hash": hash, "namespace": namespace})
 }
-func (s *SQLCommon) GetVerifiers(ctx context.Context, namespace string, filter database.Filter) (verifiers []*core.Verifier, fr *database.FilterResult, err error) {
+func (s *SQLCommon) GetVerifiers(ctx context.Context, namespace string, filter ffapi.Filter) (verifiers []*core.Verifier, fr *ffapi.FilterResult, err error) {
-	query, fop, fi, err := s.filterSelect(ctx, "", sq.Select(verifierColumns...).From(verifiersTable), filter, verifierFilterFieldMap, []interface{}{"sequence"}, sq.Eq{"namespace": namespace})
+	query, fop, fi, err := s.FilterSelect(ctx, "", sq.Select(verifierColumns...).From(verifiersTable), filter, verifierFilterFieldMap, []interface{}{"sequence"}, sq.Eq{"namespace": namespace})
 	if err != nil {
 		return nil, nil, err
 	}
-	rows, tx, err := s.query(ctx, verifiersTable, query)
+	rows, tx, err := s.Query(ctx, verifiersTable, query)
 	if err != nil {
 		return nil, nil, err
 	}
@@ -191,6 +193,6 @@ func (s *SQLCommon) GetVerifiers(ctx context.Context, namespace string, filter d
 		verifiers = append(verifiers, d)
 	}
-	return verifiers, s.queryRes(ctx, verifiersTable, tx, fop, fi), err
+	return verifiers, s.QueryRes(ctx, verifiersTable, tx, fop, fi), err
 }
diff --git a/internal/database/sqlcommon/verifier_sql_test.go b/internal/database/sqlcommon/verifier_sql_test.go
index 0544eb8939..a6e70ee1e8 100644
--- a/internal/database/sqlcommon/verifier_sql_test.go
+++ b/internal/database/sqlcommon/verifier_sql_test.go
@@ -103,7 +103,7 @@ func TestUpsertVerifierFailBegin(t *testing.T) {
 	s, mock := newMockProvider().init()
 	mock.ExpectBegin().WillReturnError(fmt.Errorf("pop"))
 	err := s.UpsertVerifier(context.Background(), &core.Verifier{}, database.UpsertOptimizationSkip)
-	assert.Regexp(t, "FF10114", err)
+	assert.Regexp(t, "FF00175", err)
 	assert.NoError(t, mock.ExpectationsWereMet())
 }
@@ -113,7 +113,7 @@ func TestUpsertVerifierFailSelect(t *testing.T) {
 	mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop"))
 	mock.ExpectRollback()
 	err := s.UpsertVerifier(context.Background(), &core.Verifier{Hash: fftypes.NewRandB32()}, database.UpsertOptimizationSkip)
-	assert.Regexp(t, "FF10115", err)
+	assert.Regexp(t, "FF00176", err)
 	assert.NoError(t, mock.ExpectationsWereMet())
 }
@@ -124,7 +124,7 @@ func TestUpsertVerifierFailInsert(t *testing.T) {
 	mock.ExpectExec("INSERT .*").WillReturnError(fmt.Errorf("pop"))
 	mock.ExpectRollback()
 	err := s.UpsertVerifier(context.Background(), &core.Verifier{Hash: fftypes.NewRandB32()}, database.UpsertOptimizationSkip)
-	assert.Regexp(t, "FF10116", err)
+	assert.Regexp(t, "FF00177", err)
 	assert.NoError(t, mock.ExpectationsWereMet())
 }
@@ -136,7 +136,7 @@ func TestUpsertVerifierFailUpdate(t *testing.T) {
 	mock.ExpectExec("UPDATE .*").WillReturnError(fmt.Errorf("pop"))
 	mock.ExpectRollback()
 	err := s.UpsertVerifier(context.Background(), &core.Verifier{Hash: fftypes.NewRandB32()}, database.UpsertOptimizationSkip)
-	assert.Regexp(t, "FF10117", err)
+	assert.Regexp(t, "FF00178", err)
 	assert.NoError(t, mock.ExpectationsWereMet())
 }
@@ -147,7 +147,7 @@ func TestUpsertVerifierFailCommit(t *testing.T) {
 	mock.ExpectExec("INSERT .*").WillReturnResult(sqlmock.NewResult(1, 1))
 	mock.ExpectCommit().WillReturnError(fmt.Errorf("pop"))
 	err := s.UpsertVerifier(context.Background(), &core.Verifier{Hash: fftypes.NewRandB32()}, database.UpsertOptimizationSkip)
-	assert.Regexp(t,
"FF10119", err)
+	assert.Regexp(t, "FF00180", err)
 	assert.NoError(t, mock.ExpectationsWereMet())
 }
@@ -155,7 +155,7 @@ func TestGetVerifierByHashSelectFail(t *testing.T) {
 	s, mock := newMockProvider().init()
 	mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop"))
 	_, err := s.GetVerifierByHash(context.Background(), "ns1", fftypes.NewRandB32())
-	assert.Regexp(t, "FF10115", err)
+	assert.Regexp(t, "FF00176", err)
 	assert.NoError(t, mock.ExpectationsWereMet())
 }
@@ -163,7 +163,7 @@ func TestGetVerifierByNameSelectFail(t *testing.T) {
 	s, mock := newMockProvider().init()
 	mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop"))
 	_, err := s.GetVerifierByValue(context.Background(), core.VerifierTypeEthAddress, "ff_system", "0x12345")
-	assert.Regexp(t, "FF10115", err)
+	assert.Regexp(t, "FF00176", err)
 	assert.NoError(t, mock.ExpectationsWereMet())
 }
@@ -171,7 +171,7 @@ func TestGetVerifierByVerifierSelectFail(t *testing.T) {
 	s, mock := newMockProvider().init()
 	mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop"))
 	_, err := s.GetVerifierByValue(context.Background(), core.VerifierTypeEthAddress, "ff_system", "0x12345")
-	assert.Regexp(t, "FF10115", err)
+	assert.Regexp(t, "FF00176", err)
 	assert.NoError(t, mock.ExpectationsWereMet())
 }
@@ -197,7 +197,7 @@ func TestGetVerifierQueryFail(t *testing.T) {
 	mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop"))
 	f := database.VerifierQueryFactory.NewFilter(context.Background()).Eq("value", "")
 	_, _, err := s.GetVerifiers(context.Background(), "ns1", f)
-	assert.Regexp(t, "FF10115", err)
+	assert.Regexp(t, "FF00176", err)
 	assert.NoError(t, mock.ExpectationsWereMet())
 }
diff --git a/internal/definitions/handler_identity_claim_test.go b/internal/definitions/handler_identity_claim_test.go
index 0ce6db8ad4..1a3232d075 100644
--- a/internal/definitions/handler_identity_claim_test.go
+++ b/internal/definitions/handler_identity_claim_test.go
@@ -101,7 +101,7 @@ func testCustomClaimAndVerification(t *testing.T) (*core.Identity, *core.Identit
 			ID: custom1.Messages.Claim,
 			Type: core.MessageTypeDefinition,
 			Tag: core.SystemTagIdentityClaim,
-			Topics: core.FFStringArray{custom1.Topic()},
+			Topics: fftypes.FFStringArray{custom1.Topic()},
 			SignerRef: core.SignerRef{
 				Author: custom1.DID,
 				Key: "0x12345",
@@ -128,7 +128,7 @@ func testCustomClaimAndVerification(t *testing.T) (*core.Identity, *core.Identit
 			ID: fftypes.NewUUID(),
 			Type: core.MessageTypeDefinition,
 			Tag: core.SystemTagIdentityVerification,
-			Topics: core.FFStringArray{custom1.Topic()},
+			Topics: fftypes.FFStringArray{custom1.Topic()},
 			SignerRef: core.SignerRef{
 				Author: org1.DID,
 				Key: "0x2456",
diff --git a/internal/definitions/handler_identity_update_test.go b/internal/definitions/handler_identity_update_test.go
index 3720d4b904..a1096a210a 100644
--- a/internal/definitions/handler_identity_update_test.go
+++ b/internal/definitions/handler_identity_update_test.go
@@ -56,7 +56,7 @@ func testIdentityUpdate(t *testing.T) (*core.Identity, *core.Message, *core.Data
 			ID: fftypes.NewUUID(),
 			Type: core.MessageTypeDefinition,
 			Tag: core.SystemTagIdentityUpdate,
-			Topics: core.FFStringArray{org1.Topic()},
+			Topics: fftypes.FFStringArray{org1.Topic()},
 			SignerRef: core.SignerRef{
 				Author: org1.DID,
 				Key: "0x12345",
@@ -234,7 +234,7 @@ func TestHandleDefinitionIdentityValidateFail(t *testing.T) {
 			ID: fftypes.NewUUID(),
 			Type: core.MessageTypeDefinition,
 			Tag: core.SystemTagIdentityUpdate,
-			Topics: core.FFStringArray{org1.Topic()},
+			Topics: fftypes.FFStringArray{org1.Topic()},
 			SignerRef:
core.SignerRef{ Author: org1.DID, Key: "0x12345", @@ -259,7 +259,7 @@ func TestHandleDefinitionIdentityMissingData(t *testing.T) { ID: fftypes.NewUUID(), Type: core.MessageTypeDefinition, Tag: core.SystemTagIdentityUpdate, - Topics: core.FFStringArray{org1.Topic()}, + Topics: fftypes.FFStringArray{org1.Topic()}, SignerRef: core.SignerRef{ Author: org1.DID, Key: "0x12345", diff --git a/internal/definitions/sender.go b/internal/definitions/sender.go index 364c1097bc..2bf87a6886 100644 --- a/internal/definitions/sender.go +++ b/internal/definitions/sender.go @@ -129,7 +129,7 @@ func (bm *definitionSender) sendDefinitionCommon(ctx context.Context, def core.D Message: core.Message{ Header: core.MessageHeader{ Type: core.MessageTypeDefinition, - Topics: core.FFStringArray{def.Topic()}, + Topics: fftypes.FFStringArray{def.Topic()}, Tag: tag, TxType: core.TransactionTypeBatchPin, SignerRef: *signingIdentity, diff --git a/internal/events/aggregator_batch_state.go b/internal/events/aggregator_batch_state.go index eacb47a9f3..ce072cb86c 100644 --- a/internal/events/aggregator_batch_state.go +++ b/internal/events/aggregator_batch_state.go @@ -74,7 +74,7 @@ type dispatchedMessage struct { msgID *fftypes.UUID firstPinIndex int64 topicCount int - msgPins core.FFStringArray + msgPins fftypes.FFStringArray newState core.MessageState } diff --git a/internal/events/aggregator_batch_state_test.go b/internal/events/aggregator_batch_state_test.go index 3d411289be..bcf1ecb398 100644 --- a/internal/events/aggregator_batch_state_test.go +++ b/internal/events/aggregator_batch_state_test.go @@ -36,9 +36,9 @@ func TestFlushPinsFailUpdatePins(t *testing.T) { bs.markMessageDispatched(fftypes.NewUUID(), &core.Message{ Header: core.MessageHeader{ ID: fftypes.NewUUID(), - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, }, - Pins: core.FFStringArray{"pin1"}, + Pins: fftypes.FFStringArray{"pin1"}, }, 0, core.MessageStateConfirmed) err := bs.flushPins(ag.ctx) @@ -58,9 +58,9 @@ func TestFlushPinsFailUpdateMessages(t *testing.T) { bs.markMessageDispatched(fftypes.NewUUID(), &core.Message{ Header: core.MessageHeader{ ID: msgID, - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, }, - Pins: core.FFStringArray{"pin1"}, + Pins: fftypes.FFStringArray{"pin1"}, }, 0, core.MessageStateConfirmed) err := bs.flushPins(ag.ctx) diff --git a/internal/events/aggregator_test.go b/internal/events/aggregator_test.go index c59a829870..f377924e34 100644 --- a/internal/events/aggregator_test.go +++ b/internal/events/aggregator_test.go @@ -125,7 +125,7 @@ func newTestManifest(mType core.MessageType, groupID *fftypes.Bytes32) (*core.Me ID: fftypes.NewUUID(), Namespace: "any", Group: groupID, - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, SignerRef: core.SignerRef{Key: "0x12345", Author: org1.DID}, }, Data: core.DataRefs{ @@ -138,7 +138,7 @@ func newTestManifest(mType core.MessageType, groupID *fftypes.Bytes32) (*core.Me ID: fftypes.NewUUID(), Group: groupID, Namespace: "any", - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, SignerRef: core.SignerRef{Key: "0x12345", Author: org1.DID}, }, Data: core.DataRefs{ @@ -650,7 +650,7 @@ func TestAggregationMigratedBroadcastNilMessageID(t *testing.T) { Payload: core.BatchPayload{ Messages: []*core.Message{{ Header: core.MessageHeader{ - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, }, }}, }, @@ -704,7 +704,7 @@ func 
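// Illustrative sketch, not part of this patch: the unexported sqlcommon helpers
// (beginOrUseTx, queryTx, insertTxExt, updateTx, commitTx, rollbackTx, filterSelect,
// queryRes) are replaced above by exported equivalents provided through firefly-common
// (BeginOrUseTx, QueryTx, InsertTxExt, UpdateTx, CommitTx, RollbackTx, FilterSelect,
// QueryRes). Because the shared layer now raises these errors, the expected codes in
// the verifier tests move from FireFly's FF10xxx range to the FF00xxx range (for
// example FF10114 -> FF00175 on a failed begin). The sketch assumes the
// internal/database/sqlcommon package context shown in this patch (SQLCommon,
// verifiersTable) and its existing imports; it only uses calls whose shapes are
// visible in this diff, and the helper name below is hypothetical.
func (s *SQLCommon) verifierExistsSketch(ctx context.Context, hash *fftypes.Bytes32, namespace string) (exists bool, err error) {
	ctx, tx, autoCommit, err := s.BeginOrUseTx(ctx)
	if err != nil {
		return false, err // FF00175 if the underlying begin fails
	}
	defer s.RollbackTx(ctx, tx, autoCommit)

	// Select inside the transaction, as UpsertVerifier does before choosing insert vs update
	rows, _, err := s.QueryTx(ctx, verifiersTable, tx,
		sq.Select("hash").
			From(verifiersTable).
			Where(sq.Eq{"hash": hash, "namespace": namespace}))
	if err != nil {
		return false, err // FF00176 if the query fails
	}
	exists = rows.Next()
	rows.Close()

	return exists, s.CommitTx(ctx, tx, autoCommit) // FF00180 if the commit fails
}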
TestAggregationMigratedBroadcastInvalid(t *testing.T) { Payload: core.BatchPayload{ Messages: []*core.Message{{ Header: core.MessageHeader{ - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, }, }}, }, @@ -848,7 +848,7 @@ func TestProcessPinsBadMsgHeader(t *testing.T) { Messages: []*core.Message{ {Header: core.MessageHeader{ ID: nil, /* missing */ - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, }}, }, }, @@ -881,7 +881,7 @@ func TestProcessSkipDupMsg(t *testing.T) { Messages: []*core.Message{ {Header: core.MessageHeader{ ID: fftypes.NewUUID(), - Topics: core.FFStringArray{"topic1", "topic2"}, + Topics: fftypes.FFStringArray{"topic1", "topic2"}, }}, }, }, @@ -920,7 +920,7 @@ func TestProcessMsgFailGetPins(t *testing.T) { Messages: []*core.Message{ {Header: core.MessageHeader{ ID: fftypes.NewUUID(), - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, }}, }, }, @@ -990,10 +990,10 @@ func TestProcessMsgFailBadPin(t *testing.T) { Header: core.MessageHeader{ ID: fftypes.NewUUID(), Group: fftypes.NewRandB32(), - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, }, Hash: fftypes.NewRandB32(), - Pins: core.FFStringArray{"!Wrong"}, + Pins: fftypes.FFStringArray{"!Wrong"}, } ag.mdm.On("GetMessageWithDataCached", ag.ctx, mock.Anything, data.CRORequirePins).Return(msg, nil, true, nil) @@ -1019,9 +1019,9 @@ func TestProcessMsgFailGetNextPins(t *testing.T) { Header: core.MessageHeader{ ID: fftypes.NewUUID(), Group: fftypes.NewRandB32(), - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, }, - Pins: core.FFStringArray{fftypes.NewRandB32().String()}, + Pins: fftypes.FFStringArray{fftypes.NewRandB32().String()}, } ag.mdm.On("GetMessageWithDataCached", ag.ctx, mock.Anything, data.CRORequirePins).Return(msg, nil, true, nil) @@ -1046,12 +1046,12 @@ func TestProcessMsgFailDispatch(t *testing.T) { msg := &core.Message{ Header: core.MessageHeader{ ID: fftypes.NewUUID(), - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, SignerRef: core.SignerRef{ Key: "0x12345", }, }, - Pins: core.FFStringArray{fftypes.NewRandB32().String()}, + Pins: fftypes.FFStringArray{fftypes.NewRandB32().String()}, } ag.mdm.On("GetMessageWithDataCached", ag.ctx, mock.Anything, data.CRORequirePublicBlobRefs).Return(msg, nil, true, nil) @@ -1081,14 +1081,14 @@ func TestProcessMsgFailPinUpdate(t *testing.T) { Header: core.MessageHeader{ ID: fftypes.NewUUID(), Group: groupID, - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, Namespace: "ns1", SignerRef: core.SignerRef{ Author: org1.DID, Key: "0x12345", }, }, - Pins: core.FFStringArray{pin.String()}, + Pins: fftypes.FFStringArray{pin.String()}, } ag.mim.On("FindIdentityForVerifier", ag.ctx, []core.IdentityType{core.IdentityTypeOrg, core.IdentityTypeCustom}, &core.VerifierRef{ @@ -1133,7 +1133,7 @@ func TestCheckMaskedContextReadyMismatchedAuthor(t *testing.T) { ID: fftypes.NewUUID(), Group: fftypes.NewRandB32(), Tag: core.SystemTagDefineDatatype, - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, SignerRef: core.SignerRef{ Author: "author1", Key: "0x12345", @@ -1605,7 +1605,7 @@ func TestDefinitionBroadcastActionRejectCustomCorrelator(t *testing.T) { Namespace: "any", SignerRef: core.SignerRef{Key: "0x12345", Author: org1.DID}, Tag: core.SystemTagDefineDatatype, - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, 
}, Data: core.DataRefs{ {ID: fftypes.NewUUID()}, @@ -1704,8 +1704,8 @@ func TestDispatchPrivateQueuesLaterDispatch(t *testing.T) { }, nil).Once() ag.mdi.On("GetBlobMatchingHash", ag.ctx, data2[0].Blob.Hash).Return(nil, nil) - msg1.Pins = core.FFStringArray{member1NonceOne.String()} - msg2.Pins = core.FFStringArray{member1NonceTwo.String()} + msg1.Pins = fftypes.FFStringArray{member1NonceOne.String()} + msg2.Pins = fftypes.FFStringArray{member1NonceTwo.String()} // First message should dispatch pin1 := &core.Pin{Masked: true, Sequence: 12345, Signer: msg1.Header.Key} @@ -1745,8 +1745,8 @@ func TestDispatchPrivateNextPinIncremented(t *testing.T) { {Context: context, Nonce: 1 /* match member1NonceOne */, Identity: org1.DID, Hash: member1NonceOne}, }, nil) - msg1.Pins = core.FFStringArray{member1NonceOne.String()} - msg2.Pins = core.FFStringArray{member1NonceTwo.String()} + msg1.Pins = fftypes.FFStringArray{member1NonceOne.String()} + msg2.Pins = fftypes.FFStringArray{member1NonceTwo.String()} // First message should dispatch err := ag.processMessage(ag.ctx, manifest, &core.Pin{Masked: true, Sequence: 12345, Signer: "0x12345"}, 0, manifest.Messages[0], &core.BatchPersisted{}, bs) diff --git a/internal/events/batch_pin_complete_test.go b/internal/events/batch_pin_complete_test.go index 0065a56130..873af20da0 100644 --- a/internal/events/batch_pin_complete_test.go +++ b/internal/events/batch_pin_complete_test.go @@ -52,7 +52,7 @@ func sampleBatch(t *testing.T, batchType core.BatchType, txType core.Transaction ID: fftypes.NewUUID(), Type: msgType, TxType: txType, - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, }, Data: data.Refs(), } diff --git a/internal/events/event_dispatcher_test.go b/internal/events/event_dispatcher_test.go index a00179ee3d..4b557e1084 100644 --- a/internal/events/event_dispatcher_test.go +++ b/internal/events/event_dispatcher_test.go @@ -423,7 +423,7 @@ func TestFilterEventsMatch(t *testing.T) { }, Message: &core.Message{ Header: core.MessageHeader{ - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, Tag: "tag1", Group: nil, SignerRef: core.SignerRef{ @@ -443,7 +443,7 @@ func TestFilterEventsMatch(t *testing.T) { }, Message: &core.Message{ Header: core.MessageHeader{ - Topics: core.FFStringArray{"topic1"}, + Topics: fftypes.FFStringArray{"topic1"}, Tag: "tag2", Group: gid1, SignerRef: core.SignerRef{ @@ -463,7 +463,7 @@ func TestFilterEventsMatch(t *testing.T) { }, Message: &core.Message{ Header: core.MessageHeader{ - Topics: core.FFStringArray{"topic2"}, + Topics: fftypes.FFStringArray{"topic2"}, Tag: "tag1", Group: nil, SignerRef: core.SignerRef{ diff --git a/internal/privatemessaging/groupmanager.go b/internal/privatemessaging/groupmanager.go index 26244edffc..14f6206ced 100644 --- a/internal/privatemessaging/groupmanager.go +++ b/internal/privatemessaging/groupmanager.go @@ -134,7 +134,7 @@ func (gm *groupManager) groupInit(ctx context.Context, signer *core.SignerRef, g Type: core.MessageTypeGroupInit, SignerRef: *signer, Tag: core.SystemTagDefineGroup, - Topics: core.FFStringArray{group.Topic()}, + Topics: fftypes.FFStringArray{group.Topic()}, TxType: core.TransactionTypeBatchPin, }, Data: core.DataRefs{ diff --git a/internal/reference/reference.go b/internal/reference/reference.go index 6ae0604295..b22999fdf9 100644 --- a/internal/reference/reference.go +++ b/internal/reference/reference.go @@ -138,7 +138,7 @@ func GenerateObjectsReferenceMarkdown(ctx context.Context) (map[string][]byte, e Namespace: 
"ns1", Type: core.TransactionTypeContractInvoke, Created: fftypes.UnixTime(1652664195), - BlockchainIDs: core.NewFFStringArray("0x34b0327567fefed09ac7b4429549bc609302b08a9cbd8f019a078ec44447593d"), + BlockchainIDs: fftypes.NewFFStringArray("0x34b0327567fefed09ac7b4429549bc609302b08a9cbd8f019a078ec44447593d"), }, &core.Operation{ @@ -392,7 +392,7 @@ func GenerateObjectsReferenceMarkdown(ctx context.Context) (map[string][]byte, e Created: fftypes.UnixTime(1652664190), Group: fftypes.HashString("testgroup"), Namespace: "ns1", - Topics: core.NewFFStringArray("topic1"), + Topics: fftypes.NewFFStringArray("topic1"), Tag: "blue_message", DataHash: fftypes.HashString("testmsghash"), }, diff --git a/internal/txcommon/txcommon.go b/internal/txcommon/txcommon.go index e04bbd297f..a17e730e77 100644 --- a/internal/txcommon/txcommon.go +++ b/internal/txcommon/txcommon.go @@ -151,7 +151,7 @@ func (t *transactionHelper) PersistTransaction(ctx context.Context, id *fftypes. ID: id, Namespace: t.namespace, Type: txType, - BlockchainIDs: core.NewFFStringArray(strings.ToLower(blockchainTXID)), + BlockchainIDs: fftypes.NewFFStringArray(strings.ToLower(blockchainTXID)), } if err = t.database.InsertTransaction(ctx, tx); err != nil { return false, err diff --git a/internal/txcommon/txcommon_test.go b/internal/txcommon/txcommon_test.go index 9a711d4ef3..5972333e07 100644 --- a/internal/txcommon/txcommon_test.go +++ b/internal/txcommon/txcommon_test.go @@ -165,7 +165,7 @@ func TestPersistTransactionNew(t *testing.T) { assert.Equal(t, txid, transaction.ID) assert.Equal(t, "ns1", transaction.Namespace) assert.Equal(t, core.TransactionTypeBatchPin, transaction.Type) - assert.Equal(t, core.FFStringArray{"0x222222"}, transaction.BlockchainIDs) + assert.Equal(t, fftypes.FFStringArray{"0x222222"}, transaction.BlockchainIDs) return true })).Return(nil) @@ -213,7 +213,7 @@ func TestPersistTransactionExistingAddBlockchainID(t *testing.T) { Namespace: "ns1", Type: core.TransactionTypeBatchPin, Created: fftypes.Now(), - BlockchainIDs: core.FFStringArray{"0x111111"}, + BlockchainIDs: fftypes.FFStringArray{"0x111111"}, }, nil) mdi.On("UpdateTransaction", ctx, "ns1", txid, mock.Anything).Return(nil) @@ -240,7 +240,7 @@ func TestPersistTransactionExistingUpdateFail(t *testing.T) { Namespace: "ns1", Type: core.TransactionTypeBatchPin, Created: fftypes.Now(), - BlockchainIDs: core.FFStringArray{"0x111111"}, + BlockchainIDs: fftypes.FFStringArray{"0x111111"}, }, nil) mdi.On("UpdateTransaction", ctx, "ns1", txid, mock.Anything).Return(fmt.Errorf("pop")) @@ -280,7 +280,7 @@ func TestPersistTransactionExistingNoChange(t *testing.T) { Namespace: "ns1", Type: core.TransactionTypeBatchPin, Created: fftypes.Now(), - BlockchainIDs: core.FFStringArray{"0x111111"}, + BlockchainIDs: fftypes.FFStringArray{"0x111111"}, }, nil) valid, err := txHelper.PersistTransaction(ctx, txid, core.TransactionTypeBatchPin, "0x111111") @@ -306,7 +306,7 @@ func TestPersistTransactionExistingNoBlockchainID(t *testing.T) { Namespace: "ns1", Type: core.TransactionTypeBatchPin, Created: fftypes.Now(), - BlockchainIDs: core.FFStringArray{"0x111111"}, + BlockchainIDs: fftypes.FFStringArray{"0x111111"}, }, nil) valid, err := txHelper.PersistTransaction(ctx, txid, core.TransactionTypeBatchPin, "") @@ -352,7 +352,7 @@ func TestPersistTransactionExistingMismatchType(t *testing.T) { Namespace: "ns1", Type: core.TransactionTypeContractInvoke, Created: fftypes.Now(), - BlockchainIDs: core.FFStringArray{"0x111111"}, + BlockchainIDs: fftypes.FFStringArray{"0x111111"}, }, nil) 
valid, err := txHelper.PersistTransaction(ctx, txid, core.TransactionTypeBatchPin, "") @@ -377,7 +377,7 @@ func TestAddBlockchainTX(t *testing.T) { Namespace: "ns1", Type: core.TransactionTypeContractInvoke, Created: fftypes.Now(), - BlockchainIDs: core.FFStringArray{"0x111111"}, + BlockchainIDs: fftypes.FFStringArray{"0x111111"}, } mdi.On("UpdateTransaction", ctx, "ns1", tx.ID, mock.MatchedBy(func(u database.Update) bool { info, _ := u.Finalize() @@ -409,7 +409,7 @@ func TestAddBlockchainTXUpdateFail(t *testing.T) { Namespace: "ns1", Type: core.TransactionTypeContractInvoke, Created: fftypes.Now(), - BlockchainIDs: core.FFStringArray{"0x111111"}, + BlockchainIDs: fftypes.FFStringArray{"0x111111"}, } mdi.On("UpdateTransaction", ctx, "ns1", tx.ID, mock.Anything).Return(fmt.Errorf("pop")) @@ -434,7 +434,7 @@ func TestAddBlockchainTXUnchanged(t *testing.T) { Namespace: "ns1", Type: core.TransactionTypeContractInvoke, Created: fftypes.Now(), - BlockchainIDs: core.FFStringArray{"0x111111"}, + BlockchainIDs: fftypes.FFStringArray{"0x111111"}, } err := txHelper.AddBlockchainTX(ctx, tx, "0x111111") @@ -455,7 +455,7 @@ func TestGetTransactionByIDCached(t *testing.T) { Namespace: "ns1", Type: core.TransactionTypeContractInvoke, Created: fftypes.Now(), - BlockchainIDs: core.FFStringArray{"0x111111"}, + BlockchainIDs: fftypes.FFStringArray{"0x111111"}, }, nil).Once() tx, err := txHelper.GetTransactionByIDCached(ctx, txid) diff --git a/mocks/databasemocks/plugin.go b/mocks/databasemocks/plugin.go index d42a533b29..915bd0af0b 100644 --- a/mocks/databasemocks/plugin.go +++ b/mocks/databasemocks/plugin.go @@ -11,6 +11,8 @@ import ( database "github.com/hyperledger/firefly/pkg/database" + ffapi "github.com/hyperledger/firefly-common/pkg/ffapi" + fftypes "github.com/hyperledger/firefly-common/pkg/fftypes" mock "github.com/stretchr/testify/mock" @@ -177,11 +179,11 @@ func (_m *Plugin) GetBatchIDsForMessages(ctx context.Context, namespace string, } // GetBatches provides a mock function with given fields: ctx, namespace, filter -func (_m *Plugin) GetBatches(ctx context.Context, namespace string, filter database.Filter) ([]*core.BatchPersisted, *database.FilterResult, error) { +func (_m *Plugin) GetBatches(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.BatchPersisted, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, filter) var r0 []*core.BatchPersisted - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter) []*core.BatchPersisted); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter) []*core.BatchPersisted); ok { r0 = rf(ctx, namespace, filter) } else { if ret.Get(0) != nil { @@ -189,17 +191,17 @@ func (_m *Plugin) GetBatches(ctx context.Context, namespace string, filter datab } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.Filter) error); ok { r2 = rf(ctx, namespace, filter) } else { r2 = ret.Error(2) @@ -232,11 +234,11 @@ func (_m *Plugin) GetBlobMatchingHash(ctx context.Context, hash *fftypes.Bytes32 } // 
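// Illustrative sketch, not part of this patch: FFStringArray and NewFFStringArray have
// moved from pkg/core to firefly-common's fftypes package, which is the mechanical
// substitution running through the definitions, events, reference and txcommon updates
// above. The import paths below mirror the ones already used in this repository, and
// the literal values are examples only.
package main

import (
	"fmt"

	"github.com/hyperledger/firefly-common/pkg/fftypes"
	"github.com/hyperledger/firefly/pkg/core"
)

func main() {
	// Message topics and pins are now fftypes.FFStringArray rather than core.FFStringArray
	msg := &core.Message{
		Header: core.MessageHeader{
			ID:     fftypes.NewUUID(),
			Type:   core.MessageTypeDefinition,
			Topics: fftypes.FFStringArray{"topic1"},
		},
		Pins: fftypes.FFStringArray{fftypes.NewRandB32().String()},
	}

	// Transaction blockchain IDs use the relocated constructor
	tx := &core.Transaction{
		ID:            fftypes.NewUUID(),
		Namespace:     "ns1",
		Type:          core.TransactionTypeBatchPin,
		BlockchainIDs: fftypes.NewFFStringArray("0x111111"),
	}

	fmt.Println(msg.Header.Topics, tx.BlockchainIDs)
}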
GetBlobs provides a mock function with given fields: ctx, filter -func (_m *Plugin) GetBlobs(ctx context.Context, filter database.Filter) ([]*core.Blob, *database.FilterResult, error) { +func (_m *Plugin) GetBlobs(ctx context.Context, filter ffapi.Filter) ([]*core.Blob, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.Blob - if rf, ok := ret.Get(0).(func(context.Context, database.Filter) []*core.Blob); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.Filter) []*core.Blob); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -244,17 +246,17 @@ func (_m *Plugin) GetBlobs(ctx context.Context, filter database.Filter) ([]*core } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.Filter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) @@ -310,11 +312,11 @@ func (_m *Plugin) GetBlockchainEventByProtocolID(ctx context.Context, namespace } // GetBlockchainEvents provides a mock function with given fields: ctx, namespace, filter -func (_m *Plugin) GetBlockchainEvents(ctx context.Context, namespace string, filter database.Filter) ([]*core.BlockchainEvent, *database.FilterResult, error) { +func (_m *Plugin) GetBlockchainEvents(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.BlockchainEvent, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, filter) var r0 []*core.BlockchainEvent - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter) []*core.BlockchainEvent); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter) []*core.BlockchainEvent); ok { r0 = rf(ctx, namespace, filter) } else { if ret.Get(0) != nil { @@ -322,17 +324,17 @@ func (_m *Plugin) GetBlockchainEvents(ctx context.Context, namespace string, fil } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.Filter) error); ok { r2 = rf(ctx, namespace, filter) } else { r2 = ret.Error(2) @@ -411,11 +413,11 @@ func (_m *Plugin) GetContractAPIByName(ctx context.Context, namespace string, na } // GetContractAPIs provides a mock function with given fields: ctx, namespace, filter -func (_m *Plugin) GetContractAPIs(ctx context.Context, namespace string, filter database.AndFilter) ([]*core.ContractAPI, *database.FilterResult, error) { +func (_m *Plugin) GetContractAPIs(ctx context.Context, namespace string, filter ffapi.AndFilter) ([]*core.ContractAPI, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, filter) var r0 []*core.ContractAPI - if rf, ok := ret.Get(0).(func(context.Context, string, database.AndFilter) 
[]*core.ContractAPI); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.AndFilter) []*core.ContractAPI); ok { r0 = rf(ctx, namespace, filter) } else { if ret.Get(0) != nil { @@ -423,17 +425,17 @@ func (_m *Plugin) GetContractAPIs(ctx context.Context, namespace string, filter } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.AndFilter) error); ok { r2 = rf(ctx, namespace, filter) } else { r2 = ret.Error(2) @@ -512,11 +514,11 @@ func (_m *Plugin) GetContractListenerByID(ctx context.Context, namespace string, } // GetContractListeners provides a mock function with given fields: ctx, namespace, filter -func (_m *Plugin) GetContractListeners(ctx context.Context, namespace string, filter database.Filter) ([]*core.ContractListener, *database.FilterResult, error) { +func (_m *Plugin) GetContractListeners(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.ContractListener, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, filter) var r0 []*core.ContractListener - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter) []*core.ContractListener); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter) []*core.ContractListener); ok { r0 = rf(ctx, namespace, filter) } else { if ret.Get(0) != nil { @@ -524,17 +526,17 @@ func (_m *Plugin) GetContractListeners(ctx context.Context, namespace string, fi } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.Filter) error); ok { r2 = rf(ctx, namespace, filter) } else { r2 = ret.Error(2) @@ -544,11 +546,11 @@ func (_m *Plugin) GetContractListeners(ctx context.Context, namespace string, fi } // GetData provides a mock function with given fields: ctx, namespace, filter -func (_m *Plugin) GetData(ctx context.Context, namespace string, filter database.Filter) (core.DataArray, *database.FilterResult, error) { +func (_m *Plugin) GetData(ctx context.Context, namespace string, filter ffapi.Filter) (core.DataArray, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, filter) var r0 core.DataArray - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter) core.DataArray); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter) core.DataArray); ok { r0 = rf(ctx, namespace, filter) } else { if ret.Get(0) != nil { @@ -556,17 +558,17 @@ func (_m *Plugin) GetData(ctx context.Context, namespace string, filter database } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, 
string, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.Filter) error); ok { r2 = rf(ctx, namespace, filter) } else { r2 = ret.Error(2) @@ -599,11 +601,11 @@ func (_m *Plugin) GetDataByID(ctx context.Context, namespace string, id *fftypes } // GetDataRefs provides a mock function with given fields: ctx, namespace, filter -func (_m *Plugin) GetDataRefs(ctx context.Context, namespace string, filter database.Filter) (core.DataRefs, *database.FilterResult, error) { +func (_m *Plugin) GetDataRefs(ctx context.Context, namespace string, filter ffapi.Filter) (core.DataRefs, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, filter) var r0 core.DataRefs - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter) core.DataRefs); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter) core.DataRefs); ok { r0 = rf(ctx, namespace, filter) } else { if ret.Get(0) != nil { @@ -611,17 +613,17 @@ func (_m *Plugin) GetDataRefs(ctx context.Context, namespace string, filter data } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.Filter) error); ok { r2 = rf(ctx, namespace, filter) } else { r2 = ret.Error(2) @@ -677,11 +679,11 @@ func (_m *Plugin) GetDatatypeByName(ctx context.Context, namespace string, name } // GetDatatypes provides a mock function with given fields: ctx, namespace, filter -func (_m *Plugin) GetDatatypes(ctx context.Context, namespace string, filter database.Filter) ([]*core.Datatype, *database.FilterResult, error) { +func (_m *Plugin) GetDatatypes(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.Datatype, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, filter) var r0 []*core.Datatype - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter) []*core.Datatype); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter) []*core.Datatype); ok { r0 = rf(ctx, namespace, filter) } else { if ret.Get(0) != nil { @@ -689,17 +691,17 @@ func (_m *Plugin) GetDatatypes(ctx context.Context, namespace string, filter dat } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.Filter) error); ok { + if rf, ok := 
ret.Get(2).(func(context.Context, string, ffapi.Filter) error); ok { r2 = rf(ctx, namespace, filter) } else { r2 = ret.Error(2) @@ -732,11 +734,11 @@ func (_m *Plugin) GetEventByID(ctx context.Context, namespace string, id *fftype } // GetEvents provides a mock function with given fields: ctx, namespace, filter -func (_m *Plugin) GetEvents(ctx context.Context, namespace string, filter database.Filter) ([]*core.Event, *database.FilterResult, error) { +func (_m *Plugin) GetEvents(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.Event, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, filter) var r0 []*core.Event - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter) []*core.Event); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter) []*core.Event); ok { r0 = rf(ctx, namespace, filter) } else { if ret.Get(0) != nil { @@ -744,17 +746,17 @@ func (_m *Plugin) GetEvents(ctx context.Context, namespace string, filter databa } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.Filter) error); ok { r2 = rf(ctx, namespace, filter) } else { r2 = ret.Error(2) @@ -833,11 +835,11 @@ func (_m *Plugin) GetFFIEvent(ctx context.Context, namespace string, interfaceID } // GetFFIEvents provides a mock function with given fields: ctx, namespace, filter -func (_m *Plugin) GetFFIEvents(ctx context.Context, namespace string, filter database.Filter) ([]*fftypes.FFIEvent, *database.FilterResult, error) { +func (_m *Plugin) GetFFIEvents(ctx context.Context, namespace string, filter ffapi.Filter) ([]*fftypes.FFIEvent, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, filter) var r0 []*fftypes.FFIEvent - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter) []*fftypes.FFIEvent); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter) []*fftypes.FFIEvent); ok { r0 = rf(ctx, namespace, filter) } else { if ret.Get(0) != nil { @@ -845,17 +847,17 @@ func (_m *Plugin) GetFFIEvents(ctx context.Context, namespace string, filter dat } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.Filter) error); ok { r2 = rf(ctx, namespace, filter) } else { r2 = ret.Error(2) @@ -888,11 +890,11 @@ func (_m *Plugin) GetFFIMethod(ctx context.Context, namespace string, interfaceI } // GetFFIMethods provides a mock function with given fields: ctx, namespace, filter -func (_m *Plugin) GetFFIMethods(ctx context.Context, namespace string, filter database.Filter) ([]*fftypes.FFIMethod, 
*database.FilterResult, error) { +func (_m *Plugin) GetFFIMethods(ctx context.Context, namespace string, filter ffapi.Filter) ([]*fftypes.FFIMethod, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, filter) var r0 []*fftypes.FFIMethod - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter) []*fftypes.FFIMethod); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter) []*fftypes.FFIMethod); ok { r0 = rf(ctx, namespace, filter) } else { if ret.Get(0) != nil { @@ -900,17 +902,17 @@ func (_m *Plugin) GetFFIMethods(ctx context.Context, namespace string, filter da } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.Filter) error); ok { r2 = rf(ctx, namespace, filter) } else { r2 = ret.Error(2) @@ -920,11 +922,11 @@ func (_m *Plugin) GetFFIMethods(ctx context.Context, namespace string, filter da } // GetFFIs provides a mock function with given fields: ctx, namespace, filter -func (_m *Plugin) GetFFIs(ctx context.Context, namespace string, filter database.Filter) ([]*fftypes.FFI, *database.FilterResult, error) { +func (_m *Plugin) GetFFIs(ctx context.Context, namespace string, filter ffapi.Filter) ([]*fftypes.FFI, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, filter) var r0 []*fftypes.FFI - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter) []*fftypes.FFI); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter) []*fftypes.FFI); ok { r0 = rf(ctx, namespace, filter) } else { if ret.Get(0) != nil { @@ -932,17 +934,17 @@ func (_m *Plugin) GetFFIs(ctx context.Context, namespace string, filter database } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.Filter) error); ok { r2 = rf(ctx, namespace, filter) } else { r2 = ret.Error(2) @@ -975,11 +977,11 @@ func (_m *Plugin) GetGroupByHash(ctx context.Context, namespace string, hash *ff } // GetGroups provides a mock function with given fields: ctx, namespace, filter -func (_m *Plugin) GetGroups(ctx context.Context, namespace string, filter database.Filter) ([]*core.Group, *database.FilterResult, error) { +func (_m *Plugin) GetGroups(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.Group, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, filter) var r0 []*core.Group - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter) []*core.Group); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter) []*core.Group); ok { r0 = rf(ctx, namespace, 
filter) } else { if ret.Get(0) != nil { @@ -987,17 +989,17 @@ func (_m *Plugin) GetGroups(ctx context.Context, namespace string, filter databa } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.Filter) error); ok { r2 = rf(ctx, namespace, filter) } else { r2 = ret.Error(2) @@ -1007,11 +1009,11 @@ func (_m *Plugin) GetGroups(ctx context.Context, namespace string, filter databa } // GetIdentities provides a mock function with given fields: ctx, namespace, filter -func (_m *Plugin) GetIdentities(ctx context.Context, namespace string, filter database.Filter) ([]*core.Identity, *database.FilterResult, error) { +func (_m *Plugin) GetIdentities(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.Identity, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, filter) var r0 []*core.Identity - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter) []*core.Identity); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter) []*core.Identity); ok { r0 = rf(ctx, namespace, filter) } else { if ret.Get(0) != nil { @@ -1019,17 +1021,17 @@ func (_m *Plugin) GetIdentities(ctx context.Context, namespace string, filter da } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.Filter) error); ok { r2 = rf(ctx, namespace, filter) } else { r2 = ret.Error(2) @@ -1131,11 +1133,11 @@ func (_m *Plugin) GetMessageByID(ctx context.Context, namespace string, id *ffty } // GetMessageIDs provides a mock function with given fields: ctx, namespace, filter -func (_m *Plugin) GetMessageIDs(ctx context.Context, namespace string, filter database.Filter) ([]*core.IDAndSequence, error) { +func (_m *Plugin) GetMessageIDs(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.IDAndSequence, error) { ret := _m.Called(ctx, namespace, filter) var r0 []*core.IDAndSequence - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter) []*core.IDAndSequence); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter) []*core.IDAndSequence); ok { r0 = rf(ctx, namespace, filter) } else { if ret.Get(0) != nil { @@ -1144,7 +1146,7 @@ func (_m *Plugin) GetMessageIDs(ctx context.Context, namespace string, filter da } var r1 error - if rf, ok := ret.Get(1).(func(context.Context, string, database.Filter) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.Filter) error); ok { r1 = rf(ctx, namespace, filter) } else { r1 = ret.Error(1) @@ -1154,11 +1156,11 @@ func (_m *Plugin) 
GetMessageIDs(ctx context.Context, namespace string, filter da } // GetMessages provides a mock function with given fields: ctx, namespace, filter -func (_m *Plugin) GetMessages(ctx context.Context, namespace string, filter database.Filter) ([]*core.Message, *database.FilterResult, error) { +func (_m *Plugin) GetMessages(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.Message, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, filter) var r0 []*core.Message - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter) []*core.Message); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter) []*core.Message); ok { r0 = rf(ctx, namespace, filter) } else { if ret.Get(0) != nil { @@ -1166,17 +1168,17 @@ func (_m *Plugin) GetMessages(ctx context.Context, namespace string, filter data } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.Filter) error); ok { r2 = rf(ctx, namespace, filter) } else { r2 = ret.Error(2) @@ -1186,11 +1188,11 @@ func (_m *Plugin) GetMessages(ctx context.Context, namespace string, filter data } // GetMessagesForData provides a mock function with given fields: ctx, namespace, dataID, filter -func (_m *Plugin) GetMessagesForData(ctx context.Context, namespace string, dataID *fftypes.UUID, filter database.Filter) ([]*core.Message, *database.FilterResult, error) { +func (_m *Plugin) GetMessagesForData(ctx context.Context, namespace string, dataID *fftypes.UUID, filter ffapi.Filter) ([]*core.Message, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, dataID, filter) var r0 []*core.Message - if rf, ok := ret.Get(0).(func(context.Context, string, *fftypes.UUID, database.Filter) []*core.Message); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, *fftypes.UUID, ffapi.Filter) []*core.Message); ok { r0 = rf(ctx, namespace, dataID, filter) } else { if ret.Get(0) != nil { @@ -1198,17 +1200,17 @@ func (_m *Plugin) GetMessagesForData(ctx context.Context, namespace string, data } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, *fftypes.UUID, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, *fftypes.UUID, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, dataID, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, *fftypes.UUID, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, *fftypes.UUID, ffapi.Filter) error); ok { r2 = rf(ctx, namespace, dataID, filter) } else { r2 = ret.Error(2) @@ -1287,11 +1289,11 @@ func (_m *Plugin) GetNonce(ctx context.Context, hash *fftypes.Bytes32) (*core.No } // GetNonces provides a mock function with given fields: ctx, filter -func (_m *Plugin) GetNonces(ctx context.Context, filter database.Filter) ([]*core.Nonce, 
*database.FilterResult, error) { +func (_m *Plugin) GetNonces(ctx context.Context, filter ffapi.Filter) ([]*core.Nonce, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.Nonce - if rf, ok := ret.Get(0).(func(context.Context, database.Filter) []*core.Nonce); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.Filter) []*core.Nonce); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -1299,17 +1301,17 @@ func (_m *Plugin) GetNonces(ctx context.Context, filter database.Filter) ([]*cor } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.Filter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) @@ -1342,11 +1344,11 @@ func (_m *Plugin) GetOffset(ctx context.Context, t fftypes.FFEnum, name string) } // GetOffsets provides a mock function with given fields: ctx, filter -func (_m *Plugin) GetOffsets(ctx context.Context, filter database.Filter) ([]*core.Offset, *database.FilterResult, error) { +func (_m *Plugin) GetOffsets(ctx context.Context, filter ffapi.Filter) ([]*core.Offset, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.Offset - if rf, ok := ret.Get(0).(func(context.Context, database.Filter) []*core.Offset); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.Filter) []*core.Offset); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -1354,17 +1356,17 @@ func (_m *Plugin) GetOffsets(ctx context.Context, filter database.Filter) ([]*co } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.Filter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) @@ -1397,11 +1399,11 @@ func (_m *Plugin) GetOperationByID(ctx context.Context, namespace string, id *ff } // GetOperations provides a mock function with given fields: ctx, namespace, filter -func (_m *Plugin) GetOperations(ctx context.Context, namespace string, filter database.Filter) ([]*core.Operation, *database.FilterResult, error) { +func (_m *Plugin) GetOperations(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.Operation, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, filter) var r0 []*core.Operation - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter) []*core.Operation); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter) []*core.Operation); ok { r0 = rf(ctx, namespace, filter) } else { if ret.Get(0) != nil { @@ -1409,17 +1411,17 @@ func (_m *Plugin) GetOperations(ctx context.Context, namespace string, filter da } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, 
database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.Filter) error); ok { r2 = rf(ctx, namespace, filter) } else { r2 = ret.Error(2) @@ -1429,11 +1431,11 @@ func (_m *Plugin) GetOperations(ctx context.Context, namespace string, filter da } // GetPins provides a mock function with given fields: ctx, namespace, filter -func (_m *Plugin) GetPins(ctx context.Context, namespace string, filter database.Filter) ([]*core.Pin, *database.FilterResult, error) { +func (_m *Plugin) GetPins(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.Pin, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, filter) var r0 []*core.Pin - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter) []*core.Pin); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter) []*core.Pin); ok { r0 = rf(ctx, namespace, filter) } else { if ret.Get(0) != nil { @@ -1441,17 +1443,17 @@ func (_m *Plugin) GetPins(ctx context.Context, namespace string, filter database } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.Filter) error); ok { r2 = rf(ctx, namespace, filter) } else { r2 = ret.Error(2) @@ -1507,11 +1509,11 @@ func (_m *Plugin) GetSubscriptionByName(ctx context.Context, namespace string, n } // GetSubscriptions provides a mock function with given fields: ctx, namespace, filter -func (_m *Plugin) GetSubscriptions(ctx context.Context, namespace string, filter database.Filter) ([]*core.Subscription, *database.FilterResult, error) { +func (_m *Plugin) GetSubscriptions(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.Subscription, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, filter) var r0 []*core.Subscription - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter) []*core.Subscription); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter) []*core.Subscription); ok { r0 = rf(ctx, namespace, filter) } else { if ret.Get(0) != nil { @@ -1519,17 +1521,17 @@ func (_m *Plugin) GetSubscriptions(ctx context.Context, namespace string, filter } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.Filter) error); ok { + if 
rf, ok := ret.Get(2).(func(context.Context, string, ffapi.Filter) error); ok { r2 = rf(ctx, namespace, filter) } else { r2 = ret.Error(2) @@ -1539,11 +1541,11 @@ func (_m *Plugin) GetSubscriptions(ctx context.Context, namespace string, filter } // GetTokenAccountPools provides a mock function with given fields: ctx, namespace, key, filter -func (_m *Plugin) GetTokenAccountPools(ctx context.Context, namespace string, key string, filter database.Filter) ([]*core.TokenAccountPool, *database.FilterResult, error) { +func (_m *Plugin) GetTokenAccountPools(ctx context.Context, namespace string, key string, filter ffapi.Filter) ([]*core.TokenAccountPool, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, key, filter) var r0 []*core.TokenAccountPool - if rf, ok := ret.Get(0).(func(context.Context, string, string, database.Filter) []*core.TokenAccountPool); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, string, ffapi.Filter) []*core.TokenAccountPool); ok { r0 = rf(ctx, namespace, key, filter) } else { if ret.Get(0) != nil { @@ -1551,17 +1553,17 @@ func (_m *Plugin) GetTokenAccountPools(ctx context.Context, namespace string, ke } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, string, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, string, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, key, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, string, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, string, ffapi.Filter) error); ok { r2 = rf(ctx, namespace, key, filter) } else { r2 = ret.Error(2) @@ -1571,11 +1573,11 @@ func (_m *Plugin) GetTokenAccountPools(ctx context.Context, namespace string, ke } // GetTokenAccounts provides a mock function with given fields: ctx, namespace, filter -func (_m *Plugin) GetTokenAccounts(ctx context.Context, namespace string, filter database.Filter) ([]*core.TokenAccount, *database.FilterResult, error) { +func (_m *Plugin) GetTokenAccounts(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.TokenAccount, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, filter) var r0 []*core.TokenAccount - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter) []*core.TokenAccount); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter) []*core.TokenAccount); ok { r0 = rf(ctx, namespace, filter) } else { if ret.Get(0) != nil { @@ -1583,17 +1585,17 @@ func (_m *Plugin) GetTokenAccounts(ctx context.Context, namespace string, filter } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.Filter) error); ok { r2 = rf(ctx, namespace, filter) } else { r2 = ret.Error(2) @@ -1649,11 +1651,11 @@ func (_m *Plugin) GetTokenApprovalByProtocolID(ctx context.Context, 
namespace st } // GetTokenApprovals provides a mock function with given fields: ctx, namespace, filter -func (_m *Plugin) GetTokenApprovals(ctx context.Context, namespace string, filter database.Filter) ([]*core.TokenApproval, *database.FilterResult, error) { +func (_m *Plugin) GetTokenApprovals(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.TokenApproval, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, filter) var r0 []*core.TokenApproval - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter) []*core.TokenApproval); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter) []*core.TokenApproval); ok { r0 = rf(ctx, namespace, filter) } else { if ret.Get(0) != nil { @@ -1661,17 +1663,17 @@ func (_m *Plugin) GetTokenApprovals(ctx context.Context, namespace string, filte } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.Filter) error); ok { r2 = rf(ctx, namespace, filter) } else { r2 = ret.Error(2) @@ -1704,11 +1706,11 @@ func (_m *Plugin) GetTokenBalance(ctx context.Context, namespace string, poolID } // GetTokenBalances provides a mock function with given fields: ctx, namespace, filter -func (_m *Plugin) GetTokenBalances(ctx context.Context, namespace string, filter database.Filter) ([]*core.TokenBalance, *database.FilterResult, error) { +func (_m *Plugin) GetTokenBalances(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.TokenBalance, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, filter) var r0 []*core.TokenBalance - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter) []*core.TokenBalance); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter) []*core.TokenBalance); ok { r0 = rf(ctx, namespace, filter) } else { if ret.Get(0) != nil { @@ -1716,17 +1718,17 @@ func (_m *Plugin) GetTokenBalances(ctx context.Context, namespace string, filter } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.Filter) error); ok { r2 = rf(ctx, namespace, filter) } else { r2 = ret.Error(2) @@ -1805,11 +1807,11 @@ func (_m *Plugin) GetTokenPoolByLocator(ctx context.Context, namespace string, c } // GetTokenPools provides a mock function with given fields: ctx, namespace, filter -func (_m *Plugin) GetTokenPools(ctx context.Context, namespace string, filter database.Filter) ([]*core.TokenPool, *database.FilterResult, error) { +func (_m *Plugin) GetTokenPools(ctx context.Context, namespace string, filter ffapi.Filter) 
([]*core.TokenPool, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, filter) var r0 []*core.TokenPool - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter) []*core.TokenPool); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter) []*core.TokenPool); ok { r0 = rf(ctx, namespace, filter) } else { if ret.Get(0) != nil { @@ -1817,17 +1819,17 @@ func (_m *Plugin) GetTokenPools(ctx context.Context, namespace string, filter da } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.Filter) error); ok { r2 = rf(ctx, namespace, filter) } else { r2 = ret.Error(2) @@ -1883,11 +1885,11 @@ func (_m *Plugin) GetTokenTransferByProtocolID(ctx context.Context, namespace st } // GetTokenTransfers provides a mock function with given fields: ctx, namespace, filter -func (_m *Plugin) GetTokenTransfers(ctx context.Context, namespace string, filter database.Filter) ([]*core.TokenTransfer, *database.FilterResult, error) { +func (_m *Plugin) GetTokenTransfers(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.TokenTransfer, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, filter) var r0 []*core.TokenTransfer - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter) []*core.TokenTransfer); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter) []*core.TokenTransfer); ok { r0 = rf(ctx, namespace, filter) } else { if ret.Get(0) != nil { @@ -1895,17 +1897,17 @@ func (_m *Plugin) GetTokenTransfers(ctx context.Context, namespace string, filte } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.Filter) error); ok { r2 = rf(ctx, namespace, filter) } else { r2 = ret.Error(2) @@ -1938,11 +1940,11 @@ func (_m *Plugin) GetTransactionByID(ctx context.Context, namespace string, id * } // GetTransactions provides a mock function with given fields: ctx, namespace, filter -func (_m *Plugin) GetTransactions(ctx context.Context, namespace string, filter database.Filter) ([]*core.Transaction, *database.FilterResult, error) { +func (_m *Plugin) GetTransactions(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.Transaction, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, filter) var r0 []*core.Transaction - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter) []*core.Transaction); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter) []*core.Transaction); ok { r0 = rf(ctx, namespace, filter) } else { if 
ret.Get(0) != nil { @@ -1950,17 +1952,17 @@ func (_m *Plugin) GetTransactions(ctx context.Context, namespace string, filter } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.Filter) error); ok { r2 = rf(ctx, namespace, filter) } else { r2 = ret.Error(2) @@ -2016,11 +2018,11 @@ func (_m *Plugin) GetVerifierByValue(ctx context.Context, vType fftypes.FFEnum, } // GetVerifiers provides a mock function with given fields: ctx, namespace, filter -func (_m *Plugin) GetVerifiers(ctx context.Context, namespace string, filter database.Filter) ([]*core.Verifier, *database.FilterResult, error) { +func (_m *Plugin) GetVerifiers(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.Verifier, *ffapi.FilterResult, error) { ret := _m.Called(ctx, namespace, filter) var r0 []*core.Verifier - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter) []*core.Verifier); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter) []*core.Verifier); ok { r0 = rf(ctx, namespace, filter) } else { if ret.Get(0) != nil { @@ -2028,17 +2030,17 @@ func (_m *Plugin) GetVerifiers(ctx context.Context, namespace string, filter dat } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.Filter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.Filter) *ffapi.FilterResult); ok { r1 = rf(ctx, namespace, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.Filter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.Filter) error); ok { r2 = rf(ctx, namespace, filter) } else { r2 = ret.Error(2) @@ -2305,11 +2307,11 @@ func (_m *Plugin) SetHandler(namespace string, handler database.Callbacks) { } // UpdateBatch provides a mock function with given fields: ctx, namespace, id, update -func (_m *Plugin) UpdateBatch(ctx context.Context, namespace string, id *fftypes.UUID, update database.Update) error { +func (_m *Plugin) UpdateBatch(ctx context.Context, namespace string, id *fftypes.UUID, update ffapi.Update) error { ret := _m.Called(ctx, namespace, id, update) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, *fftypes.UUID, database.Update) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, *fftypes.UUID, ffapi.Update) error); ok { r0 = rf(ctx, namespace, id, update) } else { r0 = ret.Error(0) @@ -2319,11 +2321,11 @@ func (_m *Plugin) UpdateBatch(ctx context.Context, namespace string, id *fftypes } // UpdateData provides a mock function with given fields: ctx, namespace, id, update -func (_m *Plugin) UpdateData(ctx context.Context, namespace string, id *fftypes.UUID, update database.Update) error { +func (_m *Plugin) UpdateData(ctx context.Context, namespace string, id *fftypes.UUID, update ffapi.Update) error { ret := _m.Called(ctx, 
namespace, id, update) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, *fftypes.UUID, database.Update) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, *fftypes.UUID, ffapi.Update) error); ok { r0 = rf(ctx, namespace, id, update) } else { r0 = ret.Error(0) @@ -2333,11 +2335,11 @@ func (_m *Plugin) UpdateData(ctx context.Context, namespace string, id *fftypes. } // UpdateMessage provides a mock function with given fields: ctx, namespace, id, update -func (_m *Plugin) UpdateMessage(ctx context.Context, namespace string, id *fftypes.UUID, update database.Update) error { +func (_m *Plugin) UpdateMessage(ctx context.Context, namespace string, id *fftypes.UUID, update ffapi.Update) error { ret := _m.Called(ctx, namespace, id, update) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, *fftypes.UUID, database.Update) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, *fftypes.UUID, ffapi.Update) error); ok { r0 = rf(ctx, namespace, id, update) } else { r0 = ret.Error(0) @@ -2347,11 +2349,11 @@ func (_m *Plugin) UpdateMessage(ctx context.Context, namespace string, id *fftyp } // UpdateMessages provides a mock function with given fields: ctx, namespace, filter, update -func (_m *Plugin) UpdateMessages(ctx context.Context, namespace string, filter database.Filter, update database.Update) error { +func (_m *Plugin) UpdateMessages(ctx context.Context, namespace string, filter ffapi.Filter, update ffapi.Update) error { ret := _m.Called(ctx, namespace, filter, update) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter, database.Update) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter, ffapi.Update) error); ok { r0 = rf(ctx, namespace, filter, update) } else { r0 = ret.Error(0) @@ -2361,11 +2363,11 @@ func (_m *Plugin) UpdateMessages(ctx context.Context, namespace string, filter d } // UpdateNextPin provides a mock function with given fields: ctx, namespace, sequence, update -func (_m *Plugin) UpdateNextPin(ctx context.Context, namespace string, sequence int64, update database.Update) error { +func (_m *Plugin) UpdateNextPin(ctx context.Context, namespace string, sequence int64, update ffapi.Update) error { ret := _m.Called(ctx, namespace, sequence, update) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, int64, database.Update) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, int64, ffapi.Update) error); ok { r0 = rf(ctx, namespace, sequence, update) } else { r0 = ret.Error(0) @@ -2389,11 +2391,11 @@ func (_m *Plugin) UpdateNonce(ctx context.Context, nonce *core.Nonce) error { } // UpdateOffset provides a mock function with given fields: ctx, rowID, update -func (_m *Plugin) UpdateOffset(ctx context.Context, rowID int64, update database.Update) error { +func (_m *Plugin) UpdateOffset(ctx context.Context, rowID int64, update ffapi.Update) error { ret := _m.Called(ctx, rowID, update) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, int64, database.Update) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, int64, ffapi.Update) error); ok { r0 = rf(ctx, rowID, update) } else { r0 = ret.Error(0) @@ -2403,11 +2405,11 @@ func (_m *Plugin) UpdateOffset(ctx context.Context, rowID int64, update database } // UpdateOperation provides a mock function with given fields: ctx, namespace, id, update -func (_m *Plugin) UpdateOperation(ctx context.Context, namespace string, id *fftypes.UUID, update 
database.Update) error { +func (_m *Plugin) UpdateOperation(ctx context.Context, namespace string, id *fftypes.UUID, update ffapi.Update) error { ret := _m.Called(ctx, namespace, id, update) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, *fftypes.UUID, database.Update) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, *fftypes.UUID, ffapi.Update) error); ok { r0 = rf(ctx, namespace, id, update) } else { r0 = ret.Error(0) @@ -2417,11 +2419,11 @@ func (_m *Plugin) UpdateOperation(ctx context.Context, namespace string, id *fft } // UpdatePins provides a mock function with given fields: ctx, namespace, filter, update -func (_m *Plugin) UpdatePins(ctx context.Context, namespace string, filter database.Filter, update database.Update) error { +func (_m *Plugin) UpdatePins(ctx context.Context, namespace string, filter ffapi.Filter, update ffapi.Update) error { ret := _m.Called(ctx, namespace, filter, update) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, database.Filter, database.Update) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Filter, ffapi.Update) error); ok { r0 = rf(ctx, namespace, filter, update) } else { r0 = ret.Error(0) @@ -2431,11 +2433,11 @@ func (_m *Plugin) UpdatePins(ctx context.Context, namespace string, filter datab } // UpdateSubscription provides a mock function with given fields: ctx, namespace, name, update -func (_m *Plugin) UpdateSubscription(ctx context.Context, namespace string, name string, update database.Update) error { +func (_m *Plugin) UpdateSubscription(ctx context.Context, namespace string, name string, update ffapi.Update) error { ret := _m.Called(ctx, namespace, name, update) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, string, database.Update) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, string, ffapi.Update) error); ok { r0 = rf(ctx, namespace, name, update) } else { r0 = ret.Error(0) @@ -2445,11 +2447,11 @@ func (_m *Plugin) UpdateSubscription(ctx context.Context, namespace string, name } // UpdateTokenApprovals provides a mock function with given fields: ctx, filter, update -func (_m *Plugin) UpdateTokenApprovals(ctx context.Context, filter database.Filter, update database.Update) error { +func (_m *Plugin) UpdateTokenApprovals(ctx context.Context, filter ffapi.Filter, update ffapi.Update) error { ret := _m.Called(ctx, filter, update) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, database.Filter, database.Update) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.Filter, ffapi.Update) error); ok { r0 = rf(ctx, filter, update) } else { r0 = ret.Error(0) @@ -2473,11 +2475,11 @@ func (_m *Plugin) UpdateTokenBalances(ctx context.Context, transfer *core.TokenT } // UpdateTransaction provides a mock function with given fields: ctx, namespace, id, update -func (_m *Plugin) UpdateTransaction(ctx context.Context, namespace string, id *fftypes.UUID, update database.Update) error { +func (_m *Plugin) UpdateTransaction(ctx context.Context, namespace string, id *fftypes.UUID, update ffapi.Update) error { ret := _m.Called(ctx, namespace, id, update) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, string, *fftypes.UUID, database.Update) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, *fftypes.UUID, ffapi.Update) error); ok { r0 = rf(ctx, namespace, id, update) } else { r0 = ret.Error(0) diff --git a/pkg/core/message.go b/pkg/core/message.go index 
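The regenerated databasemocks.Plugin above changes only parameter and return types, from database.Filter, database.FilterResult and database.Update to their ffapi equivalents; the testify wiring is untouched. A minimal sketch of stubbing one of the query methods against the new signatures, assuming the usual testify setup and the repo's package paths (the namespace and return values here are illustrative only):

package example_test

import (
	"context"
	"testing"

	"github.com/hyperledger/firefly-common/pkg/ffapi"
	"github.com/hyperledger/firefly/mocks/databasemocks"
	"github.com/hyperledger/firefly/pkg/core"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
)

func TestGetTokenPoolsWithFFAPIFilter(t *testing.T) {
	mdi := &databasemocks.Plugin{}

	// The expectation now matches an ffapi.Filter argument and returns an
	// *ffapi.FilterResult in position 1, instead of the old database.* types.
	mdi.On("GetTokenPools", mock.Anything, "ns1", mock.Anything).
		Return([]*core.TokenPool{}, &ffapi.FilterResult{}, nil)

	pools, res, err := mdi.GetTokenPools(context.Background(), "ns1", nil)
	assert.NoError(t, err)
	assert.NotNil(t, res)
	assert.Empty(t, pools)
	mdi.AssertExpectations(t)
}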
019207fff1..2f3163ec01 100644 --- a/pkg/core/message.go +++ b/pkg/core/message.go @@ -74,28 +74,28 @@ type MessageHeader struct { Type MessageType `ffstruct:"MessageHeader" json:"type" ffenum:"messagetype"` TxType TransactionType `ffstruct:"MessageHeader" json:"txtype,omitempty" ffenum:"txtype"` SignerRef - Created *fftypes.FFTime `ffstruct:"MessageHeader" json:"created,omitempty" ffexcludeinput:"true"` - Namespace string `ffstruct:"MessageHeader" json:"namespace,omitempty" ffexcludeinput:"true"` - Group *fftypes.Bytes32 `ffstruct:"MessageHeader" json:"group,omitempty" ffexclude:"postNewMessageBroadcast"` - Topics FFStringArray `ffstruct:"MessageHeader" json:"topics,omitempty"` - Tag string `ffstruct:"MessageHeader" json:"tag,omitempty"` - DataHash *fftypes.Bytes32 `ffstruct:"MessageHeader" json:"datahash,omitempty" ffexcludeinput:"true"` + Created *fftypes.FFTime `ffstruct:"MessageHeader" json:"created,omitempty" ffexcludeinput:"true"` + Namespace string `ffstruct:"MessageHeader" json:"namespace,omitempty" ffexcludeinput:"true"` + Group *fftypes.Bytes32 `ffstruct:"MessageHeader" json:"group,omitempty" ffexclude:"postNewMessageBroadcast"` + Topics fftypes.FFStringArray `ffstruct:"MessageHeader" json:"topics,omitempty"` + Tag string `ffstruct:"MessageHeader" json:"tag,omitempty"` + DataHash *fftypes.Bytes32 `ffstruct:"MessageHeader" json:"datahash,omitempty" ffexcludeinput:"true"` } // Message is the envelope by which coordinated data exchange can happen between parties in the network // Data is passed by reference in these messages, and a chain of hashes covering the data and the // details of the message, provides a verification against tampering. type Message struct { - Header MessageHeader `ffstruct:"Message" json:"header"` - LocalNamespace string `ffstruct:"Message" json:"localNamespace,omitempty" ffexcludeinput:"true"` - Hash *fftypes.Bytes32 `ffstruct:"Message" json:"hash,omitempty" ffexcludeinput:"true"` - BatchID *fftypes.UUID `ffstruct:"Message" json:"batch,omitempty" ffexcludeinput:"true"` - State MessageState `ffstruct:"Message" json:"state,omitempty" ffenum:"messagestate" ffexcludeinput:"true"` - Confirmed *fftypes.FFTime `ffstruct:"Message" json:"confirmed,omitempty" ffexcludeinput:"true"` - Data DataRefs `ffstruct:"Message" json:"data" ffexcludeinput:"true"` - Pins FFStringArray `ffstruct:"Message" json:"pins,omitempty" ffexcludeinput:"true"` - IdempotencyKey IdempotencyKey `ffstruct:"Message" json:"idempotencyKey,omitempty"` - Sequence int64 `ffstruct:"Message" json:"-"` // Local database sequence used internally for batch assembly + Header MessageHeader `ffstruct:"Message" json:"header"` + LocalNamespace string `ffstruct:"Message" json:"localNamespace,omitempty" ffexcludeinput:"true"` + Hash *fftypes.Bytes32 `ffstruct:"Message" json:"hash,omitempty" ffexcludeinput:"true"` + BatchID *fftypes.UUID `ffstruct:"Message" json:"batch,omitempty" ffexcludeinput:"true"` + State MessageState `ffstruct:"Message" json:"state,omitempty" ffenum:"messagestate" ffexcludeinput:"true"` + Confirmed *fftypes.FFTime `ffstruct:"Message" json:"confirmed,omitempty" ffexcludeinput:"true"` + Data DataRefs `ffstruct:"Message" json:"data" ffexcludeinput:"true"` + Pins fftypes.FFStringArray `ffstruct:"Message" json:"pins,omitempty" ffexcludeinput:"true"` + IdempotencyKey IdempotencyKey `ffstruct:"Message" json:"idempotencyKey,omitempty"` + Sequence int64 `ffstruct:"Message" json:"-"` // Local database sequence used internally for batch assembly } // BatchMessage is the fields in a message record that 
are assured to be consistent on all parties. diff --git a/pkg/core/message_test.go b/pkg/core/message_test.go index f5995da53b..e63251aebc 100644 --- a/pkg/core/message_test.go +++ b/pkg/core/message_test.go @@ -272,7 +272,7 @@ func TestMessageImmutable(t *testing.T) { Data: DataRefs{ {ID: fftypes.NewUUID(), Hash: fftypes.NewRandB32()}, }, - Pins: NewFFStringArray("pin1", "pin2"), + Pins: fftypes.NewFFStringArray("pin1", "pin2"), } assert.True(t, msg.Hash.Equals(msg.BatchMessage().Hash)) } diff --git a/pkg/core/stringarray.go b/pkg/core/stringarray.go deleted file mode 100644 index 4b32ab69f0..0000000000 --- a/pkg/core/stringarray.go +++ /dev/null @@ -1,145 +0,0 @@ -// Copyright © 2022 Kaleido, Inc. -// -// SPDX-License-Identifier: Apache-2.0 -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package core - -import ( - "context" - "database/sql/driver" - "fmt" - "sort" - "strings" - - "github.com/hyperledger/firefly-common/pkg/fftypes" - "github.com/hyperledger/firefly-common/pkg/i18n" -) - -// FFStringArray is an array of strings, each conforming to the requirements of a FireFly name -type FFStringArray []string - -// Because each FFName has a max length of 64, 15 names (plus comma delimeters) is a safe max -// to pack into a string column of length 1024 -const FFStringNameItemsMax = 15 - -// FFStringArrayStandardMax is the standard length we set as a VARCHAR max in tables for a string array -const FFStringArrayStandardMax = 1024 - -func NewFFStringArray(initialContent ...string) FFStringArray { - sa := make(FFStringArray, 0, len(initialContent)) - for _, s := range initialContent { - if s != "" { - sa = append(sa, s) - } - } - return sa -} - -func (sa FFStringArray) Value() (driver.Value, error) { - if sa == nil { - return "", nil - } - return strings.Join([]string(sa), ","), nil -} - -func (sa *FFStringArray) Scan(src interface{}) error { - switch st := src.(type) { - case string: - if st == "" { - *sa = []string{} - return nil - } - *sa = strings.Split(st, ",") - return nil - case []byte: - if len(st) == 0 { - *sa = []string{} - return nil - } - *sa = strings.Split(string(st), ",") - return nil - case FFStringArray: - *sa = st - return nil - case nil: - *sa = []string{} - return nil - default: - return i18n.NewError(context.Background(), i18n.MsgTypeRestoreFailed, src, sa) - } -} - -func (sa FFStringArray) String() string { - if sa == nil { - return "" - } - return strings.Join([]string(sa), ",") -} - -func (sa FFStringArray) Validate(ctx context.Context, fieldName string, isName bool, maxItems int) error { - var totalLength int - dupCheck := make(map[string]bool) - for i, n := range sa { - if dupCheck[n] { - return i18n.NewError(ctx, i18n.MsgDuplicateArrayEntry, fieldName, i, n) - } - dupCheck[n] = true - totalLength += len(n) - if isName { - if err := fftypes.ValidateFFNameField(ctx, n, fmt.Sprintf("%s[%d]", fieldName, i)); err != nil { - return err - } - } else { - if err := fftypes.ValidateSafeCharsOnly(ctx, n, fmt.Sprintf("%s[%d]", fieldName, i)); err != nil { - return err 
- } - } - } - if maxItems > 0 && len(sa) > maxItems { - return i18n.NewError(ctx, i18n.MsgTooManyItems, fieldName, FFStringNameItemsMax, len(sa)) - } - if totalLength > FFStringArrayStandardMax { - return i18n.NewError(ctx, i18n.MsgFieldTooLong, fieldName, FFStringArrayStandardMax) - } - return nil -} - -func (sa FFStringArray) appendLowerIfUnique(s string) FFStringArray { - if s == "" { - return sa - } - for _, existing := range sa { - if strings.EqualFold(s, existing) { - return sa - } - } - return append(sa, strings.ToLower(s)) -} - -// AddToSortedSet determines if the new string is already in the set of strings (case insensitive), -// and if not it adds it to the list (lower case) and returns a new slice of alphabetically sorted -// strings reference and true. -// If no change is made, the original reference is returned and false. -func (sa FFStringArray) AddToSortedSet(newValues ...string) (res FFStringArray, changed bool) { - res = sa - for _, s := range newValues { - res = res.appendLowerIfUnique(s) - } - if len(res) != len(sa) { - sort.Strings(res) - return res, true - } - return sa, false -} diff --git a/pkg/core/stringarray_test.go b/pkg/core/stringarray_test.go deleted file mode 100644 index db72ac7bd2..0000000000 --- a/pkg/core/stringarray_test.go +++ /dev/null @@ -1,128 +0,0 @@ -// Copyright © 2021 Kaleido, Inc. -// -// SPDX-License-Identifier: Apache-2.0 -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
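With pkg/core/stringarray.go removed, FFStringArray is consumed from firefly-common's fftypes package, as the message_test.go hunk above already shows with fftypes.NewFFStringArray. A minimal sketch of the relocated type in use, assuming the moved implementation keeps the same surface as the deleted pkg/core version (empty strings dropped by the constructor; AddToSortedSet case-folds, de-duplicates, and returns a sorted copy plus a changed flag):

package main

import (
	"fmt"

	"github.com/hyperledger/firefly-common/pkg/fftypes"
)

func main() {
	// Constructor confirmed by the message_test.go change above; empty
	// strings are dropped, as in the deleted pkg/core implementation.
	topics := fftypes.NewFFStringArray("pin2", "pin1", "")

	// AddToSortedSet is assumed to behave as the deleted method documented:
	// case-insensitive de-duplication, lower-casing, and a sorted copy with
	// a flag reporting whether anything was added.
	merged, changed := topics.AddToSortedSet("Pin3", "pin1")

	fmt.Println(changed)         // expected: true
	fmt.Println(merged.String()) // expected: pin1,pin2,pin3
}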
- -package core - -import ( - "context" - "fmt" - "strings" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestFFStringArrayVerifyTooLong(t *testing.T) { - na := make(FFStringArray, 16) - for i := 0; i < 16; i++ { - na[i] = fmt.Sprintf("item_%d", i) - } - err := na.Validate(context.Background(), "field1", true, FFStringNameItemsMax) - assert.Regexp(t, `FF00134.*field1`, err) -} - -func TestFFStringArrayVerifyDuplicate(t *testing.T) { - na := FFStringArray{"value1", "value2", "value1"} - err := na.Validate(context.Background(), "field1", true, FFStringNameItemsMax) - assert.Regexp(t, `FF00133.*field1`, err) -} - -func TestFFStringArrayVerifyBadName(t *testing.T) { - na := FFStringArray{"!valid"} - err := na.Validate(context.Background(), "field1", true, FFStringNameItemsMax) - assert.Regexp(t, `FF00140.*field1\[0\]`, err) -} - -func TestFFStringArrayVerifyBadNonName(t *testing.T) { - na := FFStringArray{"!valid"} - err := na.Validate(context.Background(), "field1", false, FFStringNameItemsMax) - assert.Regexp(t, `FF00139.*field1\[0\]`, err) -} - -func TestFFStringArrayVerifyTooLongTotal(t *testing.T) { - longstr := strings.Builder{} - for i := 0; i < (FFStringArrayStandardMax + 1); i++ { - longstr.WriteRune('a') - } - na := FFStringArray{longstr.String()} - err := na.Validate(context.Background(), "field1", false, FFStringNameItemsMax) - assert.Regexp(t, `FF00135.*field1`, err) -} - -func TestFFStringArrayScanValue(t *testing.T) { - - na1 := FFStringArray{"name1", "name2"} - v, err := na1.Value() - assert.NoError(t, err) - assert.Equal(t, "name1,name2", v) - - var na2 FFStringArray - assert.Equal(t, "", na2.String()) - v, err = na2.Value() - assert.Equal(t, "", v) - err = na2.Scan("name1,name2") - assert.NoError(t, err) - assert.Equal(t, "name1,name2", na2.String()) - - var na3 FFStringArray - err = na3.Scan([]byte("name1,name2")) - assert.NoError(t, err) - assert.Equal(t, "name1,name2", na3.String()) - - var na4 FFStringArray - err = na4.Scan([]byte(nil)) - assert.NoError(t, err) - assert.Equal(t, "", na4.String()) - err = na4.Scan(nil) - assert.NoError(t, err) - assert.Equal(t, "", na4.String()) - v, err = na4.Value() - assert.NoError(t, err) - assert.Equal(t, "", v) - - var na5 FFStringArray - err = na5.Scan("") - assert.NoError(t, err) - assert.Equal(t, FFStringArray{}, na5) - assert.Equal(t, "", na5.String()) - - var na6 FFStringArray - err = na6.Scan(42) - assert.Regexp(t, "FF00105", err) - - var na7 FFStringArray - err = na7.Scan(FFStringArray{"test1", "test2"}) - assert.Equal(t, FFStringArray{"test1", "test2"}, na7) - -} - -func TestFFStringArrayMergeFold(t *testing.T) { - - sa := NewFFStringArray("name2", "name1") - - nsa, changed := sa.AddToSortedSet("Name3") - assert.True(t, changed) - assert.Equal(t, FFStringArray{"name1", "name2", "name3"}, nsa) - - nsa, changed = sa.AddToSortedSet("name1", "") - assert.False(t, changed) - assert.Equal(t, sa, nsa) - - nsa, changed = sa.AddToSortedSet("NAME4", "NAME3", "name1", "name2") - assert.True(t, changed) - assert.Equal(t, FFStringArray{"name1", "name2", "name3", "name4"}, nsa) - -} diff --git a/pkg/core/transaction.go b/pkg/core/transaction.go index 3b5eb02ebf..18fa0f3927 100644 --- a/pkg/core/transaction.go +++ b/pkg/core/transaction.go @@ -61,12 +61,12 @@ type BlockchainTransactionRef struct { // Transaction is a unit of work sent or received by this node // It serves as a container for one or more Operations, BlockchainEvents, and other related objects type Transaction struct { - ID *fftypes.UUID 
`ffstruct:"Transaction" json:"id,omitempty"` - Namespace string `ffstruct:"Transaction" json:"namespace,omitempty"` - Type TransactionType `ffstruct:"Transaction" json:"type" ffenum:"txtype"` - Created *fftypes.FFTime `ffstruct:"Transaction" json:"created"` - IdempotencyKey IdempotencyKey `ffstruct:"Transaction" json:"idempotencyKey,omitempty"` - BlockchainIDs FFStringArray `ffstruct:"Transaction" json:"blockchainIds,omitempty"` + ID *fftypes.UUID `ffstruct:"Transaction" json:"id,omitempty"` + Namespace string `ffstruct:"Transaction" json:"namespace,omitempty"` + Type TransactionType `ffstruct:"Transaction" json:"type" ffenum:"txtype"` + Created *fftypes.FFTime `ffstruct:"Transaction" json:"created"` + IdempotencyKey IdempotencyKey `ffstruct:"Transaction" json:"idempotencyKey,omitempty"` + BlockchainIDs fftypes.FFStringArray `ffstruct:"Transaction" json:"blockchainIds,omitempty"` } type TransactionStatusType string diff --git a/pkg/database/filter.go b/pkg/database/filter.go deleted file mode 100644 index 5551247253..0000000000 --- a/pkg/database/filter.go +++ /dev/null @@ -1,597 +0,0 @@ -// Copyright © 2022 Kaleido, Inc. -// -// SPDX-License-Identifier: Apache-2.0 -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package database - -import ( - "context" - "database/sql/driver" - "fmt" - "strconv" - "strings" - - "github.com/hyperledger/firefly-common/pkg/fftypes" - "github.com/hyperledger/firefly-common/pkg/i18n" -) - -// Filter is the output of the builder -type Filter interface { - // Sort adds a set of sort conditions (all in a single sort order) - Sort(...string) Filter - - // Ascending sort order - Ascending() Filter - - // Descending sort order - Descending() Filter - - // Skip for pagination - Skip(uint64) Filter - - // Limit for pagination - Limit(uint64) Filter - - // Request a count to be returned on the total number that match the query - Count(c bool) Filter - - // Finalize completes the filter, and for the plugin to validated output structure to convert - Finalize() (*FilterInfo, error) - - // Builder returns the builder that made it - Builder() FilterBuilder -} - -// MultiConditionFilter gives convenience methods to add conditions -type MultiConditionFilter interface { - Filter - // Add adds filters to the condition - Condition(...Filter) MultiConditionFilter -} - -type AndFilter interface{ MultiConditionFilter } - -type OrFilter interface{ MultiConditionFilter } - -// FilterOp enum of filter operations that must be implemented by plugins - the string value is -// used in the core string formatting method (for logging etc.) 
-type FilterOp string - -// The character pairs in this are not used anywhere externally, just in a to-string representation of queries -const ( - // FilterOpAnd and - FilterOpAnd FilterOp = "&&" - // FilterOpOr or - FilterOpOr FilterOp = "||" - // FilterOpEq equal - FilterOpEq FilterOp = "==" - // FilterOpIEq equal - FilterOpIEq FilterOp = ":=" - // FilterOpNe not equal - FilterOpNeq FilterOp = "!=" - // FilterOpNIeq not equal - FilterOpNIeq FilterOp = ";=" - // FilterOpIn in list of values - FilterOpIn FilterOp = "IN" - // FilterOpNotIn not in list of values - FilterOpNotIn FilterOp = "NI" - // FilterOpGt greater than - FilterOpGt FilterOp = ">>" - // FilterOpLt less than - FilterOpLt FilterOp = "<<" - // FilterOpGte greater than or equal - FilterOpGte FilterOp = ">=" - // FilterOpLte less than or equal - FilterOpLte FilterOp = "<=" - // FilterOpCont contains the specified text, case sensitive - FilterOpCont FilterOp = "%=" - // FilterOpNotCont does not contain the specified text, case sensitive - FilterOpNotCont FilterOp = "!%" - // FilterOpICont contains the specified text, case insensitive - FilterOpICont FilterOp = ":%" - // FilterOpNotICont does not contain the specified text, case insensitive - FilterOpNotICont FilterOp = ";%" - // FilterOpStartsWith contains the specified text, case sensitive - FilterOpStartsWith FilterOp = "^=" - // FilterOpNotCont does not contain the specified text, case sensitive - FilterOpNotStartsWith FilterOp = "!^" - // FilterOpICont contains the specified text, case insensitive - FilterOpIStartsWith FilterOp = ":^" - // FilterOpNotICont does not contain the specified text, case insensitive - FilterOpNotIStartsWith FilterOp = ";^" - // FilterOpEndsWith contains the specified text, case sensitive - FilterOpEndsWith FilterOp = "$=" - // FilterOpNotCont does not contain the specified text, case sensitive - FilterOpNotEndsWith FilterOp = "!$" - // FilterOpICont contains the specified text, case insensitive - FilterOpIEndsWith FilterOp = ":$" - // FilterOpNotICont does not contain the specified text, case insensitive - FilterOpNotIEndsWith FilterOp = ";$" -) - -func filterOpIsStringMatch(op FilterOp) bool { - for _, r := range string(op) { - switch r { - case '%', '^', '$', ':': - // Partial or case-insensitive matches all need a string - return true - } - } - return false -} - -func filterCannotAcceptNull(op FilterOp) bool { - for _, r := range string(op) { - switch r { - case '%', '^', '$', ':', '>', '<': - // string based matching, or gt/lt cannot accept null - return true - } - } - return false -} - -// FilterBuilder is the syntax used to build the filter, where And() and Or() can be nested -type FilterBuilder interface { - // Fields is the list of available fields - Fields() []string - // And requires all sub-filters to match - And(and ...Filter) AndFilter - // Or requires any of the sub-filters to match - Or(and ...Filter) OrFilter - // Eq equal - case sensitive - Eq(name string, value driver.Value) Filter - // Neq not equal - case sensitive - Neq(name string, value driver.Value) Filter - // IEq equal - case insensitive - IEq(name string, value driver.Value) Filter - // INeq not equal - case insensitive - NIeq(name string, value driver.Value) Filter - // In one of an array of values - In(name string, value []driver.Value) Filter - // NotIn not one of an array of values - NotIn(name string, value []driver.Value) Filter - // Lt less than - Lt(name string, value driver.Value) Filter - // Gt greater than - Gt(name string, value driver.Value) Filter - // Gte 
greater than or equal - Gte(name string, value driver.Value) Filter - // Lte less than or equal - Lte(name string, value driver.Value) Filter - // Contains allows the string anywhere - case sensitive - Contains(name string, value driver.Value) Filter - // NotContains disallows the string anywhere - case sensitive - NotContains(name string, value driver.Value) Filter - // IContains allows the string anywhere - case insensitive - IContains(name string, value driver.Value) Filter - // INotContains disallows the string anywhere - case insensitive - NotIContains(name string, value driver.Value) Filter - // StartsWith allows the string at the start - case sensitive - StartsWith(name string, value driver.Value) Filter - // NotStartsWith disallows the string at the start - case sensitive - NotStartsWith(name string, value driver.Value) Filter - // IStartsWith allows the string at the start - case insensitive - IStartsWith(name string, value driver.Value) Filter - // NotIStartsWith disallows the string att the start - case insensitive - NotIStartsWith(name string, value driver.Value) Filter - // EndsWith allows the string at the end - case sensitive - EndsWith(name string, value driver.Value) Filter - // NotEndsWith disallows the string at the end - case sensitive - NotEndsWith(name string, value driver.Value) Filter - // IEndsWith allows the string at the end - case insensitive - IEndsWith(name string, value driver.Value) Filter - // NotIEndsWith disallows the string att the end - case insensitive - NotIEndsWith(name string, value driver.Value) Filter -} - -// NullBehavior specifies whether to sort nulls first or last in a query -type NullBehavior int - -const ( - NullsDefault NullBehavior = iota - NullsFirst - NullsLast -) - -// SortField is field+direction for sorting -type SortField struct { - Field string - Descending bool - Nulls NullBehavior -} - -// FilterInfo is the structure returned by Finalize to the plugin, to serialize this filter -// into the underlying database mechanism's filter language -type FilterInfo struct { - Sort []*SortField - Skip uint64 - Limit uint64 - Count bool - CountExpr string - Field string - Op FilterOp - Values []FieldSerialization - Value FieldSerialization - Children []*FilterInfo -} - -// FilterResult is has additional info if requested on the query - currently only the total count -type FilterResult struct { - TotalCount *int64 -} - -func valueString(f FieldSerialization) string { - v, _ := f.Value() - switch tv := v.(type) { - case nil: - return fftypes.NullString - case []byte: - return fmt.Sprintf("'%s'", tv) - case int64: - return strconv.FormatInt(tv, 10) - case bool: - return fmt.Sprintf("%t", tv) - default: - return fmt.Sprintf("'%s'", tv) - } -} - -func (f *FilterInfo) filterString() string { - switch f.Op { - case FilterOpAnd, FilterOpOr: - cs := make([]string, len(f.Children)) - for i, c := range f.Children { - cs[i] = fmt.Sprintf("( %s )", c.filterString()) - } - return strings.Join(cs, fmt.Sprintf(" %s ", f.Op)) - case FilterOpIn, FilterOpNotIn: - strValues := make([]string, len(f.Values)) - for i, v := range f.Values { - strValues[i] = valueString(v) - } - return fmt.Sprintf("%s %s [%s]", f.Field, f.Op, strings.Join(strValues, ",")) - default: - return fmt.Sprintf("%s %s %s", f.Field, f.Op, valueString(f.Value)) - } -} - -func (f *FilterInfo) String() string { - - var val strings.Builder - - val.WriteString(f.filterString()) - - if len(f.Sort) > 0 { - fields := make([]string, len(f.Sort)) - for i, s := range f.Sort { - if s.Descending { - 
fields[i] = "-" - } - fields[i] += s.Field - } - val.WriteString(fmt.Sprintf(" sort=%s", strings.Join(fields, ","))) - } - if f.Skip > 0 { - val.WriteString(fmt.Sprintf(" skip=%d", f.Skip)) - } - if f.Limit > 0 { - val.WriteString(fmt.Sprintf(" limit=%d", f.Limit)) - } - if f.Count { - val.WriteString(" count=true") - } - - return val.String() -} - -func (fb *filterBuilder) Fields() []string { - keys := make([]string, len(fb.queryFields)) - i := 0 - for k := range fb.queryFields { - keys[i] = k - i++ - } - return keys -} - -type filterBuilder struct { - ctx context.Context - queryFields queryFields - sort []*SortField - skip uint64 - limit uint64 - count bool - forceAscending bool - forceDescending bool -} - -type baseFilter struct { - fb *filterBuilder - children []Filter - op FilterOp - field string - value interface{} -} - -func (f *baseFilter) Builder() FilterBuilder { - return f.fb -} - -func (f *baseFilter) Finalize() (fi *FilterInfo, err error) { - var children []*FilterInfo - var value FieldSerialization - var values []FieldSerialization - - switch f.op { - case FilterOpAnd, FilterOpOr: - children = make([]*FilterInfo, len(f.children)) - for i, c := range f.children { - if children[i], err = c.Finalize(); err != nil { - return nil, err - } - } - case FilterOpIn, FilterOpNotIn: - fValues := f.value.([]driver.Value) - values = make([]FieldSerialization, len(fValues)) - name := strings.ToLower(f.field) - field, ok := f.fb.queryFields[name] - if !ok { - return nil, i18n.NewError(f.fb.ctx, i18n.MsgInvalidFilterField, name) - } - for i, fv := range fValues { - values[i] = field.getSerialization() - if err = values[i].Scan(fv); err != nil { - return nil, i18n.WrapError(f.fb.ctx, err, i18n.MsgInvalidValueForFilterField, name) - } - } - default: - name := strings.ToLower(f.field) - field, ok := f.fb.queryFields[name] - if !ok { - return nil, i18n.NewError(f.fb.ctx, i18n.MsgInvalidFilterField, name) - } - skipScan := false - switch f.value.(type) { - case nil: - if filterCannotAcceptNull(f.op) { - return nil, i18n.NewError(f.fb.ctx, i18n.MsgFieldMatchNoNull, f.op, name) - } - value = &nullField{} - skipScan = true - case string: - switch { - case field.filterAsString(): - value = &stringField{} - case filterOpIsStringMatch(f.op): - return nil, i18n.NewError(f.fb.ctx, i18n.MsgFieldTypeNoStringMatching, name, field.description()) - default: - value = field.getSerialization() - } - default: - value = field.getSerialization() - } - if !skipScan { - if err = value.Scan(f.value); err != nil { - return nil, i18n.WrapError(f.fb.ctx, err, i18n.MsgInvalidValueForFilterField, name) - } - } - } - - if f.fb.forceDescending { - for _, sf := range f.fb.sort { - sf.Descending = true - } - } else if f.fb.forceAscending { - for _, sf := range f.fb.sort { - sf.Descending = false - } - } - - return &FilterInfo{ - Children: children, - Op: f.op, - Field: f.field, - Values: values, - Value: value, - Sort: f.fb.sort, - Skip: f.fb.skip, - Limit: f.fb.limit, - Count: f.fb.count, - }, nil -} - -func (f *baseFilter) Sort(fields ...string) Filter { - for _, field := range fields { - descending := false - if strings.HasPrefix(field, "-") { - field = strings.TrimPrefix(field, "-") - descending = true - } - if _, ok := f.fb.queryFields[field]; ok { - f.fb.sort = append(f.fb.sort, &SortField{ - Field: field, - Descending: descending, - }) - } - } - return f -} - -func (f *baseFilter) Skip(skip uint64) Filter { - f.fb.skip = skip - return f -} - -func (f *baseFilter) Limit(limit uint64) Filter { - f.fb.limit = limit - 
return f -} - -func (f *baseFilter) Count(c bool) Filter { - f.fb.count = c - return f -} - -func (f *baseFilter) Ascending() Filter { - f.fb.forceAscending = true - return f -} - -func (f *baseFilter) Descending() Filter { - f.fb.forceDescending = true - return f -} - -type andFilter struct { - baseFilter -} - -func (fb *andFilter) Condition(children ...Filter) MultiConditionFilter { - fb.children = append(fb.children, children...) - return fb -} - -func (fb *filterBuilder) And(and ...Filter) AndFilter { - return &andFilter{ - baseFilter: baseFilter{ - fb: fb, - op: FilterOpAnd, - children: and, - }, - } -} - -type orFilter struct { - baseFilter -} - -func (fb *orFilter) Condition(children ...Filter) MultiConditionFilter { - fb.children = append(fb.children, children...) - return fb -} - -func (fb *filterBuilder) Or(or ...Filter) OrFilter { - return &orFilter{ - baseFilter: baseFilter{ - fb: fb, - op: FilterOpOr, - children: or, - }, - } -} - -func (fb *filterBuilder) Eq(name string, value driver.Value) Filter { - return fb.fieldFilter(FilterOpEq, name, value) -} - -func (fb *filterBuilder) Neq(name string, value driver.Value) Filter { - return fb.fieldFilter(FilterOpNeq, name, value) -} - -func (fb *filterBuilder) IEq(name string, value driver.Value) Filter { - return fb.fieldFilter(FilterOpIEq, name, value) -} - -func (fb *filterBuilder) NIeq(name string, value driver.Value) Filter { - return fb.fieldFilter(FilterOpNIeq, name, value) -} - -func (fb *filterBuilder) In(name string, values []driver.Value) Filter { - return fb.fieldFilter(FilterOpIn, name, values) -} - -func (fb *filterBuilder) NotIn(name string, values []driver.Value) Filter { - return fb.fieldFilter(FilterOpNotIn, name, values) -} - -func (fb *filterBuilder) Lt(name string, value driver.Value) Filter { - return fb.fieldFilter(FilterOpLt, name, value) -} - -func (fb *filterBuilder) Gt(name string, value driver.Value) Filter { - return fb.fieldFilter(FilterOpGt, name, value) -} - -func (fb *filterBuilder) Gte(name string, value driver.Value) Filter { - return fb.fieldFilter(FilterOpGte, name, value) -} - -func (fb *filterBuilder) Lte(name string, value driver.Value) Filter { - return fb.fieldFilter(FilterOpLte, name, value) -} - -func (fb *filterBuilder) Contains(name string, value driver.Value) Filter { - return fb.fieldFilter(FilterOpCont, name, value) -} - -func (fb *filterBuilder) NotContains(name string, value driver.Value) Filter { - return fb.fieldFilter(FilterOpNotCont, name, value) -} - -func (fb *filterBuilder) IContains(name string, value driver.Value) Filter { - return fb.fieldFilter(FilterOpICont, name, value) -} - -func (fb *filterBuilder) NotIContains(name string, value driver.Value) Filter { - return fb.fieldFilter(FilterOpNotICont, name, value) -} - -func (fb *filterBuilder) StartsWith(name string, value driver.Value) Filter { - return fb.fieldFilter(FilterOpStartsWith, name, value) -} - -func (fb *filterBuilder) NotStartsWith(name string, value driver.Value) Filter { - return fb.fieldFilter(FilterOpNotStartsWith, name, value) -} - -func (fb *filterBuilder) IStartsWith(name string, value driver.Value) Filter { - return fb.fieldFilter(FilterOpIStartsWith, name, value) -} - -func (fb *filterBuilder) NotIStartsWith(name string, value driver.Value) Filter { - return fb.fieldFilter(FilterOpNotIStartsWith, name, value) -} - -func (fb *filterBuilder) EndsWith(name string, value driver.Value) Filter { - return fb.fieldFilter(FilterOpEndsWith, name, value) -} - -func (fb *filterBuilder) NotEndsWith(name string, 
value driver.Value) Filter { - return fb.fieldFilter(FilterOpNotEndsWith, name, value) -} - -func (fb *filterBuilder) IEndsWith(name string, value driver.Value) Filter { - return fb.fieldFilter(FilterOpIEndsWith, name, value) -} - -func (fb *filterBuilder) NotIEndsWith(name string, value driver.Value) Filter { - return fb.fieldFilter(FilterOpNotIEndsWith, name, value) -} - -func (fb *filterBuilder) fieldFilter(op FilterOp, name string, value interface{}) Filter { - return &fieldFilter{ - baseFilter: baseFilter{ - fb: fb, - op: op, - field: name, - value: value, - }, - } -} - -type fieldFilter struct { - baseFilter -} diff --git a/pkg/database/filter_test.go b/pkg/database/filter_test.go deleted file mode 100644 index 5e303e8379..0000000000 --- a/pkg/database/filter_test.go +++ /dev/null @@ -1,345 +0,0 @@ -// Copyright © 2021 Kaleido, Inc. -// -// SPDX-License-Identifier: Apache-2.0 -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package database - -import ( - "context" - "database/sql/driver" - "testing" - - "github.com/hyperledger/firefly-common/pkg/fftypes" - "github.com/hyperledger/firefly/pkg/core" - "github.com/stretchr/testify/assert" -) - -func TestBuildMessageFilter(t *testing.T) { - fb := MessageQueryFactory.NewFilter(context.Background()) - f, err := fb.And(). - Condition(fb.Eq("tag", "tag1")). - Condition(fb.Or(). - Condition(fb.Eq("id", "35c11cba-adff-4a4d-970a-02e3a0858dc8")). - Condition(fb.Eq("id", "caefb9d1-9fc9-4d6a-a155-514d3139adf7")), - ). - Condition(fb.Gt("sequence", 12345)). - Condition(fb.Eq("confirmed", nil)). - Skip(50). - Limit(25). - Count(true). - Sort("tag"). - Descending(). - Finalize() - assert.NoError(t, err) - assert.Equal(t, "( tag == 'tag1' ) && ( ( id == '35c11cba-adff-4a4d-970a-02e3a0858dc8' ) || ( id == 'caefb9d1-9fc9-4d6a-a155-514d3139adf7' ) ) && ( sequence >> 12345 ) && ( confirmed == null ) sort=-tag skip=50 limit=25 count=true", f.String()) -} - -func TestBuildMessageFilter2(t *testing.T) { - fb := MessageQueryFactory.NewFilter(context.Background()) - f, err := fb.Gt("sequence", "0"). - Sort("sequence"). - Ascending(). - Finalize() - - assert.NoError(t, err) - assert.Equal(t, "sequence >> 0 sort=sequence", f.String()) -} - -func TestBuildMessageFilter3(t *testing.T) { - fb := MessageQueryFactory.NewFilter(context.Background()) - f, err := fb.And( - fb.In("created", []driver.Value{1, 2, 3}), - fb.NotIn("created", []driver.Value{1, 2, 3}), - fb.Lt("created", "0"), - fb.Lte("created", "0"), - fb.Gte("created", "0"), - fb.Neq("created", "0"), - fb.Gt("sequence", 12345), - fb.Contains("topics", "abc"), - fb.NotContains("topics", "def"), - fb.IContains("topics", "ghi"), - fb.NotIContains("topics", "jkl"), - ). - Sort("-created"). - Sort("topics"). - Sort("-sequence"). 
- Finalize() - assert.NoError(t, err) - assert.Equal(t, "( created IN [1000000000,2000000000,3000000000] ) && ( created NI [1000000000,2000000000,3000000000] ) && ( created << 0 ) && ( created <= 0 ) && ( created >= 0 ) && ( created != 0 ) && ( sequence >> 12345 ) && ( topics %= 'abc' ) && ( topics !% 'def' ) && ( topics :% 'ghi' ) && ( topics ;% 'jkl' ) sort=-created,topics,-sequence", f.String()) -} - -func TestBuildMessageFilter4(t *testing.T) { - fb := MessageQueryFactory.NewFilter(context.Background()) - f, err := fb.And( - fb.IEq("topics", "abc"), - fb.NIeq("topics", "bcd"), - fb.StartsWith("topics", "cde"), - fb.IStartsWith("topics", "def"), - fb.NotStartsWith("topics", "efg"), - fb.NotIStartsWith("topics", "fgh"), - fb.EndsWith("topics", "hij"), - fb.IEndsWith("topics", "ikl"), - fb.NotEndsWith("topics", "lmn"), - fb.NotIEndsWith("topics", "mno"), - ).Finalize() - assert.NoError(t, err) - assert.Equal(t, "( topics := 'abc' ) && ( topics ;= 'bcd' ) && ( topics ^= 'cde' ) && ( topics :^ 'def' ) && ( topics !^ 'efg' ) && ( topics ;^ 'fgh' ) && ( topics $= 'hij' ) && ( topics :$ 'ikl' ) && ( topics !$ 'lmn' ) && ( topics ;$ 'mno' )", f.String()) -} - -func TestBuildMessageBadInFilterField(t *testing.T) { - fb := MessageQueryFactory.NewFilter(context.Background()) - _, err := fb.And( - fb.In("!wrong", []driver.Value{"a", "b", "c"}), - ).Finalize() - assert.Regexp(t, "FF00142", err) -} - -func TestBuildMessageBadInFilterValue(t *testing.T) { - fb := MessageQueryFactory.NewFilter(context.Background()) - _, err := fb.And( - fb.In("sequence", []driver.Value{"!integer"}), - ).Finalize() - assert.Regexp(t, "FF00143", err) -} - -func TestBuildMessageUUIDConvert(t *testing.T) { - fb := MessageQueryFactory.NewFilter(context.Background()) - u := fftypes.MustParseUUID("4066ABDC-8BBD-4472-9D29-1A55B467F9B9") - b32 := fftypes.UUIDBytes(u) - var nilB32 *fftypes.Bytes32 - f, err := fb.And( - fb.Eq("id", u), - fb.Eq("id", *u), - fb.In("id", []driver.Value{*u}), - fb.Eq("id", u.String()), - fb.Neq("id", nil), - fb.Eq("id", b32), - fb.Neq("id", *b32), - fb.Eq("id", ""), - fb.Eq("id", nilB32), - ).Finalize() - assert.NoError(t, err) - assert.Equal(t, "( id == '4066abdc-8bbd-4472-9d29-1a55b467f9b9' ) && ( id == '4066abdc-8bbd-4472-9d29-1a55b467f9b9' ) && ( id IN ['4066abdc-8bbd-4472-9d29-1a55b467f9b9'] ) && ( id == '4066abdc-8bbd-4472-9d29-1a55b467f9b9' ) && ( id != null ) && ( id == '4066abdc-8bbd-4472-9d29-1a55b467f9b9' ) && ( id != '4066abdc-8bbd-4472-9d29-1a55b467f9b9' ) && ( id == '' ) && ( id == null )", f.String()) -} - -func TestBuildMessageBytes32Convert(t *testing.T) { - fb := MessageQueryFactory.NewFilter(context.Background()) - b32, _ := fftypes.ParseBytes32(context.Background(), "7f4806535f8b3d9bf178af053d2bbdb46047365466ed16bbb0732a71492bdaf0") - var nilB32 *fftypes.Bytes32 - f, err := fb.And( - fb.Eq("hash", b32), - fb.Eq("hash", *b32), - fb.In("hash", []driver.Value{*b32}), - fb.Eq("hash", b32.String()), - fb.Neq("hash", nil), - fb.Eq("hash", ""), - fb.Eq("hash", nilB32), - ).Finalize() - assert.NoError(t, err) - assert.Equal(t, "( hash == '7f4806535f8b3d9bf178af053d2bbdb46047365466ed16bbb0732a71492bdaf0' ) && ( hash == '7f4806535f8b3d9bf178af053d2bbdb46047365466ed16bbb0732a71492bdaf0' ) && ( hash IN ['7f4806535f8b3d9bf178af053d2bbdb46047365466ed16bbb0732a71492bdaf0'] ) && ( hash == '7f4806535f8b3d9bf178af053d2bbdb46047365466ed16bbb0732a71492bdaf0' ) && ( hash != null ) && ( hash == '' ) && ( hash == null )", f.String()) -} -func TestBuildMessageIntConvert(t *testing.T) { - fb := 
MessageQueryFactory.NewFilter(context.Background()) - f, err := fb.And( - fb.Lt("sequence", int(111)), - fb.Lt("sequence", int32(222)), - fb.Lt("sequence", int64(333)), - fb.Lt("sequence", uint(444)), - fb.Lt("sequence", uint32(555)), - fb.Lt("sequence", uint64(666)), - ).Finalize() - assert.NoError(t, err) - assert.Equal(t, "( sequence << 111 ) && ( sequence << 222 ) && ( sequence << 333 ) && ( sequence << 444 ) && ( sequence << 555 ) && ( sequence << 666 )", f.String()) -} - -func TestBuildMessageTimeConvert(t *testing.T) { - fb := MessageQueryFactory.NewFilter(context.Background()) - f, err := fb.And( - fb.Gt("created", int64(1621112824)), - fb.Gt("created", 0), - fb.Eq("created", "2021-05-15T21:07:54.123456789Z"), - fb.Eq("created", nil), - fb.Lt("created", fftypes.UnixTime(1621112824)), - fb.Lt("created", *fftypes.UnixTime(1621112824)), - ).Finalize() - assert.NoError(t, err) - assert.Equal(t, "( created >> 1621112824000000000 ) && ( created >> 0 ) && ( created == 1621112874123456789 ) && ( created == null ) && ( created << 1621112824000000000 ) && ( created << 1621112824000000000 )", f.String()) -} - -func TestBuildMessageStringConvert(t *testing.T) { - fb := MessageQueryFactory.NewFilter(context.Background()) - u := fftypes.MustParseUUID("3f96e0d5-a10e-47c6-87a0-f2e7604af179") - b32 := fftypes.UUIDBytes(u) - f, err := fb.And( - fb.Lt("tag", int(111)), - fb.Lt("tag", int32(222)), - fb.Lt("tag", int64(333)), - fb.Lt("tag", uint(444)), - fb.Lt("tag", uint32(555)), - fb.Lt("tag", uint64(666)), - fb.Lt("tag", *u), - fb.Lt("tag", u), - fb.Lt("tag", *b32), - fb.Lt("tag", b32), - ).Finalize() - assert.NoError(t, err) - assert.Equal(t, "( tag << '111' ) && ( tag << '222' ) && ( tag << '333' ) && ( tag << '444' ) && ( tag << '555' ) && ( tag << '666' ) && ( tag << '3f96e0d5-a10e-47c6-87a0-f2e7604af179' ) && ( tag << '3f96e0d5-a10e-47c6-87a0-f2e7604af179' ) && ( tag << '3f96e0d5a10e47c687a0f2e7604af17900000000000000000000000000000000' ) && ( tag << '3f96e0d5a10e47c687a0f2e7604af17900000000000000000000000000000000' )", f.String()) -} - -func TestBuildMessageBoolConvert(t *testing.T) { - fb := PinQueryFactory.NewFilter(context.Background()) - f, err := fb.And( - fb.Eq("masked", false), - fb.Eq("masked", true), - fb.Eq("masked", "false"), - fb.Eq("masked", "true"), - fb.Eq("masked", "True"), - fb.Eq("masked", ""), - fb.Eq("masked", int(111)), - fb.Eq("masked", int32(222)), - fb.Eq("masked", int64(333)), - fb.Eq("masked", uint(444)), - fb.Eq("masked", uint32(555)), - fb.Eq("masked", uint64(666)), - fb.Eq("masked", nil), - ).Finalize() - assert.NoError(t, err) - assert.Equal(t, "( masked == false ) && ( masked == true ) && ( masked == false ) && ( masked == true ) && ( masked == true ) && ( masked == false ) && ( masked == true ) && ( masked == true ) && ( masked == true ) && ( masked == true ) && ( masked == true ) && ( masked == true ) && ( masked == null )", f.String()) -} - -func TestBuildMessageJSONConvert(t *testing.T) { - fb := OperationQueryFactory.NewFilter(context.Background()) - f, err := fb.And( - fb.Eq("output", nil), - fb.Eq("output", `{}`), - fb.Eq("output", []byte(`{}`)), - fb.Eq("output", fftypes.JSONObject{"some": "value"}), - ).Finalize() - assert.NoError(t, err) - assert.Equal(t, `( output == null ) && ( output == '{}' ) && ( output == '{}' ) && ( output == '{"some":"value"}' )`, f.String()) -} - -func TestBuildFFStringArrayConvert(t *testing.T) { - fb := MessageQueryFactory.NewFilter(context.Background()) - f, err := fb.And( - fb.Eq("topics", nil), - fb.Eq("topics", `test1`), - 
fb.Eq("topics", []byte(`test2`)), - ).Finalize() - assert.NoError(t, err) - assert.Equal(t, `( topics == null ) && ( topics == 'test1' ) && ( topics == 'test2' )`, f.String()) -} - -func TestBuildMessageFailStringConvert(t *testing.T) { - fb := MessageQueryFactory.NewFilter(context.Background()) - _, err := fb.Lt("tag", map[bool]bool{true: false}).Finalize() - assert.Regexp(t, "FF00143.*tag", err) -} - -func TestBuildMessageFailBoolConvert(t *testing.T) { - fb := PinQueryFactory.NewFilter(context.Background()) - _, err := fb.Lt("masked", map[bool]bool{true: false}).Finalize() - assert.Regexp(t, "FF00143.*masked", err) -} - -func TestBuildMessageFailBypes32Convert(t *testing.T) { - fb := MessageQueryFactory.NewFilter(context.Background()) - _, err := fb.Lt("group", map[bool]bool{true: false}).Finalize() - assert.Regexp(t, "FF00143.*group", err) -} - -func TestBuildMessageFailInt64Convert(t *testing.T) { - fb := MessageQueryFactory.NewFilter(context.Background()) - _, err := fb.Lt("sequence", map[bool]bool{true: false}).Finalize() - assert.Regexp(t, "FF00143.*sequence", err) -} - -func TestBuildMessageFailTimeConvert(t *testing.T) { - fb := MessageQueryFactory.NewFilter(context.Background()) - _, err := fb.Lt("created", map[bool]bool{true: false}).Finalize() - assert.Regexp(t, "FF00143.*created", err) -} - -func TestQueryFactoryBadField(t *testing.T) { - fb := MessageQueryFactory.NewFilter(context.Background()) - _, err := fb.And( - fb.Eq("wrong", "ns1"), - ).Finalize() - assert.Regexp(t, "FF00142.*wrong", err) -} - -func TestQueryFactoryBadValue(t *testing.T) { - fb := MessageQueryFactory.NewFilter(context.Background()) - _, err := fb.And( - fb.Eq("sequence", "not an int"), - ).Finalize() - assert.Regexp(t, "FF00143.*sequence", err) -} - -func TestQueryFactoryBadNestedValue(t *testing.T) { - fb := MessageQueryFactory.NewFilter(context.Background()) - _, err := fb.And( - fb.And( - fb.Eq("sequence", "not an int"), - ), - ).Finalize() - assert.Regexp(t, "FF00143.*sequence", err) -} - -func TestQueryFactoryStringMatchNonString(t *testing.T) { - fb := MessageQueryFactory.NewFilter(context.Background()) - _, err := fb.And( - fb.Contains("sequence", "stuff"), - ).Finalize() - assert.Regexp(t, "FF00145", err) -} - -func TestQueryFactoryNullGreaterThan(t *testing.T) { - fb := DataQueryFactory.NewFilter(context.Background()) - _, err := fb.And( - fb.Gt("created", nil), - ).Finalize() - assert.Regexp(t, "FF00144", err) -} - -func TestQueryFactoryGetFields(t *testing.T) { - fb := MessageQueryFactory.NewFilter(context.Background()) - assert.NotNil(t, fb.Fields()) -} - -func TestQueryFactoryGetBuilder(t *testing.T) { - fb := MessageQueryFactory.NewFilter(context.Background()).Gt("sequence", 0) - assert.NotNil(t, fb.Builder()) -} - -func TestBuildMessageFailJSONConvert(t *testing.T) { - fb := OperationQueryFactory.NewFilter(context.Background()) - _, err := fb.Lt("output", map[bool]bool{true: false}).Finalize() - assert.Regexp(t, "FF00143.*output", err) -} - -func TestStringsForTypes(t *testing.T) { - - assert.Equal(t, "test", (&stringField{s: "test"}).String()) - assert.Equal(t, "037a025d-681d-4150-a413-05f368729c66", (&uuidField{fftypes.MustParseUUID("037a025d-681d-4150-a413-05f368729c66")}).String()) - b32 := fftypes.NewRandB32() - assert.Equal(t, b32.String(), (&bytes32Field{b32: b32}).String()) - assert.Equal(t, "12345", (&int64Field{i: 12345}).String()) - now := fftypes.Now() - assert.Equal(t, now.String(), (&timeField{t: now}).String()) - assert.Equal(t, `{"some":"value"}`, (&jsonField{b: 
[]byte(`{"some":"value"}`)}).String()) - assert.Equal(t, "t1,t2", (&ffNameArrayField{na: core.FFStringArray{"t1", "t2"}}).String()) - assert.Equal(t, "true", (&boolField{b: true}).String()) -} diff --git a/pkg/database/plugin.go b/pkg/database/plugin.go index 0beb36abea..5a00162f23 100644 --- a/pkg/database/plugin.go +++ b/pkg/database/plugin.go @@ -20,6 +20,7 @@ import ( "context" "github.com/hyperledger/firefly-common/pkg/config" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly/internal/coremsgs" @@ -84,26 +85,26 @@ type iMessageCollection interface { InsertMessages(ctx context.Context, messages []*core.Message, hooks ...PostCompletionHook) (err error) // UpdateMessage - Update message - UpdateMessage(ctx context.Context, namespace string, id *fftypes.UUID, update Update) (err error) + UpdateMessage(ctx context.Context, namespace string, id *fftypes.UUID, update ffapi.Update) (err error) // ReplaceMessage updates the message, and assigns it a new sequence number at the front of the list. // A new event is raised for the message, with the new sequence number - as if it was brand new. ReplaceMessage(ctx context.Context, message *core.Message) (err error) // UpdateMessages - Update messages - UpdateMessages(ctx context.Context, namespace string, filter Filter, update Update) (err error) + UpdateMessages(ctx context.Context, namespace string, filter ffapi.Filter, update ffapi.Update) (err error) // GetMessageByID - Get a message by ID GetMessageByID(ctx context.Context, namespace string, id *fftypes.UUID) (message *core.Message, err error) // GetMessages - List messages, reverse sorted (newest first) by Confirmed then Created, with pagination, and simple must filters - GetMessages(ctx context.Context, namespace string, filter Filter) (message []*core.Message, res *FilterResult, err error) + GetMessages(ctx context.Context, namespace string, filter ffapi.Filter) (message []*core.Message, res *ffapi.FilterResult, err error) // GetMessageIDs - Retrieves messages, but only querying the messages ID (no other fields) - GetMessageIDs(ctx context.Context, namespace string, filter Filter) (ids []*core.IDAndSequence, err error) + GetMessageIDs(ctx context.Context, namespace string, filter ffapi.Filter) (ids []*core.IDAndSequence, err error) // GetMessagesForData - List messages where there is a data reference to the specified ID - GetMessagesForData(ctx context.Context, namespace string, dataID *fftypes.UUID, filter Filter) (message []*core.Message, res *FilterResult, err error) + GetMessagesForData(ctx context.Context, namespace string, dataID *fftypes.UUID, filter ffapi.Filter) (message []*core.Message, res *ffapi.FilterResult, err error) // GetBatchIDsForMessages - an optimized query to retrieve any non-null batch IDs for a list of message IDs GetBatchIDsForMessages(ctx context.Context, namespace string, msgIDs []*fftypes.UUID) (batchIDs []*fftypes.UUID, err error) @@ -122,16 +123,16 @@ type iDataCollection interface { InsertDataArray(ctx context.Context, data core.DataArray) (err error) // UpdateData - Update data - UpdateData(ctx context.Context, namespace string, id *fftypes.UUID, update Update) (err error) + UpdateData(ctx context.Context, namespace string, id *fftypes.UUID, update ffapi.Update) (err error) // GetDataByID - Get a data record by ID GetDataByID(ctx context.Context, namespace string, id *fftypes.UUID, withValue bool) (message *core.Data, err 
error) // GetData - Get data - GetData(ctx context.Context, namespace string, filter Filter) (message core.DataArray, res *FilterResult, err error) + GetData(ctx context.Context, namespace string, filter ffapi.Filter) (message core.DataArray, res *ffapi.FilterResult, err error) // GetDataRefs - Get data references only (no data) - GetDataRefs(ctx context.Context, namespace string, filter Filter) (message core.DataRefs, res *FilterResult, err error) + GetDataRefs(ctx context.Context, namespace string, filter ffapi.Filter) (message core.DataRefs, res *ffapi.FilterResult, err error) } type iBatchCollection interface { @@ -139,13 +140,13 @@ type iBatchCollection interface { UpsertBatch(ctx context.Context, data *core.BatchPersisted) (err error) // UpdateBatch - Update data - UpdateBatch(ctx context.Context, namespace string, id *fftypes.UUID, update Update) (err error) + UpdateBatch(ctx context.Context, namespace string, id *fftypes.UUID, update ffapi.Update) (err error) // GetBatchByID - Get a batch by ID GetBatchByID(ctx context.Context, namespace string, id *fftypes.UUID) (message *core.BatchPersisted, err error) // GetBatches - Get batches - GetBatches(ctx context.Context, namespace string, filter Filter) (message []*core.BatchPersisted, res *FilterResult, err error) + GetBatches(ctx context.Context, namespace string, filter ffapi.Filter) (message []*core.BatchPersisted, res *ffapi.FilterResult, err error) } type iTransactionCollection interface { @@ -153,13 +154,13 @@ type iTransactionCollection interface { InsertTransaction(ctx context.Context, data *core.Transaction) (err error) // UpdateTransaction - Update transaction - UpdateTransaction(ctx context.Context, namespace string, id *fftypes.UUID, update Update) (err error) + UpdateTransaction(ctx context.Context, namespace string, id *fftypes.UUID, update ffapi.Update) (err error) // GetTransactionByID - Get a transaction by ID GetTransactionByID(ctx context.Context, namespace string, id *fftypes.UUID) (message *core.Transaction, err error) // GetTransactions - Get transactions - GetTransactions(ctx context.Context, namespace string, filter Filter) (message []*core.Transaction, res *FilterResult, err error) + GetTransactions(ctx context.Context, namespace string, filter ffapi.Filter) (message []*core.Transaction, res *ffapi.FilterResult, err error) } type iDatatypeCollection interface { @@ -173,7 +174,7 @@ type iDatatypeCollection interface { GetDatatypeByName(ctx context.Context, namespace, name, version string) (datadef *core.Datatype, err error) // GetDatatypes - Get data definitions - GetDatatypes(ctx context.Context, namespace string, filter Filter) (datadef []*core.Datatype, res *FilterResult, err error) + GetDatatypes(ctx context.Context, namespace string, filter ffapi.Filter) (datadef []*core.Datatype, res *ffapi.FilterResult, err error) } type iOffsetCollection interface { @@ -181,13 +182,13 @@ type iOffsetCollection interface { UpsertOffset(ctx context.Context, data *core.Offset, allowExisting bool) (err error) // UpdateOffset - Update offset - UpdateOffset(ctx context.Context, rowID int64, update Update) (err error) + UpdateOffset(ctx context.Context, rowID int64, update ffapi.Update) (err error) // GetOffset - Get an offset by name GetOffset(ctx context.Context, t core.OffsetType, name string) (offset *core.Offset, err error) // GetOffsets - Get offsets - GetOffsets(ctx context.Context, filter Filter) (offset []*core.Offset, res *FilterResult, err error) + GetOffsets(ctx context.Context, filter ffapi.Filter) (offset 
[]*core.Offset, res *ffapi.FilterResult, err error) // DeleteOffset - Delete an offset by name DeleteOffset(ctx context.Context, t core.OffsetType, name string) (err error) @@ -201,10 +202,10 @@ type iPinCollection interface { UpsertPin(ctx context.Context, parked *core.Pin) (err error) // GetPins - Get pins - GetPins(ctx context.Context, namespace string, filter Filter) (offset []*core.Pin, res *FilterResult, err error) + GetPins(ctx context.Context, namespace string, filter ffapi.Filter) (offset []*core.Pin, res *ffapi.FilterResult, err error) // UpdatePins - Updates pins - UpdatePins(ctx context.Context, namespace string, filter Filter, update Update) (err error) + UpdatePins(ctx context.Context, namespace string, filter ffapi.Filter, update ffapi.Update) (err error) } type iOperationCollection interface { @@ -212,13 +213,13 @@ type iOperationCollection interface { InsertOperation(ctx context.Context, operation *core.Operation, hooks ...PostCompletionHook) (err error) // UpdateOperation - Update an operation - UpdateOperation(ctx context.Context, namespace string, id *fftypes.UUID, update Update) (err error) + UpdateOperation(ctx context.Context, namespace string, id *fftypes.UUID, update ffapi.Update) (err error) // GetOperationByID - Get an operation by ID GetOperationByID(ctx context.Context, namespace string, id *fftypes.UUID) (operation *core.Operation, err error) // GetOperations - Get operation - GetOperations(ctx context.Context, namespace string, filter Filter) (operation []*core.Operation, res *FilterResult, err error) + GetOperations(ctx context.Context, namespace string, filter ffapi.Filter) (operation []*core.Operation, res *ffapi.FilterResult, err error) } type iSubscriptionCollection interface { @@ -227,7 +228,7 @@ type iSubscriptionCollection interface { // UpdateSubscription - Update subscription // Throws IDMismatch error if updating and ids don't match - UpdateSubscription(ctx context.Context, namespace, name string, update Update) (err error) + UpdateSubscription(ctx context.Context, namespace, name string, update ffapi.Update) (err error) // GetSubscriptionByName - Get an subscription by name GetSubscriptionByName(ctx context.Context, namespace, name string) (offset *core.Subscription, err error) @@ -236,7 +237,7 @@ type iSubscriptionCollection interface { GetSubscriptionByID(ctx context.Context, namespace string, id *fftypes.UUID) (offset *core.Subscription, err error) // GetSubscriptions - Get subscriptions - GetSubscriptions(ctx context.Context, namespace string, filter Filter) (offset []*core.Subscription, res *FilterResult, err error) + GetSubscriptions(ctx context.Context, namespace string, filter ffapi.Filter) (offset []*core.Subscription, res *ffapi.FilterResult, err error) // DeleteSubscriptionByID - Delete a subscription DeleteSubscriptionByID(ctx context.Context, namespace string, id *fftypes.UUID) (err error) @@ -253,7 +254,7 @@ type iEventCollection interface { GetEventByID(ctx context.Context, namespace string, id *fftypes.UUID) (message *core.Event, err error) // GetEvents - Get events - GetEvents(ctx context.Context, namespace string, filter Filter) (message []*core.Event, res *FilterResult, err error) + GetEvents(ctx context.Context, namespace string, filter ffapi.Filter) (message []*core.Event, res *ffapi.FilterResult, err error) } type iIdentitiesCollection interface { @@ -270,7 +271,7 @@ type iIdentitiesCollection interface { GetIdentityByID(ctx context.Context, namespace string, id *fftypes.UUID) (org *core.Identity, err error) // GetIdentities 
- Get identities - GetIdentities(ctx context.Context, namespace string, filter Filter) (org []*core.Identity, res *FilterResult, err error) + GetIdentities(ctx context.Context, namespace string, filter ffapi.Filter) (org []*core.Identity, res *ffapi.FilterResult, err error) } type iVerifiersCollection interface { @@ -284,7 +285,7 @@ type iVerifiersCollection interface { GetVerifierByHash(ctx context.Context, namespace string, hash *fftypes.Bytes32) (org *core.Verifier, err error) // GetVerifiers - Get verifiers - GetVerifiers(ctx context.Context, namespace string, filter Filter) (org []*core.Verifier, res *FilterResult, err error) + GetVerifiers(ctx context.Context, namespace string, filter ffapi.Filter) (org []*core.Verifier, res *ffapi.FilterResult, err error) } type iGroupCollection interface { @@ -295,7 +296,7 @@ type iGroupCollection interface { GetGroupByHash(ctx context.Context, namespace string, hash *fftypes.Bytes32) (node *core.Group, err error) // GetGroups - Get groups - GetGroups(ctx context.Context, namespace string, filter Filter) (node []*core.Group, res *FilterResult, err error) + GetGroups(ctx context.Context, namespace string, filter ffapi.Filter) (node []*core.Group, res *ffapi.FilterResult, err error) } type iNonceCollection interface { @@ -309,7 +310,7 @@ type iNonceCollection interface { GetNonce(ctx context.Context, hash *fftypes.Bytes32) (message *core.Nonce, err error) // GetNonces - Get contexts - GetNonces(ctx context.Context, filter Filter) (node []*core.Nonce, res *FilterResult, err error) + GetNonces(ctx context.Context, filter ffapi.Filter) (node []*core.Nonce, res *ffapi.FilterResult, err error) // DeleteNonce - Delete context by hash DeleteNonce(ctx context.Context, hash *fftypes.Bytes32) (err error) @@ -323,7 +324,7 @@ type iNextPinCollection interface { GetNextPinsForContext(ctx context.Context, namespace string, context *fftypes.Bytes32) (message []*core.NextPin, err error) // UpdateNextPin - update a next hash using its local database ID - UpdateNextPin(ctx context.Context, namespace string, sequence int64, update Update) (err error) + UpdateNextPin(ctx context.Context, namespace string, sequence int64, update ffapi.Update) (err error) } type iBlobCollection interface { @@ -337,7 +338,7 @@ type iBlobCollection interface { GetBlobMatchingHash(ctx context.Context, hash *fftypes.Bytes32) (message *core.Blob, err error) // GetBlobs - get blobs - GetBlobs(ctx context.Context, filter Filter) (message []*core.Blob, res *FilterResult, err error) + GetBlobs(ctx context.Context, filter ffapi.Filter) (message []*core.Blob, res *ffapi.FilterResult, err error) // DeleteBlob - delete a blob, using its local database ID DeleteBlob(ctx context.Context, sequence int64) (err error) @@ -357,7 +358,7 @@ type iTokenPoolCollection interface { GetTokenPoolByLocator(ctx context.Context, namespace, connector, locator string) (*core.TokenPool, error) // GetTokenPools - Get token pools - GetTokenPools(ctx context.Context, namespace string, filter Filter) ([]*core.TokenPool, *FilterResult, error) + GetTokenPools(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.TokenPool, *ffapi.FilterResult, error) } type iTokenBalanceCollection interface { @@ -368,13 +369,13 @@ type iTokenBalanceCollection interface { GetTokenBalance(ctx context.Context, namespace string, poolID *fftypes.UUID, tokenIndex, identity string) (*core.TokenBalance, error) // GetTokenBalances - Get token balances - GetTokenBalances(ctx context.Context, namespace string, filter Filter) 
([]*core.TokenBalance, *FilterResult, error) + GetTokenBalances(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.TokenBalance, *ffapi.FilterResult, error) // GetTokenAccounts - Get token accounts (all distinct addresses that have a balance) - GetTokenAccounts(ctx context.Context, namespace string, filter Filter) ([]*core.TokenAccount, *FilterResult, error) + GetTokenAccounts(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.TokenAccount, *ffapi.FilterResult, error) // GetTokenAccountPools - Get the list of pools referenced by a given account - GetTokenAccountPools(ctx context.Context, namespace, key string, filter Filter) ([]*core.TokenAccountPool, *FilterResult, error) + GetTokenAccountPools(ctx context.Context, namespace, key string, filter ffapi.Filter) ([]*core.TokenAccountPool, *ffapi.FilterResult, error) } type iTokenTransferCollection interface { @@ -388,7 +389,7 @@ type iTokenTransferCollection interface { GetTokenTransferByProtocolID(ctx context.Context, namespace, connector, protocolID string) (*core.TokenTransfer, error) // GetTokenTransfers - Get token transfers - GetTokenTransfers(ctx context.Context, namespace string, filter Filter) ([]*core.TokenTransfer, *FilterResult, error) + GetTokenTransfers(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.TokenTransfer, *ffapi.FilterResult, error) } type iTokenApprovalCollection interface { @@ -396,7 +397,7 @@ type iTokenApprovalCollection interface { UpsertTokenApproval(ctx context.Context, approval *core.TokenApproval) error // UpdateTokenApprovals - Update multiple token approvals - UpdateTokenApprovals(ctx context.Context, filter Filter, update Update) (err error) + UpdateTokenApprovals(ctx context.Context, filter ffapi.Filter, update ffapi.Update) (err error) // GetTokenApprovalByID - Get a token approval by ID GetTokenApprovalByID(ctx context.Context, namespace string, localID *fftypes.UUID) (*core.TokenApproval, error) @@ -405,7 +406,7 @@ type iTokenApprovalCollection interface { GetTokenApprovalByProtocolID(ctx context.Context, namespace, connector, protocolID string) (*core.TokenApproval, error) // GetTokenApprovals - Get token approvals - GetTokenApprovals(ctx context.Context, namespace string, filter Filter) ([]*core.TokenApproval, *FilterResult, error) + GetTokenApprovals(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.TokenApproval, *ffapi.FilterResult, error) } type iFFICollection interface { @@ -413,7 +414,7 @@ type iFFICollection interface { UpsertFFI(ctx context.Context, cd *fftypes.FFI) error // GetFFIs - Get FFIs - GetFFIs(ctx context.Context, namespace string, filter Filter) ([]*fftypes.FFI, *FilterResult, error) + GetFFIs(ctx context.Context, namespace string, filter ffapi.Filter) ([]*fftypes.FFI, *ffapi.FilterResult, error) // GetFFIByID - Get an FFI by ID GetFFIByID(ctx context.Context, namespace string, id *fftypes.UUID) (*fftypes.FFI, error) @@ -430,7 +431,7 @@ type iFFIMethodCollection interface { GetFFIMethod(ctx context.Context, namespace string, interfaceID *fftypes.UUID, pathName string) (*fftypes.FFIMethod, error) // GetFFIMethods - Get FFI methods - GetFFIMethods(ctx context.Context, namespace string, filter Filter) (methods []*fftypes.FFIMethod, res *FilterResult, err error) + GetFFIMethods(ctx context.Context, namespace string, filter ffapi.Filter) (methods []*fftypes.FFIMethod, res *ffapi.FilterResult, err error) } type iFFIEventCollection interface { @@ -441,7 +442,7 @@ type iFFIEventCollection interface { GetFFIEvent(ctx 
context.Context, namespace string, interfaceID *fftypes.UUID, pathName string) (*fftypes.FFIEvent, error) // GetFFIEvents - Get FFI events - GetFFIEvents(ctx context.Context, namespace string, filter Filter) (events []*fftypes.FFIEvent, res *FilterResult, err error) + GetFFIEvents(ctx context.Context, namespace string, filter ffapi.Filter) (events []*fftypes.FFIEvent, res *ffapi.FilterResult, err error) } type iContractAPICollection interface { @@ -449,7 +450,7 @@ type iContractAPICollection interface { UpsertContractAPI(ctx context.Context, cd *core.ContractAPI) error // GetContractAPIs - Get contract APIs - GetContractAPIs(ctx context.Context, namespace string, filter AndFilter) ([]*core.ContractAPI, *FilterResult, error) + GetContractAPIs(ctx context.Context, namespace string, filter ffapi.AndFilter) ([]*core.ContractAPI, *ffapi.FilterResult, error) // GetContractAPIByID - Get a contract API by ID GetContractAPIByID(ctx context.Context, namespace string, id *fftypes.UUID) (*core.ContractAPI, error) @@ -472,7 +473,7 @@ type iContractListenerCollection interface { GetContractListenerByBackendID(ctx context.Context, namespace, id string) (sub *core.ContractListener, err error) // GetContractListeners - get contract listeners - GetContractListeners(ctx context.Context, namespace string, filter Filter) ([]*core.ContractListener, *FilterResult, error) + GetContractListeners(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.ContractListener, *ffapi.FilterResult, error) // DeleteContractListener - delete a contract listener DeleteContractListenerByID(ctx context.Context, namespace string, id *fftypes.UUID) (err error) @@ -490,7 +491,7 @@ type iBlockchainEventCollection interface { GetBlockchainEventByProtocolID(ctx context.Context, namespace string, listener *fftypes.UUID, protocolID string) (*core.BlockchainEvent, error) // GetBlockchainEvents - get blockchain events - GetBlockchainEvents(ctx context.Context, namespace string, filter Filter) ([]*core.BlockchainEvent, *FilterResult, error) + GetBlockchainEvents(ctx context.Context, namespace string, filter ffapi.Filter) ([]*core.BlockchainEvent, *ffapi.FilterResult, error) } // PersistenceInterface are the operations that must be implemented by a database interface plugin. 
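For context, a caller-side sketch of the reworked query API. This is illustrative only: it assumes the ffapi builder (NewFilter/Eq/Gt/And) mirrors the pkg/database implementation being removed below, which is the stated intent of this change, and the helper function and package names are hypothetical.

package example

import (
	"context"

	"github.com/hyperledger/firefly/pkg/core"
	"github.com/hyperledger/firefly/pkg/database"
)

// listConfirmedMessages illustrates that callers keep using the query factories
// declared in pkg/database/plugin.go; only the types those factories produce now
// come from ffapi, so the built filter satisfies the new ffapi.Filter parameter
// on GetMessages.
func listConfirmedMessages(ctx context.Context, db database.Plugin, ns string) ([]*core.Message, error) {
	fb := database.MessageQueryFactory.NewFilter(ctx)
	filter := fb.And(
		fb.Eq("state", "confirmed"), // field names are those defined by MessageQueryFactory
		fb.Gt("created", 0),
	)
	msgs, _, err := db.GetMessages(ctx, ns, filter)
	return msgs, err
}

Updates would follow the same pattern: database.MessageQueryFactory.NewUpdate(ctx).Set(...) yields an ffapi.Update accepted by the reworked UpdateMessage and UpdateMessages signatures.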
@@ -668,322 +669,322 @@ type Capabilities struct { } // MessageQueryFactory filter fields for messages -var MessageQueryFactory = &queryFields{ - "id": &UUIDField{}, - "cid": &UUIDField{}, - "type": &StringField{}, - "author": &StringField{}, - "key": &StringField{}, - "topics": &FFStringArrayField{}, - "tag": &StringField{}, - "group": &Bytes32Field{}, - "created": &TimeField{}, - "datahash": &Bytes32Field{}, - "idempotencykey": &StringField{}, - "hash": &Bytes32Field{}, - "pins": &FFStringArrayField{}, - "state": &StringField{}, - "confirmed": &TimeField{}, - "sequence": &Int64Field{}, - "txtype": &StringField{}, - "batch": &UUIDField{}, +var MessageQueryFactory = &ffapi.QueryFields{ + "id": &ffapi.UUIDField{}, + "cid": &ffapi.UUIDField{}, + "type": &ffapi.StringField{}, + "author": &ffapi.StringField{}, + "key": &ffapi.StringField{}, + "topics": &ffapi.FFStringArrayField{}, + "tag": &ffapi.StringField{}, + "group": &ffapi.Bytes32Field{}, + "created": &ffapi.TimeField{}, + "datahash": &ffapi.Bytes32Field{}, + "idempotencykey": &ffapi.StringField{}, + "hash": &ffapi.Bytes32Field{}, + "pins": &ffapi.FFStringArrayField{}, + "state": &ffapi.StringField{}, + "confirmed": &ffapi.TimeField{}, + "sequence": &ffapi.Int64Field{}, + "txtype": &ffapi.StringField{}, + "batch": &ffapi.UUIDField{}, } // BatchQueryFactory filter fields for batches -var BatchQueryFactory = &queryFields{ - "id": &UUIDField{}, - "type": &StringField{}, - "author": &StringField{}, - "key": &StringField{}, - "group": &Bytes32Field{}, - "hash": &Bytes32Field{}, - "payloadref": &StringField{}, - "created": &TimeField{}, - "confirmed": &TimeField{}, - "tx.type": &StringField{}, - "tx.id": &UUIDField{}, - "node": &UUIDField{}, +var BatchQueryFactory = &ffapi.QueryFields{ + "id": &ffapi.UUIDField{}, + "type": &ffapi.StringField{}, + "author": &ffapi.StringField{}, + "key": &ffapi.StringField{}, + "group": &ffapi.Bytes32Field{}, + "hash": &ffapi.Bytes32Field{}, + "payloadref": &ffapi.StringField{}, + "created": &ffapi.TimeField{}, + "confirmed": &ffapi.TimeField{}, + "tx.type": &ffapi.StringField{}, + "tx.id": &ffapi.UUIDField{}, + "node": &ffapi.UUIDField{}, } // TransactionQueryFactory filter fields for transactions -var TransactionQueryFactory = &queryFields{ - "id": &UUIDField{}, - "type": &StringField{}, - "created": &TimeField{}, - "idempotencykey": &StringField{}, - "blockchainids": &FFStringArrayField{}, +var TransactionQueryFactory = &ffapi.QueryFields{ + "id": &ffapi.UUIDField{}, + "type": &ffapi.StringField{}, + "created": &ffapi.TimeField{}, + "idempotencykey": &ffapi.StringField{}, + "blockchainids": &ffapi.FFStringArrayField{}, } // DataQueryFactory filter fields for data -var DataQueryFactory = &queryFields{ - "id": &UUIDField{}, - "validator": &StringField{}, - "datatype.name": &StringField{}, - "datatype.version": &StringField{}, - "hash": &Bytes32Field{}, - "blob.hash": &Bytes32Field{}, - "blob.public": &StringField{}, - "blob.name": &StringField{}, - "blob.size": &Int64Field{}, - "created": &TimeField{}, - "value": &JSONField{}, - "public": &StringField{}, +var DataQueryFactory = &ffapi.QueryFields{ + "id": &ffapi.UUIDField{}, + "validator": &ffapi.StringField{}, + "datatype.name": &ffapi.StringField{}, + "datatype.version": &ffapi.StringField{}, + "hash": &ffapi.Bytes32Field{}, + "blob.hash": &ffapi.Bytes32Field{}, + "blob.public": &ffapi.StringField{}, + "blob.name": &ffapi.StringField{}, + "blob.size": &ffapi.Int64Field{}, + "created": &ffapi.TimeField{}, + "value": &ffapi.JSONField{}, + "public": 
&ffapi.StringField{}, } // DatatypeQueryFactory filter fields for data definitions -var DatatypeQueryFactory = &queryFields{ - "id": &UUIDField{}, - "message": &UUIDField{}, - "validator": &StringField{}, - "name": &StringField{}, - "version": &StringField{}, - "created": &TimeField{}, +var DatatypeQueryFactory = &ffapi.QueryFields{ + "id": &ffapi.UUIDField{}, + "message": &ffapi.UUIDField{}, + "validator": &ffapi.StringField{}, + "name": &ffapi.StringField{}, + "version": &ffapi.StringField{}, + "created": &ffapi.TimeField{}, } // OffsetQueryFactory filter fields for data offsets -var OffsetQueryFactory = &queryFields{ - "name": &StringField{}, - "type": &StringField{}, - "current": &Int64Field{}, +var OffsetQueryFactory = &ffapi.QueryFields{ + "name": &ffapi.StringField{}, + "type": &ffapi.StringField{}, + "current": &ffapi.Int64Field{}, } // OperationQueryFactory filter fields for data operations -var OperationQueryFactory = &queryFields{ - "id": &UUIDField{}, - "tx": &UUIDField{}, - "type": &StringField{}, - "status": &StringField{}, - "error": &StringField{}, - "plugin": &StringField{}, - "input": &JSONField{}, - "output": &JSONField{}, - "created": &TimeField{}, - "updated": &TimeField{}, - "retry": &UUIDField{}, +var OperationQueryFactory = &ffapi.QueryFields{ + "id": &ffapi.UUIDField{}, + "tx": &ffapi.UUIDField{}, + "type": &ffapi.StringField{}, + "status": &ffapi.StringField{}, + "error": &ffapi.StringField{}, + "plugin": &ffapi.StringField{}, + "input": &ffapi.JSONField{}, + "output": &ffapi.JSONField{}, + "created": &ffapi.TimeField{}, + "updated": &ffapi.TimeField{}, + "retry": &ffapi.UUIDField{}, } // SubscriptionQueryFactory filter fields for data subscriptions -var SubscriptionQueryFactory = &queryFields{ - "id": &UUIDField{}, - "name": &StringField{}, - "transport": &StringField{}, - "events": &StringField{}, - "filters": &JSONField{}, - "options": &StringField{}, - "created": &TimeField{}, +var SubscriptionQueryFactory = &ffapi.QueryFields{ + "id": &ffapi.UUIDField{}, + "name": &ffapi.StringField{}, + "transport": &ffapi.StringField{}, + "events": &ffapi.StringField{}, + "filters": &ffapi.JSONField{}, + "options": &ffapi.StringField{}, + "created": &ffapi.TimeField{}, } // EventQueryFactory filter fields for data events -var EventQueryFactory = &queryFields{ - "id": &UUIDField{}, - "type": &StringField{}, - "reference": &UUIDField{}, - "correlator": &UUIDField{}, - "tx": &UUIDField{}, - "topic": &StringField{}, - "sequence": &Int64Field{}, - "created": &TimeField{}, +var EventQueryFactory = &ffapi.QueryFields{ + "id": &ffapi.UUIDField{}, + "type": &ffapi.StringField{}, + "reference": &ffapi.UUIDField{}, + "correlator": &ffapi.UUIDField{}, + "tx": &ffapi.UUIDField{}, + "topic": &ffapi.StringField{}, + "sequence": &ffapi.Int64Field{}, + "created": &ffapi.TimeField{}, } // PinQueryFactory filter fields for parked contexts -var PinQueryFactory = &queryFields{ - "sequence": &Int64Field{}, - "masked": &BoolField{}, - "hash": &Bytes32Field{}, - "batch": &UUIDField{}, - "index": &Int64Field{}, - "dispatched": &BoolField{}, - "created": &TimeField{}, +var PinQueryFactory = &ffapi.QueryFields{ + "sequence": &ffapi.Int64Field{}, + "masked": &ffapi.BoolField{}, + "hash": &ffapi.Bytes32Field{}, + "batch": &ffapi.UUIDField{}, + "index": &ffapi.Int64Field{}, + "dispatched": &ffapi.BoolField{}, + "created": &ffapi.TimeField{}, } // IdentityQueryFactory filter fields for identities -var IdentityQueryFactory = &queryFields{ - "id": &UUIDField{}, - "did": &StringField{}, - "parent": 
&UUIDField{}, - "messages.claim": &UUIDField{}, - "messages.verification": &UUIDField{}, - "messages.update": &UUIDField{}, - "type": &StringField{}, - "name": &StringField{}, - "description": &StringField{}, - "profile": &JSONField{}, - "created": &TimeField{}, - "updated": &TimeField{}, +var IdentityQueryFactory = &ffapi.QueryFields{ + "id": &ffapi.UUIDField{}, + "did": &ffapi.StringField{}, + "parent": &ffapi.UUIDField{}, + "messages.claim": &ffapi.UUIDField{}, + "messages.verification": &ffapi.UUIDField{}, + "messages.update": &ffapi.UUIDField{}, + "type": &ffapi.StringField{}, + "name": &ffapi.StringField{}, + "description": &ffapi.StringField{}, + "profile": &ffapi.JSONField{}, + "created": &ffapi.TimeField{}, + "updated": &ffapi.TimeField{}, } // VerifierQueryFactory filter fields for identities -var VerifierQueryFactory = &queryFields{ - "hash": &Bytes32Field{}, - "identity": &UUIDField{}, - "type": &StringField{}, - "value": &StringField{}, - "created": &TimeField{}, +var VerifierQueryFactory = &ffapi.QueryFields{ + "hash": &ffapi.Bytes32Field{}, + "identity": &ffapi.UUIDField{}, + "type": &ffapi.StringField{}, + "value": &ffapi.StringField{}, + "created": &ffapi.TimeField{}, } // GroupQueryFactory filter fields for groups -var GroupQueryFactory = &queryFields{ - "hash": &Bytes32Field{}, - "message": &UUIDField{}, - "description": &StringField{}, - "ledger": &UUIDField{}, - "created": &TimeField{}, +var GroupQueryFactory = &ffapi.QueryFields{ + "hash": &ffapi.Bytes32Field{}, + "message": &ffapi.UUIDField{}, + "description": &ffapi.StringField{}, + "ledger": &ffapi.UUIDField{}, + "created": &ffapi.TimeField{}, } // NonceQueryFactory filter fields for nonces -var NonceQueryFactory = &queryFields{ - "hash": &StringField{}, - "nonce": &Int64Field{}, +var NonceQueryFactory = &ffapi.QueryFields{ + "hash": &ffapi.StringField{}, + "nonce": &ffapi.Int64Field{}, } // NextPinQueryFactory filter fields for next pins -var NextPinQueryFactory = &queryFields{ - "context": &Bytes32Field{}, - "identity": &StringField{}, - "hash": &Bytes32Field{}, - "nonce": &Int64Field{}, +var NextPinQueryFactory = &ffapi.QueryFields{ + "context": &ffapi.Bytes32Field{}, + "identity": &ffapi.StringField{}, + "hash": &ffapi.Bytes32Field{}, + "nonce": &ffapi.Int64Field{}, } // BlobQueryFactory filter fields for config records -var BlobQueryFactory = &queryFields{ - "hash": &Bytes32Field{}, - "size": &Int64Field{}, - "payloadref": &StringField{}, - "created": &TimeField{}, +var BlobQueryFactory = &ffapi.QueryFields{ + "hash": &ffapi.Bytes32Field{}, + "size": &ffapi.Int64Field{}, + "payloadref": &ffapi.StringField{}, + "created": &ffapi.TimeField{}, } // TokenPoolQueryFactory filter fields for token pools -var TokenPoolQueryFactory = &queryFields{ - "id": &UUIDField{}, - "type": &StringField{}, - "name": &StringField{}, - "standard": &StringField{}, - "locator": &StringField{}, - "symbol": &StringField{}, - "decimals": &Int64Field{}, - "message": &UUIDField{}, - "state": &StringField{}, - "created": &TimeField{}, - "connector": &StringField{}, - "tx.type": &StringField{}, - "tx.id": &UUIDField{}, +var TokenPoolQueryFactory = &ffapi.QueryFields{ + "id": &ffapi.UUIDField{}, + "type": &ffapi.StringField{}, + "name": &ffapi.StringField{}, + "standard": &ffapi.StringField{}, + "locator": &ffapi.StringField{}, + "symbol": &ffapi.StringField{}, + "decimals": &ffapi.Int64Field{}, + "message": &ffapi.UUIDField{}, + "state": &ffapi.StringField{}, + "created": &ffapi.TimeField{}, + "connector": &ffapi.StringField{}, + "tx.type": 
&ffapi.StringField{}, + "tx.id": &ffapi.UUIDField{}, } // TokenBalanceQueryFactory filter fields for token balances -var TokenBalanceQueryFactory = &queryFields{ - "pool": &UUIDField{}, - "tokenindex": &StringField{}, - "uri": &StringField{}, - "connector": &StringField{}, - "key": &StringField{}, - "balance": &Int64Field{}, - "updated": &TimeField{}, +var TokenBalanceQueryFactory = &ffapi.QueryFields{ + "pool": &ffapi.UUIDField{}, + "tokenindex": &ffapi.StringField{}, + "uri": &ffapi.StringField{}, + "connector": &ffapi.StringField{}, + "key": &ffapi.StringField{}, + "balance": &ffapi.Int64Field{}, + "updated": &ffapi.TimeField{}, } // TokenAccountQueryFactory filter fields for token accounts -var TokenAccountQueryFactory = &queryFields{ - "key": &StringField{}, - "updated": &TimeField{}, +var TokenAccountQueryFactory = &ffapi.QueryFields{ + "key": &ffapi.StringField{}, + "updated": &ffapi.TimeField{}, } // TokenAccountPoolQueryFactory filter fields for token account pools -var TokenAccountPoolQueryFactory = &queryFields{ - "pool": &UUIDField{}, - "updated": &TimeField{}, +var TokenAccountPoolQueryFactory = &ffapi.QueryFields{ + "pool": &ffapi.UUIDField{}, + "updated": &ffapi.TimeField{}, } // TokenTransferQueryFactory filter fields for token transfers -var TokenTransferQueryFactory = &queryFields{ - "localid": &StringField{}, - "pool": &UUIDField{}, - "tokenindex": &StringField{}, - "uri": &StringField{}, - "connector": &StringField{}, - "key": &StringField{}, - "from": &StringField{}, - "to": &StringField{}, - "amount": &Int64Field{}, - "protocolid": &StringField{}, - "message": &UUIDField{}, - "messagehash": &Bytes32Field{}, - "created": &TimeField{}, - "tx.type": &StringField{}, - "tx.id": &UUIDField{}, - "blockchainevent": &UUIDField{}, - "type": &StringField{}, -} - -var TokenApprovalQueryFactory = &queryFields{ - "localid": &StringField{}, - "pool": &UUIDField{}, - "connector": &StringField{}, - "key": &StringField{}, - "operator": &StringField{}, - "approved": &BoolField{}, - "protocolid": &StringField{}, - "subject": &StringField{}, - "active": &BoolField{}, - "created": &TimeField{}, - "tx.type": &StringField{}, - "tx.id": &UUIDField{}, - "blockchainevent": &UUIDField{}, +var TokenTransferQueryFactory = &ffapi.QueryFields{ + "localid": &ffapi.StringField{}, + "pool": &ffapi.UUIDField{}, + "tokenindex": &ffapi.StringField{}, + "uri": &ffapi.StringField{}, + "connector": &ffapi.StringField{}, + "key": &ffapi.StringField{}, + "from": &ffapi.StringField{}, + "to": &ffapi.StringField{}, + "amount": &ffapi.Int64Field{}, + "protocolid": &ffapi.StringField{}, + "message": &ffapi.UUIDField{}, + "messagehash": &ffapi.Bytes32Field{}, + "created": &ffapi.TimeField{}, + "tx.type": &ffapi.StringField{}, + "tx.id": &ffapi.UUIDField{}, + "blockchainevent": &ffapi.UUIDField{}, + "type": &ffapi.StringField{}, +} + +var TokenApprovalQueryFactory = &ffapi.QueryFields{ + "localid": &ffapi.StringField{}, + "pool": &ffapi.UUIDField{}, + "connector": &ffapi.StringField{}, + "key": &ffapi.StringField{}, + "operator": &ffapi.StringField{}, + "approved": &ffapi.BoolField{}, + "protocolid": &ffapi.StringField{}, + "subject": &ffapi.StringField{}, + "active": &ffapi.BoolField{}, + "created": &ffapi.TimeField{}, + "tx.type": &ffapi.StringField{}, + "tx.id": &ffapi.UUIDField{}, + "blockchainevent": &ffapi.UUIDField{}, } // FFIQueryFactory filter fields for contract definitions -var FFIQueryFactory = &queryFields{ - "id": &UUIDField{}, - "name": &StringField{}, - "version": &StringField{}, +var FFIQueryFactory 
= &ffapi.QueryFields{ + "id": &ffapi.UUIDField{}, + "name": &ffapi.StringField{}, + "version": &ffapi.StringField{}, } // FFIMethodQueryFactory filter fields for contract methods -var FFIMethodQueryFactory = &queryFields{ - "id": &UUIDField{}, - "name": &StringField{}, - "pathname": &StringField{}, - "interface": &UUIDField{}, - "description": &StringField{}, +var FFIMethodQueryFactory = &ffapi.QueryFields{ + "id": &ffapi.UUIDField{}, + "name": &ffapi.StringField{}, + "pathname": &ffapi.StringField{}, + "interface": &ffapi.UUIDField{}, + "description": &ffapi.StringField{}, } // FFIEventQueryFactory filter fields for contract events -var FFIEventQueryFactory = &queryFields{ - "id": &UUIDField{}, - "name": &StringField{}, - "pathname": &StringField{}, - "interface": &UUIDField{}, - "description": &StringField{}, +var FFIEventQueryFactory = &ffapi.QueryFields{ + "id": &ffapi.UUIDField{}, + "name": &ffapi.StringField{}, + "pathname": &ffapi.StringField{}, + "interface": &ffapi.UUIDField{}, + "description": &ffapi.StringField{}, } // ContractListenerQueryFactory filter fields for contract listeners -var ContractListenerQueryFactory = &queryFields{ - "id": &UUIDField{}, - "name": &StringField{}, - "interface": &UUIDField{}, - "location": &JSONField{}, - "topic": &StringField{}, - "signature": &StringField{}, - "backendid": &StringField{}, - "created": &TimeField{}, - "updated": &TimeField{}, - "state": &JSONField{}, +var ContractListenerQueryFactory = &ffapi.QueryFields{ + "id": &ffapi.UUIDField{}, + "name": &ffapi.StringField{}, + "interface": &ffapi.UUIDField{}, + "location": &ffapi.JSONField{}, + "topic": &ffapi.StringField{}, + "signature": &ffapi.StringField{}, + "backendid": &ffapi.StringField{}, + "created": &ffapi.TimeField{}, + "updated": &ffapi.TimeField{}, + "state": &ffapi.JSONField{}, } // BlockchainEventQueryFactory filter fields for contract events -var BlockchainEventQueryFactory = &queryFields{ - "id": &UUIDField{}, - "source": &StringField{}, - "name": &StringField{}, - "protocolid": &StringField{}, - "listener": &StringField{}, - "tx.type": &StringField{}, - "tx.id": &UUIDField{}, - "tx.blockchainid": &StringField{}, - "timestamp": &TimeField{}, +var BlockchainEventQueryFactory = &ffapi.QueryFields{ + "id": &ffapi.UUIDField{}, + "source": &ffapi.StringField{}, + "name": &ffapi.StringField{}, + "protocolid": &ffapi.StringField{}, + "listener": &ffapi.StringField{}, + "tx.type": &ffapi.StringField{}, + "tx.id": &ffapi.UUIDField{}, + "tx.blockchainid": &ffapi.StringField{}, + "timestamp": &ffapi.TimeField{}, } // ContractAPIQueryFactory filter fields for Contract APIs -var ContractAPIQueryFactory = &queryFields{ - "id": &UUIDField{}, - "name": &StringField{}, - "interface": &UUIDField{}, +var ContractAPIQueryFactory = &ffapi.QueryFields{ + "id": &ffapi.UUIDField{}, + "name": &ffapi.StringField{}, + "interface": &ffapi.UUIDField{}, } diff --git a/pkg/database/query_fields.go b/pkg/database/query_fields.go deleted file mode 100644 index ea9665277e..0000000000 --- a/pkg/database/query_fields.go +++ /dev/null @@ -1,347 +0,0 @@ -// Copyright © 2022 Kaleido, Inc. -// -// SPDX-License-Identifier: Apache-2.0 -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package database - -import ( - "context" - "database/sql" - "database/sql/driver" - "encoding/json" - "fmt" - "reflect" - "strconv" - "strings" - - "github.com/hyperledger/firefly-common/pkg/fftypes" - "github.com/hyperledger/firefly-common/pkg/i18n" - "github.com/hyperledger/firefly/pkg/core" -) - -// QueryFactory creates a filter builder in the given context, and contains the rules on -// which fields can be used by the builder (and how they are serialized) -type QueryFactory interface { - NewFilter(ctx context.Context) FilterBuilder - NewFilterLimit(ctx context.Context, defLimit uint64) FilterBuilder - NewUpdate(ctx context.Context) UpdateBuilder -} - -type queryFields map[string]Field - -func (qf *queryFields) NewFilterLimit(ctx context.Context, defLimit uint64) FilterBuilder { - return &filterBuilder{ - ctx: ctx, - queryFields: *qf, - limit: defLimit, - } -} - -func (qf *queryFields) NewFilter(ctx context.Context) FilterBuilder { - return qf.NewFilterLimit(ctx, 0) -} - -func (qf *queryFields) NewUpdate(ctx context.Context) UpdateBuilder { - return &updateBuilder{ - ctx: ctx, - queryFields: *qf, - } -} - -// FieldSerialization - we stand on the shoulders of the well adopted SQL serialization interface here to help us define what -// string<->value looks like, even though this plugin interface is not tightly coupled to SQL. -type FieldSerialization interface { - driver.Valuer - sql.Scanner // Implementations can assume the value is ALWAYS a string -} - -type Field interface { - getSerialization() FieldSerialization - description() string - filterAsString() bool -} - -// nullField is a special FieldSerialization used to represent nil in queries -type nullField struct{} - -func (f *nullField) Scan(src interface{}) error { - return nil -} -func (f *nullField) Value() (driver.Value, error) { return nil, nil } -func (f *nullField) String() string { return fftypes.NullString } - -type StringField struct{} -type stringField struct{ s string } - -func (f *stringField) Scan(src interface{}) error { - switch tv := src.(type) { - case string: - f.s = tv - case int: - f.s = strconv.FormatInt(int64(tv), 10) - case int32: - f.s = strconv.FormatInt(int64(tv), 10) - case int64: - f.s = strconv.FormatInt(tv, 10) - case uint: - f.s = strconv.FormatInt(int64(tv), 10) - case uint32: - f.s = strconv.FormatInt(int64(tv), 10) - case uint64: - f.s = strconv.FormatInt(int64(tv), 10) - case *fftypes.UUID: - if tv != nil { - f.s = tv.String() - } - case fftypes.UUID: - f.s = tv.String() - case *fftypes.Bytes32: - if tv != nil { - f.s = tv.String() - } - case fftypes.Bytes32: - f.s = tv.String() - case nil: - f.s = "" - default: - if reflect.TypeOf(tv).Kind() == reflect.String { - // This is helpful for status enums - f.s = reflect.ValueOf(tv).String() - } else { - return i18n.NewError(context.Background(), i18n.MsgTypeRestoreFailed, src, f.s) - } - } - return nil -} -func (f *stringField) Value() (driver.Value, error) { return f.s, nil } -func (f *stringField) String() string { return f.s } -func (f *StringField) getSerialization() FieldSerialization { return &stringField{} } -func (f 
*StringField) filterAsString() bool { return true } -func (f *StringField) description() string { return "String" } - -type UUIDField struct{} -type uuidField struct{ u *fftypes.UUID } - -func (f *uuidField) Scan(src interface{}) (err error) { - switch tv := src.(type) { - case string: - if tv == "" { - f.u = nil - return nil - } - f.u, err = fftypes.ParseUUID(context.Background(), tv) - return err - case *fftypes.UUID: - f.u = tv - case fftypes.UUID: - u := tv - f.u = &u - case *fftypes.Bytes32: - if tv == nil { - f.u = nil - return nil - } - var u fftypes.UUID - copy(u[:], tv[0:16]) - f.u = &u - case fftypes.Bytes32: - var u fftypes.UUID - copy(u[:], tv[0:16]) - f.u = &u - case nil: - f.u = nil - default: - return i18n.NewError(context.Background(), i18n.MsgTypeRestoreFailed, src, f.u) - } - return nil -} -func (f *uuidField) Value() (driver.Value, error) { return f.u.Value() } -func (f *uuidField) String() string { return fmt.Sprintf("%v", f.u) } -func (f *UUIDField) getSerialization() FieldSerialization { return &uuidField{} } -func (f *UUIDField) filterAsString() bool { return true } -func (f *UUIDField) description() string { return "UUID" } - -type Bytes32Field struct{} -type bytes32Field struct{ b32 *fftypes.Bytes32 } - -func (f *bytes32Field) Scan(src interface{}) (err error) { - switch tv := src.(type) { - case string: - if tv == "" { - f.b32 = nil - return nil - } - f.b32, err = fftypes.ParseBytes32(context.Background(), tv) - return err - case *fftypes.Bytes32: - f.b32 = tv - case fftypes.Bytes32: - b32 := tv - f.b32 = &b32 - case nil: - f.b32 = nil - default: - return i18n.NewError(context.Background(), i18n.MsgTypeRestoreFailed, src, f.b32) - } - return nil -} -func (f *bytes32Field) Value() (driver.Value, error) { return f.b32.Value() } -func (f *bytes32Field) String() string { return fmt.Sprintf("%v", f.b32) } -func (f *Bytes32Field) getSerialization() FieldSerialization { return &bytes32Field{} } -func (f *Bytes32Field) filterAsString() bool { return true } -func (f *Bytes32Field) description() string { return "Byte-Array" } - -type Int64Field struct{} -type int64Field struct{ i int64 } - -func (f *int64Field) Scan(src interface{}) (err error) { - switch tv := src.(type) { - case int: - f.i = int64(tv) - case int32: - f.i = int64(tv) - case int64: - f.i = tv - case uint: - f.i = int64(tv) - case uint32: - f.i = int64(tv) - case uint64: - f.i = int64(tv) - case string: - f.i, err = strconv.ParseInt(src.(string), 10, 64) - if err != nil { - return i18n.WrapError(context.Background(), err, i18n.MsgTypeRestoreFailed, src, int64(0)) - } - case nil: - f.i = 0 - default: - return i18n.NewError(context.Background(), i18n.MsgTypeRestoreFailed, src, f.i) - } - return nil -} -func (f *int64Field) Value() (driver.Value, error) { return f.i, nil } -func (f *int64Field) String() string { return fmt.Sprintf("%d", f.i) } -func (f *Int64Field) getSerialization() FieldSerialization { return &int64Field{} } -func (f *Int64Field) filterAsString() bool { return false } -func (f *Int64Field) description() string { return "Integer" } - -type TimeField struct{} -type timeField struct{ t *fftypes.FFTime } - -func (f *timeField) Scan(src interface{}) (err error) { - switch tv := src.(type) { - case int: - f.t = fftypes.UnixTime(int64(tv)) - case int64: - f.t = fftypes.UnixTime(tv) - case string: - f.t, err = fftypes.ParseTimeString(tv) - return err - case fftypes.FFTime: - f.t = &tv - return nil - case *fftypes.FFTime: - f.t = tv - return nil - case nil: - f.t = nil - default: - return 
i18n.NewError(context.Background(), i18n.MsgTypeRestoreFailed, src, f.t) - } - return nil -} -func (f *timeField) Value() (driver.Value, error) { - if f.t == nil { - return nil, nil - } - return f.t.UnixNano(), nil -} -func (f *timeField) String() string { return fmt.Sprintf("%v", f.t) } -func (f *TimeField) getSerialization() FieldSerialization { return &timeField{} } -func (f *TimeField) filterAsString() bool { return false } -func (f *TimeField) description() string { return "Date-time" } - -type JSONField struct{} -type jsonField struct{ b []byte } - -func (f *jsonField) Scan(src interface{}) (err error) { - switch tv := src.(type) { - case string: - f.b = []byte(tv) - case []byte: - f.b = tv - case fftypes.JSONObject: - f.b, err = json.Marshal(tv) - case nil: - f.b = nil - case *fftypes.JSONAny: - f.b = tv.Bytes() - default: - return i18n.NewError(context.Background(), i18n.MsgTypeRestoreFailed, src, f.b) - } - return err -} -func (f *jsonField) Value() (driver.Value, error) { return f.b, nil } -func (f *jsonField) String() string { return string(f.b) } -func (f *JSONField) getSerialization() FieldSerialization { return &jsonField{} } -func (f *JSONField) filterAsString() bool { return true } -func (f *JSONField) description() string { return "JSON-blob" } - -type FFStringArrayField struct{} -type ffNameArrayField struct{ na core.FFStringArray } - -func (f *ffNameArrayField) Scan(src interface{}) (err error) { - return f.na.Scan(src) -} -func (f *ffNameArrayField) Value() (driver.Value, error) { return f.na.String(), nil } -func (f *ffNameArrayField) String() string { return f.na.String() } -func (f *FFStringArrayField) getSerialization() FieldSerialization { return &ffNameArrayField{} } -func (f *FFStringArrayField) filterAsString() bool { return true } -func (f *FFStringArrayField) description() string { return "String-array" } - -type BoolField struct{} -type boolField struct{ b bool } - -func (f *boolField) Scan(src interface{}) (err error) { - switch tv := src.(type) { - case int: - f.b = tv != 0 - case int32: - f.b = tv != 0 - case int64: - f.b = tv != 0 - case uint: - f.b = tv != 0 - case uint32: - f.b = tv != 0 - case uint64: - f.b = tv != 0 - case bool: - f.b = tv - case string: - f.b = strings.EqualFold(tv, "true") - case nil: - f.b = false - default: - return i18n.NewError(context.Background(), i18n.MsgTypeRestoreFailed, src, f.b) - } - return nil -} -func (f *boolField) Value() (driver.Value, error) { return f.b, nil } -func (f *boolField) String() string { return fmt.Sprintf("%t", f.b) } -func (f *BoolField) getSerialization() FieldSerialization { return &boolField{} } -func (f *BoolField) filterAsString() bool { return false } -func (f *BoolField) description() string { return "Boolean" } diff --git a/pkg/database/query_fields_test.go b/pkg/database/query_fields_test.go deleted file mode 100644 index 193b2ca9d2..0000000000 --- a/pkg/database/query_fields_test.go +++ /dev/null @@ -1,219 +0,0 @@ -// Copyright © 2022 Kaleido, Inc. -// -// SPDX-License-Identifier: Apache-2.0 -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -package database - -import ( - "testing" - "time" - - "github.com/hyperledger/firefly-common/pkg/fftypes" - "github.com/stretchr/testify/assert" -) - -func TestNullField(t *testing.T) { - - f := nullField{} - v, err := f.Value() - assert.NoError(t, err) - assert.Nil(t, v) - - err = f.Scan("anything") - assert.NoError(t, err) - v, err = f.Value() - assert.NoError(t, err) - assert.Nil(t, v) - - assert.Equal(t, "null", f.String()) -} - -func TestStringField(t *testing.T) { - - fd := &StringField{} - assert.NotEmpty(t, fd.description()) - f := stringField{} - - err := f.Scan("test") - assert.NoError(t, err) - v, err := f.Value() - assert.NoError(t, err) - assert.Equal(t, "test", v) - - err = f.Scan(nil) - assert.NoError(t, err) - v, err = f.Value() - assert.NoError(t, err) - assert.Equal(t, "", v) - -} - -func TestUUIDField(t *testing.T) { - - fd := &UUIDField{} - assert.NotEmpty(t, fd.description()) - f := uuidField{} - - err := f.Scan("") - assert.NoError(t, err) - v, err := f.Value() - assert.NoError(t, err) - assert.Nil(t, v) - - u1 := fftypes.NewUUID() - err = f.Scan(u1.String()) - assert.NoError(t, err) - v, err = f.Value() - assert.NoError(t, err) - assert.Equal(t, v, u1.String()) - - err = f.Scan(nil) - assert.NoError(t, err) - v, err = f.Value() - assert.NoError(t, err) - assert.Nil(t, v) - -} - -func TestBytes32Field(t *testing.T) { - - fd := &Bytes32Field{} - assert.NotEmpty(t, fd.description()) - f := bytes32Field{} - - err := f.Scan("") - assert.NoError(t, err) - v, err := f.Value() - assert.NoError(t, err) - assert.Nil(t, v) - - b1 := fftypes.NewRandB32() - err = f.Scan(b1.String()) - assert.NoError(t, err) - v, err = f.Value() - assert.NoError(t, err) - assert.Equal(t, v, b1.String()) - - err = f.Scan(nil) - assert.NoError(t, err) - v, err = f.Value() - assert.NoError(t, err) - assert.Nil(t, v) - -} - -func TestInt64Field(t *testing.T) { - - fd := &Int64Field{} - assert.NotEmpty(t, fd.description()) - f := int64Field{} - - err := f.Scan("12345") - assert.NoError(t, err) - v, err := f.Value() - assert.NoError(t, err) - assert.Equal(t, int64(12345), v) - - err = f.Scan(nil) - assert.NoError(t, err) - v, err = f.Value() - assert.NoError(t, err) - assert.Equal(t, int64(0), v) - -} - -func TestTimeField(t *testing.T) { - - fd := &TimeField{} - assert.NotEmpty(t, fd.description()) - f := timeField{} - - now := time.Now() - err := f.Scan(now.Format(time.RFC3339Nano)) - assert.NoError(t, err) - v, err := f.Value() - assert.NoError(t, err) - assert.Equal(t, v, now.UnixNano()) - - err = f.Scan(nil) - assert.NoError(t, err) - v, err = f.Value() - assert.NoError(t, err) - assert.Nil(t, v) - -} - -func TestJSONField(t *testing.T) { - - fd := &JSONField{} - assert.NotEmpty(t, fd.description()) - f := jsonField{} - - err := f.Scan("{}") - assert.NoError(t, err) - v, err := f.Value() - assert.NoError(t, err) - assert.Equal(t, v, []byte("{}")) - - err = f.Scan(fftypes.JSONAnyPtr("{}")) - assert.NoError(t, err) - - err = f.Scan(nil) - assert.NoError(t, err) - v, err = f.Value() - assert.NoError(t, err) - assert.Nil(t, v) - -} - -func TestBoolField(t *testing.T) { - - fd := &BoolField{} - assert.NotEmpty(t, fd.description()) - f := boolField{} - - err := f.Scan("true") - assert.NoError(t, err) - v, err := f.Value() - assert.NoError(t, err) - assert.True(t, v.(bool)) - - err = f.Scan(nil) - assert.NoError(t, err) - v, err = f.Value() - assert.NoError(t, err) - assert.False(t, v.(bool)) - -} 
- -func TestFFStringArrayField(t *testing.T) { - - fd := &FFStringArrayField{} - assert.NotEmpty(t, fd.description()) - f := ffNameArrayField{} - - err := f.Scan("a,b") - assert.NoError(t, err) - v, err := f.Value() - assert.NoError(t, err) - assert.Equal(t, v, "a,b") - - err = f.Scan(nil) - assert.NoError(t, err) - v, err = f.Value() - assert.NoError(t, err) - assert.Equal(t, "", v) - -} diff --git a/pkg/database/update.go b/pkg/database/update.go deleted file mode 100644 index dcf5edbae1..0000000000 --- a/pkg/database/update.go +++ /dev/null @@ -1,147 +0,0 @@ -// Copyright © 2022 Kaleido, Inc. -// -// SPDX-License-Identifier: Apache-2.0 -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package database - -import ( - "context" - "fmt" - "strings" - - "github.com/hyperledger/firefly-common/pkg/i18n" -) - -// UpdateBuilder is the output of the builder -type UpdateBuilder interface { - // Set starts creation of a set operation - Set(field string, value interface{}) Update - - // S starts an update that doesn't have any fields - S() Update - - // Fields returns the available fields on the update - Fields() []string -} - -type Update interface { - // Set adds a set condition to the update - Set(field string, value interface{}) Update - - // IsEmpty - IsEmpty() bool - - // Finalize completes the update, and for the plugin to validated output structure to convert - Finalize() (*UpdateInfo, error) -} - -// UpdateFactory creates a update builder in the given context, and contains the rules on -// which fields can be used by the builder (and how they are serialized) -type UpdateFactory interface { - New(ctx context.Context) UpdateBuilder -} - -// SetOperation is an individual update action to perform -type SetOperation struct { - Field string - Value FieldSerialization -} - -// UpdateInfo is the structure returned by Finalize to the plugin, to serialize this uilter -// into the underlying database mechanism's uilter language -type UpdateInfo struct { - SetOperations []*SetOperation -} - -type setOperation struct { - field string - value interface{} -} - -type updateBuilder struct { - ctx context.Context - queryFields queryFields -} - -func (ub *updateBuilder) Fields() []string { - keys := make([]string, len(ub.queryFields)) - i := 0 - for k := range ub.queryFields { - keys[i] = k - i++ - } - return keys -} - -func (ub *updateBuilder) Set(field string, value interface{}) Update { - return &setUpdate{ - ub: ub, - setOperations: []*setOperation{{field, value}}, - } -} - -func (ub *updateBuilder) S() Update { - return &setUpdate{ - ub: ub, - setOperations: []*setOperation{}, - } -} - -type setUpdate struct { - ub *updateBuilder - setOperations []*setOperation -} - -func (u *setUpdate) IsEmpty() bool { - return len(u.setOperations) == 0 -} - -func (u *setUpdate) Set(field string, value interface{}) Update { - u.setOperations = append(u.setOperations, &setOperation{field, value}) - return u -} - -func (u *UpdateInfo) String() string { - var buf strings.Builder - for i, si := range 
u.SetOperations { - if i > 0 { - buf.WriteString(", ") - } - buf.WriteString(fmt.Sprintf("%s=%s", si.Field, valueString(si.Value))) - } - return buf.String() -} - -func (u *setUpdate) Finalize() (*UpdateInfo, error) { - ui := &UpdateInfo{ - SetOperations: make([]*SetOperation, len(u.setOperations)), - } - for i, si := range u.setOperations { - name := strings.ToLower(si.field) - field, ok := u.ub.queryFields[name] - if !ok { - return nil, i18n.NewError(u.ub.ctx, i18n.MsgInvalidFilterField, name) - } - value := field.getSerialization() - if err := value.Scan(si.value); err != nil { - return nil, i18n.WrapError(u.ub.ctx, err, i18n.MsgInvalidValueForFilterField, name) - } - ui.SetOperations[i] = &SetOperation{ - Field: name, - Value: value, - } - } - return ui, nil -} diff --git a/pkg/database/update_test.go b/pkg/database/update_test.go deleted file mode 100644 index 58125971d4..0000000000 --- a/pkg/database/update_test.go +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright © 2021 Kaleido, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this uile except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the speciuic language governing permissions and -// limitations under the License. - -package database - -import ( - "context" - "testing" - - "github.com/hyperledger/firefly-common/pkg/fftypes" - "github.com/hyperledger/firefly/pkg/core" - "github.com/stretchr/testify/assert" -) - -func TestUpdateBuilderOK(t *testing.T) { - uuid := fftypes.MustParseUUID("c414cab3-9bd4-48f3-b16a-0d74a3bbb60e") - u := MessageQueryFactory.NewUpdate(context.Background()).S() - assert.True(t, u.IsEmpty()) - u.Set("sequence", 12345). - Set("cid", uuid). - Set("author", "0x1234"). 
- Set("type", core.MessageTypePrivate) - assert.False(t, u.IsEmpty()) - ui, err := u.Finalize() - assert.NoError(t, err) - assert.Equal(t, "sequence=12345, cid='c414cab3-9bd4-48f3-b16a-0d74a3bbb60e', author='0x1234', type='private'", ui.String()) -} - -func TestUpdateBuilderBadField(t *testing.T) { - u := MessageQueryFactory.NewUpdate(context.Background()).Set("wrong", 12345) - _, err := u.Finalize() - assert.Regexp(t, "FF00142.*wrong", err) -} - -func TestUpdateBuilderBadValue(t *testing.T) { - u := MessageQueryFactory.NewUpdate(context.Background()).Set("id", map[bool]bool{true: false}) - _, err := u.Finalize() - assert.Regexp(t, "FF00143.*id", err) -} - -func TestUpdateBuilderGetFields(t *testing.T) { - ub := MessageQueryFactory.NewUpdate(context.Background()) - assert.NotNil(t, ub.Fields()) -} diff --git a/test/e2e/client/restclient.go b/test/e2e/client/restclient.go index 644243e885..35231b573e 100644 --- a/test/e2e/client/restclient.go +++ b/test/e2e/client/restclient.go @@ -315,7 +315,7 @@ func (client *FireFlyClient) BroadcastMessageAsIdentity(t *testing.T, did, topic SetBody(core.MessageInOut{ Message: core.Message{ Header: core.MessageHeader{ - Topics: core.FFStringArray{topic}, + Topics: fftypes.FFStringArray{topic}, SignerRef: core.SignerRef{ Author: did, }, @@ -435,7 +435,7 @@ func (client *FireFlyClient) BroadcastBlobMessage(t *testing.T, topic string) (* SetBody(core.MessageInOut{ Message: core.Message{ Header: core.MessageHeader{ - Topics: core.FFStringArray{topic}, + Topics: fftypes.FFStringArray{topic}, }, }, InlineData: core.InlineData{ @@ -452,7 +452,7 @@ func (client *FireFlyClient) PrivateBlobMessageDatatypeTagged(t *testing.T, topi SetBody(core.MessageInOut{ Message: core.Message{ Header: core.MessageHeader{ - Topics: core.FFStringArray{topic}, + Topics: fftypes.FFStringArray{topic}, }, }, InlineData: core.InlineData{ @@ -477,7 +477,7 @@ func (client *FireFlyClient) PrivateMessageWithKey(key, topic, idempotencyKey st Header: core.MessageHeader{ Tag: tag, TxType: txType, - Topics: core.FFStringArray{topic}, + Topics: fftypes.FFStringArray{topic}, SignerRef: core.SignerRef{ Key: key, }, diff --git a/test/e2e/multiparty/common.go b/test/e2e/multiparty/common.go index 13de38d0b6..5d1e6ffd4d 100644 --- a/test/e2e/multiparty/common.go +++ b/test/e2e/multiparty/common.go @@ -217,7 +217,7 @@ func validateReceivedMessages(ts *testState, client *client.FireFlyClient, topic var returnData []*core.Data for idx := 0; idx < len(messages); idx++ { assert.Equal(ts.t, txtype, (messages)[idx].Header.TxType) - assert.Equal(ts.t, core.FFStringArray{topic}, (messages)[idx].Header.Topics) + assert.Equal(ts.t, fftypes.FFStringArray{topic}, (messages)[idx].Header.Topics) assert.Equal(ts.t, topic, (messages)[idx].Header.Topics[0]) data := client.GetDataForMessage(ts.t, ts.startTime, (messages)[idx].Header.ID) From e13704fafa2c3eb1e3fd031b4843ba577e66b0c7 Mon Sep 17 00:00:00 2001 From: Peter Broadhurst Date: Sun, 4 Dec 2022 21:18:49 -0500 Subject: [PATCH 2/5] Roll out changes through the packages Signed-off-by: Peter Broadhurst --- go.mod | 2 +- go.sum | 2 + internal/apiserver/ffi2swagger.go | 4 - internal/apiserver/restfilter.go | 255 ------------------ internal/apiserver/restfilter_test.go | 149 ---------- internal/apiserver/route_get_batches.go | 4 +- .../apiserver/route_get_blockchain_events.go | 4 +- .../route_get_contract_api_listeners.go | 4 +- internal/apiserver/route_get_contract_apis.go | 4 +- .../route_get_contract_interfaces.go | 4 +- .../apiserver/route_get_contract_listeners.go 
| 4 +- internal/apiserver/route_get_data.go | 4 +- internal/apiserver/route_get_data_blob.go | 2 +- internal/apiserver/route_get_data_msgs.go | 4 +- internal/apiserver/route_get_data_value.go | 3 +- internal/apiserver/route_get_datatypes.go | 4 +- internal/apiserver/route_get_events.go | 6 +- internal/apiserver/route_get_events_test.go | 10 +- internal/apiserver/route_get_groups.go | 4 +- internal/apiserver/route_get_identities.go | 6 +- .../apiserver/route_get_identity_verifiers.go | 4 +- internal/apiserver/route_get_msg_data.go | 2 +- internal/apiserver/route_get_msg_events.go | 4 +- internal/apiserver/route_get_msgs.go | 6 +- internal/apiserver/route_get_msgs_test.go | 10 +- internal/apiserver/route_get_namespaces.go | 2 +- .../apiserver/route_get_net_identities.go | 6 +- internal/apiserver/route_get_net_nodes.go | 4 +- internal/apiserver/route_get_net_orgs.go | 4 +- internal/apiserver/route_get_ops.go | 4 +- internal/apiserver/route_get_pins.go | 4 +- internal/apiserver/route_get_subscriptions.go | 4 +- .../route_get_token_account_pools.go | 4 +- .../apiserver/route_get_token_accounts.go | 4 +- .../apiserver/route_get_token_approvals.go | 6 +- .../apiserver/route_get_token_balances.go | 4 +- internal/apiserver/route_get_token_pools.go | 4 +- .../apiserver/route_get_token_transfers.go | 6 +- .../route_get_token_transfers_test.go | 4 +- .../route_get_txn_blockchainevents.go | 2 +- internal/apiserver/route_get_txn_by_id.go | 2 +- internal/apiserver/route_get_txn_ops.go | 2 +- internal/apiserver/route_get_txns.go | 4 +- internal/apiserver/route_get_verifiers.go | 4 +- .../apiserver/route_spi_get_namespaces.go | 2 +- internal/apiserver/route_spi_get_ops.go | 4 +- internal/apiserver/route_spi_post_reset.go | 2 +- internal/apiserver/routes.go | 3 - internal/apiserver/server.go | 35 +-- internal/assets/manager.go | 19 +- internal/assets/token_approval.go | 4 +- internal/assets/token_approval_test.go | 9 +- internal/assets/token_pool.go | 3 +- internal/assets/token_transfer.go | 4 +- internal/assets/token_transfer_test.go | 13 +- internal/batch/batch_manager_test.go | 8 +- internal/batch/batch_processor.go | 3 +- internal/contracts/manager.go | 17 +- internal/contracts/manager_test.go | 5 +- internal/events/aggregator.go | 5 +- internal/events/event_dispatcher.go | 5 +- internal/events/event_poller.go | 7 +- internal/networkmap/data_query.go | 15 +- internal/networkmap/manager.go | 15 +- internal/operations/manager_test.go | 3 +- internal/operations/operation_updater_test.go | 5 +- internal/orchestrator/data_query.go | 31 +-- internal/orchestrator/data_query_test.go | 3 +- internal/orchestrator/orchestrator.go | 33 +-- internal/orchestrator/subscriptions.go | 4 +- internal/privatemessaging/groupmanager.go | 5 +- .../shareddownload/download_manager_test.go | 5 +- internal/txcommon/txcommon_test.go | 3 +- mocks/assetmocks/manager.go | 74 ++--- mocks/contractmocks/manager.go | 50 ++-- mocks/networkmapmocks/manager.go | 86 +++--- mocks/orchestratormocks/orchestrator.go | 186 ++++++------- mocks/privatemessagingmocks/manager.go | 14 +- 78 files changed, 425 insertions(+), 834 deletions(-) delete mode 100644 internal/apiserver/restfilter.go delete mode 100644 internal/apiserver/restfilter_test.go diff --git a/go.mod b/go.mod index f6b2b1912d..156bd4f960 100644 --- a/go.mod +++ b/go.mod @@ -14,7 +14,7 @@ require ( github.com/golang-migrate/migrate/v4 v4.15.2 github.com/gorilla/mux v1.8.0 github.com/gorilla/websocket v1.5.0 - github.com/hyperledger/firefly-common v1.1.5-0.20221205013211-db8cd88a141f + 
github.com/hyperledger/firefly-common v1.1.5-0.20221205020100-299c0d5b3396 github.com/hyperledger/firefly-signer v1.1.2 github.com/jarcoal/httpmock v1.2.0 github.com/karlseguin/ccache v2.0.3+incompatible diff --git a/go.sum b/go.sum index 24833da598..e8282e940b 100644 --- a/go.sum +++ b/go.sum @@ -689,6 +689,8 @@ github.com/hyperledger/firefly-common v1.1.5-0.20221205010550-085676278a41 h1:Tb github.com/hyperledger/firefly-common v1.1.5-0.20221205010550-085676278a41/go.mod h1:3ubN46/dB+xurCPvdfqMKjB/CJU3I/DsfOoS7dY2SyQ= github.com/hyperledger/firefly-common v1.1.5-0.20221205013211-db8cd88a141f h1:EfkvaVkVldy0tmJUjN0pnR83DpZYfG1ARR60c+Q4NJ4= github.com/hyperledger/firefly-common v1.1.5-0.20221205013211-db8cd88a141f/go.mod h1:3ubN46/dB+xurCPvdfqMKjB/CJU3I/DsfOoS7dY2SyQ= +github.com/hyperledger/firefly-common v1.1.5-0.20221205020100-299c0d5b3396 h1:iGT7SpBMGFR2+WlEXa3kYMeIQAFMbdsp9rRWwBk1E4o= +github.com/hyperledger/firefly-common v1.1.5-0.20221205020100-299c0d5b3396/go.mod h1:3ubN46/dB+xurCPvdfqMKjB/CJU3I/DsfOoS7dY2SyQ= github.com/hyperledger/firefly-signer v1.1.2 h1:QuS3M5w9px3BnPa4jIWMDg+z2ySK76MoO5Egh0G+tFg= github.com/hyperledger/firefly-signer v1.1.2/go.mod h1:4h2MN910A2knrWGYCT+aWjBDlhptgQn/9WcT1N/Ct8s= github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= diff --git a/internal/apiserver/ffi2swagger.go b/internal/apiserver/ffi2swagger.go index f639f9cc08..cde43a1125 100644 --- a/internal/apiserver/ffi2swagger.go +++ b/internal/apiserver/ffi2swagger.go @@ -32,7 +32,6 @@ import ( "github.com/hyperledger/firefly/internal/coreconfig" "github.com/hyperledger/firefly/internal/coremsgs" "github.com/hyperledger/firefly/pkg/core" - "github.com/hyperledger/firefly/pkg/database" ) type FFISwaggerGen interface { @@ -151,9 +150,6 @@ func (og *ffiSwaggerGen) addEvent(routes []*ffapi.Route, event *fftypes.FFIEvent JSONInputValue: nil, JSONOutputValue: func() interface{} { return []*core.ContractListener{} }, JSONOutputCodes: []int{http.StatusOK}, - Extensions: &coreExtensions{ - FilterFactory: database.ContractListenerQueryFactory, - }, }) return routes } diff --git a/internal/apiserver/restfilter.go b/internal/apiserver/restfilter.go deleted file mode 100644 index 6c71be126f..0000000000 --- a/internal/apiserver/restfilter.go +++ /dev/null @@ -1,255 +0,0 @@ -// Copyright © 2022 Kaleido, Inc. -// -// SPDX-License-Identifier: Apache-2.0 -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package apiserver - -import ( - "context" - "database/sql/driver" - "net/http" - "net/url" - "reflect" - "sort" - "strconv" - "strings" - - "github.com/hyperledger/firefly-common/pkg/i18n" - "github.com/hyperledger/firefly-common/pkg/log" - "github.com/hyperledger/firefly/internal/coremsgs" - "github.com/hyperledger/firefly/pkg/database" -) - -type filterResultsWithCount struct { - Count int64 `json:"count"` - Total int64 `json:"total"` - Items interface{} `json:"items"` -} - -type filterModifiers struct { - negate bool - caseInsensitive bool - emptyIsNull bool -} - -func syncRetcode(isSync bool) int { - if isSync { - return http.StatusOK - } - return http.StatusAccepted -} - -func filterResult(items interface{}, res *database.FilterResult, err error) (interface{}, error) { - itemsVal := reflect.ValueOf(items) - if err != nil || res == nil || res.TotalCount == nil || itemsVal.Kind() != reflect.Slice { - return items, err - } - return &filterResultsWithCount{ - Total: *res.TotalCount, - Count: int64(itemsVal.Len()), - Items: items, - }, nil -} - -func (as *apiServer) getValues(values url.Values, key string) (results []string) { - for queryName, queryValues := range values { - // We choose to be case insensitive for our filters, so protocolID and protocolid can be used interchangeably - if strings.EqualFold(queryName, key) { - results = append(results, queryValues...) - } - } - return results -} - -func (as *apiServer) buildFilter(req *http.Request, ff database.QueryFactory) (database.AndFilter, error) { - ctx := req.Context() - log.L(ctx).Debugf("Query: %s", req.URL.RawQuery) - fb := ff.NewFilterLimit(ctx, as.defaultFilterLimit) - possibleFields := fb.Fields() - sort.Strings(possibleFields) - filter := fb.And() - _ = req.ParseForm() - for _, field := range possibleFields { - values := as.getValues(req.Form, field) - if len(values) == 1 { - cond, err := as.getCondition(ctx, fb, field, values[0]) - if err != nil { - return nil, err - } - filter.Condition(cond) - } else if len(values) > 0 { - sort.Strings(values) - fs := make([]database.Filter, len(values)) - for i, value := range values { - cond, err := as.getCondition(ctx, fb, field, value) - if err != nil { - return nil, err - } - fs[i] = cond - } - filter.Condition(fb.Or(fs...)) - } - } - skipVals := as.getValues(req.Form, "skip") - if len(skipVals) > 0 { - s, _ := strconv.ParseUint(skipVals[0], 10, 64) - if as.maxFilterSkip != 0 && s > as.maxFilterSkip { - return nil, i18n.NewError(req.Context(), coremsgs.MsgMaxFilterSkip, as.maxFilterSkip) - } - filter.Skip(s) - } - limitVals := as.getValues(req.Form, "limit") - if len(limitVals) > 0 { - l, _ := strconv.ParseUint(limitVals[0], 10, 64) - if as.maxFilterLimit != 0 && l > as.maxFilterLimit { - return nil, i18n.NewError(req.Context(), coremsgs.MsgMaxFilterLimit, as.maxFilterLimit) - } - filter.Limit(l) - } - sortVals := as.getValues(req.Form, "sort") - for _, sv := range sortVals { - subSortVals := strings.Split(sv, ",") - for _, ssv := range subSortVals { - ssv = strings.TrimSpace(ssv) - if ssv != "" { - filter.Sort(ssv) - } - } - } - descendingVals := as.getValues(req.Form, "descending") - ascendingVals := as.getValues(req.Form, "ascending") - if len(descendingVals) > 0 && (descendingVals[0] == "" || strings.EqualFold(descendingVals[0], "true")) { - filter.Descending() - } else if len(ascendingVals) > 0 && (ascendingVals[0] == "" || strings.EqualFold(ascendingVals[0], "true")) { - filter.Ascending() - } - countVals := as.getValues(req.Form, "count") - filter.Count(len(countVals) > 0 && 
(countVals[0] == "" || strings.EqualFold(countVals[0], "true"))) - return filter, nil -} - -func (as *apiServer) checkNoMods(ctx context.Context, mods filterModifiers, field, op string, filter database.Filter) (database.Filter, error) { - emptyModifiers := filterModifiers{} - if mods != emptyModifiers { - return nil, i18n.NewError(ctx, coremsgs.MsgQueryOpUnsupportedMod, op, field) - } - return filter, nil -} - -func (as *apiServer) getCondition(ctx context.Context, fb database.FilterBuilder, field, value string) (filter database.Filter, err error) { - - mods := filterModifiers{} - operator := make([]rune, 0, 2) - prefixLength := 0 -opFinder: - for _, r := range value { - switch r { - case '!': - mods.negate = true - prefixLength++ - case ':': - mods.caseInsensitive = true - prefixLength++ - case '?': - mods.emptyIsNull = true - prefixLength++ - case '>', '<': - // Terminates the opFinder if it's the second character - if len(operator) == 1 && operator[0] != r { - // Detected "><" or "<>" - which is a single char operator, followed by beginning of match string - break opFinder - } - operator = append(operator, r) - prefixLength++ - if len(operator) > 1 { - // Detected ">>" or "<<" full operators - break opFinder - } - case '=', '@', '^', '$': - // Always terminates the opFinder - // Could be ">=" or "<=" (due to above logic continuing on '>' or '<' first char) - operator = append(operator, r) - prefixLength++ - break opFinder - default: - // Found a normal character - break opFinder - } - } - - var matchString driver.Value = value[prefixLength:] - if mods.emptyIsNull && prefixLength == len(value) { - matchString = nil - } - return as.mapOperation(ctx, fb, field, matchString, string(operator), mods) -} - -func (as *apiServer) mapOperation(ctx context.Context, fb database.FilterBuilder, field string, matchString driver.Value, op string, mods filterModifiers) (filter database.Filter, err error) { - - switch op { - case ">=": - return as.checkNoMods(ctx, mods, field, op, fb.Gte(field, matchString)) - case "<=": - return as.checkNoMods(ctx, mods, field, op, fb.Lte(field, matchString)) - case ">", ">>": - return as.checkNoMods(ctx, mods, field, op, fb.Gt(field, matchString)) - case "<", "<<": - return as.checkNoMods(ctx, mods, field, op, fb.Lt(field, matchString)) - case "@": - if mods.caseInsensitive { - if mods.negate { - return fb.NotIContains(field, matchString), nil - } - return fb.IContains(field, matchString), nil - } - if mods.negate { - return fb.NotContains(field, matchString), nil - } - return fb.Contains(field, matchString), nil - case "^": - if mods.caseInsensitive { - if mods.negate { - return fb.NotIStartsWith(field, matchString), nil - } - return fb.IStartsWith(field, matchString), nil - } - if mods.negate { - return fb.NotStartsWith(field, matchString), nil - } - return fb.StartsWith(field, matchString), nil - case "$": - if mods.caseInsensitive { - if mods.negate { - return fb.NotIEndsWith(field, matchString), nil - } - return fb.IEndsWith(field, matchString), nil - } - if mods.negate { - return fb.NotEndsWith(field, matchString), nil - } - return fb.EndsWith(field, matchString), nil - default: - if mods.caseInsensitive { - if mods.negate { - return fb.NIeq(field, matchString), nil - } - return fb.IEq(field, matchString), nil - } - if mods.negate { - return fb.Neq(field, matchString), nil - } - return fb.Eq(field, matchString), nil - } -} diff --git a/internal/apiserver/restfilter_test.go b/internal/apiserver/restfilter_test.go deleted file mode 100644 index 
599e02896d..0000000000 --- a/internal/apiserver/restfilter_test.go +++ /dev/null @@ -1,149 +0,0 @@ -// Copyright © 2021 Kaleido, Inc. -// -// SPDX-License-Identifier: Apache-2.0 -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package apiserver - -import ( - "fmt" - "net/http/httptest" - "testing" - - "github.com/hyperledger/firefly/pkg/database" - "github.com/stretchr/testify/assert" -) - -func TestBuildFilterDescending(t *testing.T) { - as := &apiServer{ - maxFilterLimit: 250, - } - - req := httptest.NewRequest("GET", "/things?created=0&confirmed=!0&Tag=>abc&TAG==abc&tag=@abc&tag=^abc&tag=!@abc&tag=!^abc&skip=10&limit=50&sort=tag,sequence&descending", nil) - filter, err := as.buildFilter(req, database.MessageQueryFactory) - assert.NoError(t, err) - fi, err := filter.Finalize() - assert.NoError(t, err) - - assert.Equal(t, "( confirmed != 0 ) && ( created == 0 ) && ( ( tag !% 'abc' ) || ( tag !^ 'abc' ) || ( tag <= 'abc' ) || ( tag << 'abc' ) || ( tag >= 'abc' ) || ( tag >> 'abc' ) || ( tag %= 'abc' ) || ( tag ^= 'abc' ) ) sort=-tag,-sequence skip=10 limit=50", fi.String()) -} - -func testIndividualFilter(t *testing.T, queryString, expectedToString string) { - as := &apiServer{ - maxFilterLimit: 250, - } - req := httptest.NewRequest("GET", fmt.Sprintf("/things?%s", queryString), nil) - filter, err := as.buildFilter(req, database.MessageQueryFactory) - assert.NoError(t, err) - fi, err := filter.Finalize() - assert.NoError(t, err) - assert.Equal(t, expectedToString, fi.String()) -} - -func TestBuildFilterEachCombo(t *testing.T) { - testIndividualFilter(t, "tag=cat", "( tag == 'cat' )") - testIndividualFilter(t, "tag==cat", "( tag == 'cat' )") - testIndividualFilter(t, "tag===cat", "( tag == '=cat' )") - testIndividualFilter(t, "tag=!cat", "( tag != 'cat' )") - testIndividualFilter(t, "tag=!=cat", "( tag != 'cat' )") - testIndividualFilter(t, "tag=!=!cat", "( tag != '!cat' )") - testIndividualFilter(t, "tag=!==cat", "( tag != '=cat' )") - testIndividualFilter(t, "tag=!:=cat", "( tag ;= 'cat' )") - testIndividualFilter(t, "tag=:!=cat", "( tag ;= 'cat' )") - testIndividualFilter(t, "tag=:=cat", "( tag := 'cat' )") - testIndividualFilter(t, "tag=>cat", "( tag >> 'cat' )") - testIndividualFilter(t, "tag=>>cat", "( tag >> 'cat' )") - testIndividualFilter(t, "tag=>>>cat", "( tag >> '>cat' )") - testIndividualFilter(t, "tag=cat", "( tag << '>cat' )") - testIndividualFilter(t, "tag=>> '=cat", "( tag >= 'cat' )") - testIndividualFilter(t, "tag=<=cat", "( tag <= 'cat' )") - testIndividualFilter(t, "tag=>=>cat", "( tag >= '>cat' )") - testIndividualFilter(t, "tag=>==cat", "( tag >= '=cat' )") - testIndividualFilter(t, "tag=@@cat", "( tag %= '@cat' )") - testIndividualFilter(t, "tag=@cat", "( tag %= 'cat' )") - testIndividualFilter(t, "tag=!@cat", "( tag !% 'cat' )") - testIndividualFilter(t, "tag=:@cat", "( tag :% 'cat' )") - testIndividualFilter(t, "tag=!:@cat", "( tag ;% 'cat' )") - testIndividualFilter(t, "tag=^cat", "( tag ^= 'cat' )") - testIndividualFilter(t, "tag=!^cat", "( tag !^ 'cat' 
)") - testIndividualFilter(t, "tag=:^cat", "( tag :^ 'cat' )") - testIndividualFilter(t, "tag=!:^cat", "( tag ;^ 'cat' )") - testIndividualFilter(t, "tag=$cat", "( tag $= 'cat' )") - testIndividualFilter(t, "tag=!$cat", "( tag !$ 'cat' )") - testIndividualFilter(t, "tag=:$cat", "( tag :$ 'cat' )") - testIndividualFilter(t, "tag=!:$cat", "( tag ;$ 'cat' )") - testIndividualFilter(t, "tag==", "( tag == '' )") - testIndividualFilter(t, "tag=!=", "( tag != '' )") - testIndividualFilter(t, "tag=:!=", "( tag ;= '' )") - testIndividualFilter(t, "tag=?", "( tag == null )") - testIndividualFilter(t, "tag=!?", "( tag != null )") - testIndividualFilter(t, "tag=?=", "( tag == null )") - testIndividualFilter(t, "tag=!?=", "( tag != null )") - testIndividualFilter(t, "tag=?:!=", "( tag ;= null )") -} - -func testFailFilter(t *testing.T, queryString, errCode string) { - as := &apiServer{ - maxFilterLimit: 250, - } - req := httptest.NewRequest("GET", fmt.Sprintf("/things?%s", queryString), nil) - _, err := as.buildFilter(req, database.MessageQueryFactory) - assert.Regexp(t, errCode, err) -} - -func TestCheckNoMods(t *testing.T) { - testFailFilter(t, "tag=!>=test", "FF10322") - testFailFilter(t, "tag=:>test", "FF10322") - testFailFilter(t, "tag=! %d (first pin would be %d)", offset, offset+1) pins, _, err := ag.database.GetPins(ctx, ag.namespace, filter) ls := make([]core.LocallySequenced, len(pins)) diff --git a/internal/events/event_dispatcher.go b/internal/events/event_dispatcher.go index dc4c9f3aa3..e05db76e53 100644 --- a/internal/events/event_dispatcher.go +++ b/internal/events/event_dispatcher.go @@ -22,6 +22,7 @@ import ( "sync" "github.com/hyperledger/firefly-common/pkg/config" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" @@ -114,7 +115,7 @@ func newEventDispatcher(ctx context.Context, enricher *eventEnricher, ei events. 
namespace: sub.definition.Namespace, offsetType: core.OffsetTypeSubscription, offsetName: sub.definition.ID.String(), - addCriteria: func(af database.AndFilter) database.AndFilter { return af }, + addCriteria: func(af ffapi.AndFilter) ffapi.AndFilter { return af }, queryFactory: database.EventQueryFactory, getItems: ed.getEvents, newEventsHandler: ed.bufferedDelivery, @@ -153,7 +154,7 @@ func (ed *eventDispatcher) electAndStart() { <-ed.eventPoller.closed } -func (ed *eventDispatcher) getEvents(ctx context.Context, filter database.Filter, offset int64) ([]core.LocallySequenced, error) { +func (ed *eventDispatcher) getEvents(ctx context.Context, filter ffapi.Filter, offset int64) ([]core.LocallySequenced, error) { log.L(ctx).Tracef("Reading page of events > %d (first events would be %d)", offset, offset+1) events, _, err := ed.database.GetEvents(ctx, ed.namespace, filter) ls := make([]core.LocallySequenced, len(events)) diff --git a/internal/events/event_poller.go b/internal/events/event_poller.go index e5ee1d5e4e..6ffcc4a9ba 100644 --- a/internal/events/event_poller.go +++ b/internal/events/event_poller.go @@ -22,6 +22,7 @@ import ( "sync" "time" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/log" "github.com/hyperledger/firefly-common/pkg/retry" "github.com/hyperledger/firefly/pkg/core" @@ -49,9 +50,9 @@ type eventPollerConf struct { eventBatchTimeout time.Duration eventPollTimeout time.Duration firstEvent *core.SubOptsFirstEvent - queryFactory database.QueryFactory - addCriteria func(database.AndFilter) database.AndFilter - getItems func(context.Context, database.Filter, int64) ([]core.LocallySequenced, error) + queryFactory ffapi.QueryFactory + addCriteria func(ffapi.AndFilter) ffapi.AndFilter + getItems func(context.Context, ffapi.Filter, int64) ([]core.LocallySequenced, error) maybeRewind func() (bool, int64) newEventsHandler newEventsHandler namespace string diff --git a/internal/networkmap/data_query.go b/internal/networkmap/data_query.go index fa925634d8..fd6194790b 100644 --- a/internal/networkmap/data_query.go +++ b/internal/networkmap/data_query.go @@ -20,6 +20,7 @@ import ( "context" "database/sql/driver" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" @@ -50,12 +51,12 @@ func (nm *networkMap) GetOrganizationByNameOrID(ctx context.Context, nameOrID st return org, nil } -func (nm *networkMap) GetOrganizations(ctx context.Context, filter database.AndFilter) ([]*core.Identity, *database.FilterResult, error) { +func (nm *networkMap) GetOrganizations(ctx context.Context, filter ffapi.AndFilter) ([]*core.Identity, *ffapi.FilterResult, error) { filter.Condition(filter.Builder().Eq("type", core.IdentityTypeOrg)) return nm.GetIdentities(ctx, filter) } -func (nm *networkMap) GetOrganizationsWithVerifiers(ctx context.Context, filter database.AndFilter) ([]*core.IdentityWithVerifiers, *database.FilterResult, error) { +func (nm *networkMap) GetOrganizationsWithVerifiers(ctx context.Context, filter ffapi.AndFilter) ([]*core.IdentityWithVerifiers, *ffapi.FilterResult, error) { filter.Condition(filter.Builder().Eq("type", core.IdentityTypeOrg)) return nm.GetIdentitiesWithVerifiers(ctx, filter) } @@ -82,7 +83,7 @@ func (nm *networkMap) GetNodeByNameOrID(ctx context.Context, nameOrID string) (n return node, nil } -func (nm *networkMap) GetNodes(ctx context.Context, filter database.AndFilter) 
([]*core.Identity, *database.FilterResult, error) { +func (nm *networkMap) GetNodes(ctx context.Context, filter ffapi.AndFilter) ([]*core.Identity, *ffapi.FilterResult, error) { filter.Condition(filter.Builder().Eq("type", core.IdentityTypeNode)) return nm.database.GetIdentities(ctx, nm.namespace, filter) } @@ -145,11 +146,11 @@ func (nm *networkMap) GetIdentityByDIDWithVerifiers(ctx context.Context, did str return nm.withVerifiers(ctx, identity) } -func (nm *networkMap) GetIdentities(ctx context.Context, filter database.AndFilter) ([]*core.Identity, *database.FilterResult, error) { +func (nm *networkMap) GetIdentities(ctx context.Context, filter ffapi.AndFilter) ([]*core.Identity, *ffapi.FilterResult, error) { return nm.database.GetIdentities(ctx, nm.namespace, filter) } -func (nm *networkMap) GetIdentitiesWithVerifiers(ctx context.Context, filter database.AndFilter) ([]*core.IdentityWithVerifiers, *database.FilterResult, error) { +func (nm *networkMap) GetIdentitiesWithVerifiers(ctx context.Context, filter ffapi.AndFilter) ([]*core.IdentityWithVerifiers, *ffapi.FilterResult, error) { identities, res, err := nm.database.GetIdentities(ctx, nm.namespace, filter) if err != nil { return nil, nil, err @@ -181,7 +182,7 @@ func (nm *networkMap) GetIdentitiesWithVerifiers(ctx context.Context, filter dat return idsWithVerifiers, res, err } -func (nm *networkMap) GetIdentityVerifiers(ctx context.Context, id string, filter database.AndFilter) ([]*core.Verifier, *database.FilterResult, error) { +func (nm *networkMap) GetIdentityVerifiers(ctx context.Context, id string, filter ffapi.AndFilter) ([]*core.Verifier, *ffapi.FilterResult, error) { identity, err := nm.GetIdentityByID(ctx, id) if err != nil { return nil, nil, err @@ -221,6 +222,6 @@ func (nm *networkMap) GetVerifierByHash(ctx context.Context, hash string) (*core return verifier, nil } -func (nm *networkMap) GetVerifiers(ctx context.Context, filter database.AndFilter) ([]*core.Verifier, *database.FilterResult, error) { +func (nm *networkMap) GetVerifiers(ctx context.Context, filter ffapi.AndFilter) ([]*core.Verifier, *ffapi.FilterResult, error) { return nm.database.GetVerifiers(ctx, nm.namespace, filter) } diff --git a/internal/networkmap/manager.go b/internal/networkmap/manager.go index feaafda3fd..bb45434704 100644 --- a/internal/networkmap/manager.go +++ b/internal/networkmap/manager.go @@ -19,6 +19,7 @@ package networkmap import ( "context" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly/internal/coremsgs" "github.com/hyperledger/firefly/internal/definitions" @@ -38,18 +39,18 @@ type Manager interface { UpdateIdentity(ctx context.Context, id string, dto *core.IdentityUpdateDTO, waitConfirm bool) (identity *core.Identity, err error) GetOrganizationByNameOrID(ctx context.Context, nameOrID string) (*core.Identity, error) - GetOrganizations(ctx context.Context, filter database.AndFilter) ([]*core.Identity, *database.FilterResult, error) - GetOrganizationsWithVerifiers(ctx context.Context, filter database.AndFilter) ([]*core.IdentityWithVerifiers, *database.FilterResult, error) + GetOrganizations(ctx context.Context, filter ffapi.AndFilter) ([]*core.Identity, *ffapi.FilterResult, error) + GetOrganizationsWithVerifiers(ctx context.Context, filter ffapi.AndFilter) ([]*core.IdentityWithVerifiers, *ffapi.FilterResult, error) GetNodeByNameOrID(ctx context.Context, nameOrID string) (*core.Identity, error) - GetNodes(ctx context.Context, filter database.AndFilter) 
([]*core.Identity, *database.FilterResult, error) + GetNodes(ctx context.Context, filter ffapi.AndFilter) ([]*core.Identity, *ffapi.FilterResult, error) GetIdentityByID(ctx context.Context, id string) (*core.Identity, error) GetIdentityByIDWithVerifiers(ctx context.Context, id string) (*core.IdentityWithVerifiers, error) GetIdentityByDID(ctx context.Context, did string) (*core.Identity, error) GetIdentityByDIDWithVerifiers(ctx context.Context, did string) (*core.IdentityWithVerifiers, error) - GetIdentities(ctx context.Context, filter database.AndFilter) ([]*core.Identity, *database.FilterResult, error) - GetIdentitiesWithVerifiers(ctx context.Context, filter database.AndFilter) ([]*core.IdentityWithVerifiers, *database.FilterResult, error) - GetIdentityVerifiers(ctx context.Context, id string, filter database.AndFilter) ([]*core.Verifier, *database.FilterResult, error) - GetVerifiers(ctx context.Context, filter database.AndFilter) ([]*core.Verifier, *database.FilterResult, error) + GetIdentities(ctx context.Context, filter ffapi.AndFilter) ([]*core.Identity, *ffapi.FilterResult, error) + GetIdentitiesWithVerifiers(ctx context.Context, filter ffapi.AndFilter) ([]*core.IdentityWithVerifiers, *ffapi.FilterResult, error) + GetIdentityVerifiers(ctx context.Context, id string, filter ffapi.AndFilter) ([]*core.Verifier, *ffapi.FilterResult, error) + GetVerifiers(ctx context.Context, filter ffapi.AndFilter) ([]*core.Verifier, *ffapi.FilterResult, error) GetVerifierByHash(ctx context.Context, hash string) (*core.Verifier, error) GetDIDDocForIndentityByID(ctx context.Context, id string) (*DIDDocument, error) GetDIDDocForIndentityByDID(ctx context.Context, did string) (*DIDDocument, error) diff --git a/internal/operations/manager_test.go b/internal/operations/manager_test.go index ef54634174..d61bd0e989 100644 --- a/internal/operations/manager_test.go +++ b/internal/operations/manager_test.go @@ -23,6 +23,7 @@ import ( "time" "github.com/hyperledger/firefly-common/pkg/config" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly/internal/cache" "github.com/hyperledger/firefly/internal/coreconfig" @@ -263,7 +264,7 @@ func TestRetryOperationSuccess(t *testing.T) { assert.Equal(t, core.OpTypeBlockchainPinBatch, newOp.Type) return true })).Return(nil) - mdi.On("UpdateOperation", ctx, "ns1", op.ID, mock.MatchedBy(func(update database.Update) bool { + mdi.On("UpdateOperation", ctx, "ns1", op.ID, mock.MatchedBy(func(update ffapi.Update) bool { info, err := update.Finalize() assert.NoError(t, err) assert.Equal(t, 1, len(info.SetOperations)) diff --git a/internal/operations/operation_updater_test.go b/internal/operations/operation_updater_test.go index 1d69745dbf..159f1a52b4 100644 --- a/internal/operations/operation_updater_test.go +++ b/internal/operations/operation_updater_test.go @@ -23,6 +23,7 @@ import ( "time" "github.com/hyperledger/firefly-common/pkg/config" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly/internal/cache" "github.com/hyperledger/firefly/internal/coreconfig" @@ -69,8 +70,8 @@ func newTestOperationUpdaterCommon(t *testing.T, dbCapabilities *database.Capabi return newOperationUpdater(context.Background(), mom, mdi, txHelper) } -func updateMatcher(vals [][]string) func(database.Update) bool { - return func(update database.Update) bool { +func updateMatcher(vals [][]string) func(ffapi.Update) bool { + return func(update 
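As the matcher above illustrates, plugin and mock code now receives ffapi.Update in place of database.Update, while the Finalize contract (an UpdateInfo carrying SetOperations of field/value pairs) is carried over from the deleted pkg/database/update.go. A sketch of inspecting an update on the consumer side, assuming the query factories keep their NewUpdate constructors after the move and that ffapi.FieldSerialization remains a driver.Valuer:

    package main

    import (
        "context"
        "fmt"

        "github.com/hyperledger/firefly-common/pkg/ffapi"
        "github.com/hyperledger/firefly/pkg/database"
    )

    // logUpdate takes the firefly-common update type that the plugin interfaces now use.
    func logUpdate(u ffapi.Update) error {
        info, err := u.Finalize()
        if err != nil {
            return err
        }
        for _, op := range info.SetOperations {
            v, _ := op.Value.Value() // FieldSerialization is assumed to stay a driver.Valuer
            fmt.Printf("set %s = %v\n", op.Field, v)
        }
        return nil
    }

    func main() {
        ctx := context.Background()
        update := database.MessageQueryFactory.NewUpdate(ctx).
            Set("author", "0x1234").
            Set("tag", "example")
        _ = logUpdate(update)
    }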
ffapi.Update) bool { info, _ := update.Finalize() if len(info.SetOperations) != len(vals) { fmt.Printf("Failed: %d != %d\n", len(info.SetOperations), len(vals)) diff --git a/internal/orchestrator/data_query.go b/internal/orchestrator/data_query.go index dbaea570f5..2f3b8cfc79 100644 --- a/internal/orchestrator/data_query.go +++ b/internal/orchestrator/data_query.go @@ -20,6 +20,7 @@ import ( "context" "database/sql/driver" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly/internal/coremsgs" @@ -39,7 +40,7 @@ func (or *orchestrator) GetTransactionByID(ctx context.Context, id string) (*cor return or.txHelper.GetTransactionByIDCached(ctx, u) } -func (or *orchestrator) GetTransactionOperations(ctx context.Context, id string) ([]*core.Operation, *database.FilterResult, error) { +func (or *orchestrator) GetTransactionOperations(ctx context.Context, id string) ([]*core.Operation, *ffapi.FilterResult, error) { u, err := fftypes.ParseUUID(ctx, id) if err != nil { return nil, nil, err @@ -147,15 +148,15 @@ func (or *orchestrator) GetEventByIDWithReference(ctx context.Context, id string return or.events.EnrichEvent(ctx, event) } -func (or *orchestrator) GetTransactions(ctx context.Context, filter database.AndFilter) ([]*core.Transaction, *database.FilterResult, error) { +func (or *orchestrator) GetTransactions(ctx context.Context, filter ffapi.AndFilter) ([]*core.Transaction, *ffapi.FilterResult, error) { return or.database().GetTransactions(ctx, or.namespace.Name, filter) } -func (or *orchestrator) GetMessages(ctx context.Context, filter database.AndFilter) ([]*core.Message, *database.FilterResult, error) { +func (or *orchestrator) GetMessages(ctx context.Context, filter ffapi.AndFilter) ([]*core.Message, *ffapi.FilterResult, error) { return or.database().GetMessages(ctx, or.namespace.Name, filter) } -func (or *orchestrator) GetMessagesWithData(ctx context.Context, filter database.AndFilter) ([]*core.MessageInOut, *database.FilterResult, error) { +func (or *orchestrator) GetMessagesWithData(ctx context.Context, filter ffapi.AndFilter) ([]*core.MessageInOut, *ffapi.FilterResult, error) { msgs, fr, err := or.database().GetMessages(ctx, or.namespace.Name, filter) if err != nil { return nil, nil, err @@ -213,7 +214,7 @@ func (or *orchestrator) GetMessageTransaction(ctx context.Context, id string) (* return or.txHelper.GetTransactionByIDCached(ctx, txID) } -func (or *orchestrator) GetMessageEvents(ctx context.Context, id string, filter database.AndFilter) ([]*core.Event, *database.FilterResult, error) { +func (or *orchestrator) GetMessageEvents(ctx context.Context, id string, filter ffapi.AndFilter) ([]*core.Event, *ffapi.FilterResult, error) { msg, err := or.getMessageByID(ctx, id) if err != nil || msg == nil { return nil, nil, err @@ -230,15 +231,15 @@ func (or *orchestrator) GetMessageEvents(ctx context.Context, id string, filter return or.database().GetEvents(ctx, or.namespace.Name, filter) } -func (or *orchestrator) GetBatches(ctx context.Context, filter database.AndFilter) ([]*core.BatchPersisted, *database.FilterResult, error) { +func (or *orchestrator) GetBatches(ctx context.Context, filter ffapi.AndFilter) ([]*core.BatchPersisted, *ffapi.FilterResult, error) { return or.database().GetBatches(ctx, or.namespace.Name, filter) } -func (or *orchestrator) GetData(ctx context.Context, filter database.AndFilter) (core.DataArray, *database.FilterResult, error) { +func (or 
*orchestrator) GetData(ctx context.Context, filter ffapi.AndFilter) (core.DataArray, *ffapi.FilterResult, error) { return or.database().GetData(ctx, or.namespace.Name, filter) } -func (or *orchestrator) GetMessagesForData(ctx context.Context, id string, filter database.AndFilter) ([]*core.Message, *database.FilterResult, error) { +func (or *orchestrator) GetMessagesForData(ctx context.Context, id string, filter ffapi.AndFilter) ([]*core.Message, *ffapi.FilterResult, error) { u, err := fftypes.ParseUUID(ctx, id) if err != nil { return nil, nil, err @@ -246,15 +247,15 @@ func (or *orchestrator) GetMessagesForData(ctx context.Context, id string, filte return or.database().GetMessagesForData(ctx, or.namespace.Name, u, filter) } -func (or *orchestrator) GetDatatypes(ctx context.Context, filter database.AndFilter) ([]*core.Datatype, *database.FilterResult, error) { +func (or *orchestrator) GetDatatypes(ctx context.Context, filter ffapi.AndFilter) ([]*core.Datatype, *ffapi.FilterResult, error) { return or.database().GetDatatypes(ctx, or.namespace.Name, filter) } -func (or *orchestrator) GetOperations(ctx context.Context, filter database.AndFilter) ([]*core.Operation, *database.FilterResult, error) { +func (or *orchestrator) GetOperations(ctx context.Context, filter ffapi.AndFilter) ([]*core.Operation, *ffapi.FilterResult, error) { return or.database().GetOperations(ctx, or.namespace.Name, filter) } -func (or *orchestrator) GetEvents(ctx context.Context, filter database.AndFilter) ([]*core.Event, *database.FilterResult, error) { +func (or *orchestrator) GetEvents(ctx context.Context, filter ffapi.AndFilter) ([]*core.Event, *ffapi.FilterResult, error) { return or.database().GetEvents(ctx, or.namespace.Name, filter) } @@ -266,11 +267,11 @@ func (or *orchestrator) GetBlockchainEventByID(ctx context.Context, id string) ( return or.txHelper.GetBlockchainEventByIDCached(ctx, u) } -func (or *orchestrator) GetBlockchainEvents(ctx context.Context, filter database.AndFilter) ([]*core.BlockchainEvent, *database.FilterResult, error) { +func (or *orchestrator) GetBlockchainEvents(ctx context.Context, filter ffapi.AndFilter) ([]*core.BlockchainEvent, *ffapi.FilterResult, error) { return or.database().GetBlockchainEvents(ctx, or.namespace.Name, filter) } -func (or *orchestrator) GetTransactionBlockchainEvents(ctx context.Context, id string) ([]*core.BlockchainEvent, *database.FilterResult, error) { +func (or *orchestrator) GetTransactionBlockchainEvents(ctx context.Context, id string) ([]*core.BlockchainEvent, *ffapi.FilterResult, error) { u, err := fftypes.ParseUUID(ctx, id) if err != nil { return nil, nil, err @@ -279,11 +280,11 @@ func (or *orchestrator) GetTransactionBlockchainEvents(ctx context.Context, id s return or.database().GetBlockchainEvents(ctx, or.namespace.Name, fb.And(fb.Eq("tx.id", u))) } -func (or *orchestrator) GetPins(ctx context.Context, filter database.AndFilter) ([]*core.Pin, *database.FilterResult, error) { +func (or *orchestrator) GetPins(ctx context.Context, filter ffapi.AndFilter) ([]*core.Pin, *ffapi.FilterResult, error) { return or.database().GetPins(ctx, or.namespace.Name, filter) } -func (or *orchestrator) GetEventsWithReferences(ctx context.Context, filter database.AndFilter) ([]*core.EnrichedEvent, *database.FilterResult, error) { +func (or *orchestrator) GetEventsWithReferences(ctx context.Context, filter ffapi.AndFilter) ([]*core.EnrichedEvent, *ffapi.FilterResult, error) { events, fr, err := or.database().GetEvents(ctx, or.namespace.Name, filter) if err != nil { return nil, 
nil, err diff --git a/internal/orchestrator/data_query_test.go b/internal/orchestrator/data_query_test.go index b30d807e35..40be6db082 100644 --- a/internal/orchestrator/data_query_test.go +++ b/internal/orchestrator/data_query_test.go @@ -21,6 +21,7 @@ import ( "fmt" "testing" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly/pkg/core" "github.com/hyperledger/firefly/pkg/database" @@ -408,7 +409,7 @@ func TestGetMessageEventsOk(t *testing.T) { f := fb.And(fb.Eq("type", core.EventTypeMessageConfirmed)) _, _, err := or.GetMessageEvents(context.Background(), fftypes.NewUUID().String(), f) assert.NoError(t, err) - calculatedFilter, err := or.mdi.Calls[1].Arguments[2].(database.Filter).Finalize() + calculatedFilter, err := or.mdi.Calls[1].Arguments[2].(ffapi.Filter).Finalize() assert.NoError(t, err) assert.Equal(t, fmt.Sprintf( `( type == 'message_confirmed' ) && ( reference IN ['%s','%s','%s'] )`, diff --git a/internal/orchestrator/orchestrator.go b/internal/orchestrator/orchestrator.go index edfabc4c06..ca9bdffa3b 100644 --- a/internal/orchestrator/orchestrator.go +++ b/internal/orchestrator/orchestrator.go @@ -20,6 +20,7 @@ import ( "context" "github.com/hyperledger/firefly-common/pkg/auth" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" @@ -74,7 +75,7 @@ type Orchestrator interface { GetStatus(ctx context.Context) (*core.NamespaceStatus, error) // Subscription management - GetSubscriptions(ctx context.Context, filter database.AndFilter) ([]*core.Subscription, *database.FilterResult, error) + GetSubscriptions(ctx context.Context, filter ffapi.AndFilter) ([]*core.Subscription, *ffapi.FilterResult, error) GetSubscriptionByID(ctx context.Context, id string) (*core.Subscription, error) GetSubscriptionByIDWithStatus(ctx context.Context, id string) (*core.SubscriptionWithStatus, error) CreateSubscription(ctx context.Context, subDef *core.Subscription) (*core.Subscription, error) @@ -84,34 +85,34 @@ type Orchestrator interface { // Data Query GetNamespace(ctx context.Context) *core.Namespace GetTransactionByID(ctx context.Context, id string) (*core.Transaction, error) - GetTransactionOperations(ctx context.Context, id string) ([]*core.Operation, *database.FilterResult, error) - GetTransactionBlockchainEvents(ctx context.Context, id string) ([]*core.BlockchainEvent, *database.FilterResult, error) + GetTransactionOperations(ctx context.Context, id string) ([]*core.Operation, *ffapi.FilterResult, error) + GetTransactionBlockchainEvents(ctx context.Context, id string) ([]*core.BlockchainEvent, *ffapi.FilterResult, error) GetTransactionStatus(ctx context.Context, id string) (*core.TransactionStatus, error) - GetTransactions(ctx context.Context, filter database.AndFilter) ([]*core.Transaction, *database.FilterResult, error) + GetTransactions(ctx context.Context, filter ffapi.AndFilter) ([]*core.Transaction, *ffapi.FilterResult, error) GetMessageByID(ctx context.Context, id string) (*core.Message, error) GetMessageByIDWithData(ctx context.Context, id string) (*core.MessageInOut, error) - GetMessages(ctx context.Context, filter database.AndFilter) ([]*core.Message, *database.FilterResult, error) - GetMessagesWithData(ctx context.Context, filter database.AndFilter) ([]*core.MessageInOut, *database.FilterResult, error) + GetMessages(ctx context.Context, filter 
ffapi.AndFilter) ([]*core.Message, *ffapi.FilterResult, error) + GetMessagesWithData(ctx context.Context, filter ffapi.AndFilter) ([]*core.MessageInOut, *ffapi.FilterResult, error) GetMessageTransaction(ctx context.Context, id string) (*core.Transaction, error) - GetMessageEvents(ctx context.Context, id string, filter database.AndFilter) ([]*core.Event, *database.FilterResult, error) + GetMessageEvents(ctx context.Context, id string, filter ffapi.AndFilter) ([]*core.Event, *ffapi.FilterResult, error) GetMessageData(ctx context.Context, id string) (core.DataArray, error) - GetMessagesForData(ctx context.Context, dataID string, filter database.AndFilter) ([]*core.Message, *database.FilterResult, error) + GetMessagesForData(ctx context.Context, dataID string, filter ffapi.AndFilter) ([]*core.Message, *ffapi.FilterResult, error) GetBatchByID(ctx context.Context, id string) (*core.BatchPersisted, error) - GetBatches(ctx context.Context, filter database.AndFilter) ([]*core.BatchPersisted, *database.FilterResult, error) + GetBatches(ctx context.Context, filter ffapi.AndFilter) ([]*core.BatchPersisted, *ffapi.FilterResult, error) GetDataByID(ctx context.Context, id string) (*core.Data, error) - GetData(ctx context.Context, filter database.AndFilter) (core.DataArray, *database.FilterResult, error) + GetData(ctx context.Context, filter ffapi.AndFilter) (core.DataArray, *ffapi.FilterResult, error) GetDatatypeByID(ctx context.Context, id string) (*core.Datatype, error) GetDatatypeByName(ctx context.Context, name, version string) (*core.Datatype, error) - GetDatatypes(ctx context.Context, filter database.AndFilter) ([]*core.Datatype, *database.FilterResult, error) + GetDatatypes(ctx context.Context, filter ffapi.AndFilter) ([]*core.Datatype, *ffapi.FilterResult, error) GetOperationByID(ctx context.Context, id string) (*core.Operation, error) - GetOperations(ctx context.Context, filter database.AndFilter) ([]*core.Operation, *database.FilterResult, error) + GetOperations(ctx context.Context, filter ffapi.AndFilter) ([]*core.Operation, *ffapi.FilterResult, error) GetEventByID(ctx context.Context, id string) (*core.Event, error) GetEventByIDWithReference(ctx context.Context, id string) (*core.EnrichedEvent, error) - GetEvents(ctx context.Context, filter database.AndFilter) ([]*core.Event, *database.FilterResult, error) - GetEventsWithReferences(ctx context.Context, filter database.AndFilter) ([]*core.EnrichedEvent, *database.FilterResult, error) + GetEvents(ctx context.Context, filter ffapi.AndFilter) ([]*core.Event, *ffapi.FilterResult, error) + GetEventsWithReferences(ctx context.Context, filter ffapi.AndFilter) ([]*core.EnrichedEvent, *ffapi.FilterResult, error) GetBlockchainEventByID(ctx context.Context, id string) (*core.BlockchainEvent, error) - GetBlockchainEvents(ctx context.Context, filter database.AndFilter) ([]*core.BlockchainEvent, *database.FilterResult, error) - GetPins(ctx context.Context, filter database.AndFilter) ([]*core.Pin, *database.FilterResult, error) + GetBlockchainEvents(ctx context.Context, filter ffapi.AndFilter) ([]*core.BlockchainEvent, *ffapi.FilterResult, error) + GetPins(ctx context.Context, filter ffapi.AndFilter) ([]*core.Pin, *ffapi.FilterResult, error) RewindPins(ctx context.Context, rewind *core.PinRewind) (*core.PinRewind, error) // Charts diff --git a/internal/orchestrator/subscriptions.go b/internal/orchestrator/subscriptions.go index af12327d3d..63ac480f96 100644 --- a/internal/orchestrator/subscriptions.go +++ b/internal/orchestrator/subscriptions.go @@ -19,12 
+19,12 @@ package orchestrator import ( "context" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly/internal/coremsgs" "github.com/hyperledger/firefly/internal/events/system" "github.com/hyperledger/firefly/pkg/core" - "github.com/hyperledger/firefly/pkg/database" ) func (or *orchestrator) CreateSubscription(ctx context.Context, subDef *core.Subscription) (*core.Subscription, error) { @@ -65,7 +65,7 @@ func (or *orchestrator) DeleteSubscription(ctx context.Context, id string) error return or.events.DeleteDurableSubscription(ctx, sub) } -func (or *orchestrator) GetSubscriptions(ctx context.Context, filter database.AndFilter) ([]*core.Subscription, *database.FilterResult, error) { +func (or *orchestrator) GetSubscriptions(ctx context.Context, filter ffapi.AndFilter) ([]*core.Subscription, *ffapi.FilterResult, error) { return or.database().GetSubscriptions(ctx, or.namespace.Name, filter) } diff --git a/internal/privatemessaging/groupmanager.go b/internal/privatemessaging/groupmanager.go index 14f6206ced..0a343be16d 100644 --- a/internal/privatemessaging/groupmanager.go +++ b/internal/privatemessaging/groupmanager.go @@ -20,6 +20,7 @@ import ( "context" "encoding/json" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" @@ -33,7 +34,7 @@ import ( type GroupManager interface { GetGroupByID(ctx context.Context, id string) (*core.Group, error) - GetGroups(ctx context.Context, filter database.AndFilter) ([]*core.Group, *database.FilterResult, error) + GetGroups(ctx context.Context, filter ffapi.AndFilter) ([]*core.Group, *ffapi.FilterResult, error) ResolveInitGroup(ctx context.Context, msg *core.Message, creator *core.Member) (*core.Group, error) EnsureLocalGroup(ctx context.Context, group *core.Group, creator *core.Member) (ok bool, err error) } @@ -173,7 +174,7 @@ func (gm *groupManager) GetGroupByID(ctx context.Context, hash string) (*core.Gr return gm.database.GetGroupByHash(ctx, gm.namespace.Name, h) } -func (gm *groupManager) GetGroups(ctx context.Context, filter database.AndFilter) ([]*core.Group, *database.FilterResult, error) { +func (gm *groupManager) GetGroups(ctx context.Context, filter ffapi.AndFilter) ([]*core.Group, *ffapi.FilterResult, error) { return gm.database.GetGroups(ctx, gm.namespace.Name, filter) } diff --git a/internal/shareddownload/download_manager_test.go b/internal/shareddownload/download_manager_test.go index c84ddc6094..b1c1d9f58c 100644 --- a/internal/shareddownload/download_manager_test.go +++ b/internal/shareddownload/download_manager_test.go @@ -25,6 +25,7 @@ import ( "time" "github.com/hyperledger/firefly-common/pkg/config" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly/internal/coreconfig" "github.com/hyperledger/firefly/mocks/databasemocks" @@ -234,7 +235,7 @@ func TestDownloadManagerStartupRecoveryCombinations(t *testing.T) { mdi := dm.database.(*databasemocks.Plugin) mdi.On("GetOperations", mock.Anything, "ns1", mock.Anything).Return([]*core.Operation{}, nil, fmt.Errorf("initial error")).Once() - mdi.On("GetOperations", mock.Anything, "ns1", mock.MatchedBy(func(filter database.Filter) bool { + mdi.On("GetOperations", mock.Anything, "ns1", mock.MatchedBy(func(filter ffapi.Filter) bool { fi, 
err := filter.Finalize() assert.NoError(t, err) return fi.Skip == 0 && fi.Limit == 25 @@ -270,7 +271,7 @@ func TestDownloadManagerStartupRecoveryCombinations(t *testing.T) { }, }, }, nil, nil).Once() - mdi.On("GetOperations", mock.Anything, "ns1", mock.MatchedBy(func(filter database.Filter) bool { + mdi.On("GetOperations", mock.Anything, "ns1", mock.MatchedBy(func(filter ffapi.Filter) bool { fi, err := filter.Finalize() assert.NoError(t, err) return fi.Skip == 25 && fi.Limit == 25 diff --git a/internal/txcommon/txcommon_test.go b/internal/txcommon/txcommon_test.go index 5972333e07..dda4a479ff 100644 --- a/internal/txcommon/txcommon_test.go +++ b/internal/txcommon/txcommon_test.go @@ -24,6 +24,7 @@ import ( "time" "github.com/hyperledger/firefly-common/pkg/config" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly/internal/cache" "github.com/hyperledger/firefly/internal/coreconfig" @@ -379,7 +380,7 @@ func TestAddBlockchainTX(t *testing.T) { Created: fftypes.Now(), BlockchainIDs: fftypes.FFStringArray{"0x111111"}, } - mdi.On("UpdateTransaction", ctx, "ns1", tx.ID, mock.MatchedBy(func(u database.Update) bool { + mdi.On("UpdateTransaction", ctx, "ns1", tx.ID, mock.MatchedBy(func(u ffapi.Update) bool { info, _ := u.Finalize() assert.Equal(t, 1, len(info.SetOperations)) assert.Equal(t, "blockchainids", info.SetOperations[0].Field) diff --git a/mocks/assetmocks/manager.go b/mocks/assetmocks/manager.go index e1a2d86bfd..230043a4ad 100644 --- a/mocks/assetmocks/manager.go +++ b/mocks/assetmocks/manager.go @@ -5,8 +5,8 @@ package assetmocks import ( context "context" + ffapi "github.com/hyperledger/firefly-common/pkg/ffapi" core "github.com/hyperledger/firefly/pkg/core" - database "github.com/hyperledger/firefly/pkg/database" fftypes "github.com/hyperledger/firefly-common/pkg/fftypes" @@ -81,11 +81,11 @@ func (_m *Manager) CreateTokenPool(ctx context.Context, pool *core.TokenPoolInpu } // GetTokenAccountPools provides a mock function with given fields: ctx, key, filter -func (_m *Manager) GetTokenAccountPools(ctx context.Context, key string, filter database.AndFilter) ([]*core.TokenAccountPool, *database.FilterResult, error) { +func (_m *Manager) GetTokenAccountPools(ctx context.Context, key string, filter ffapi.AndFilter) ([]*core.TokenAccountPool, *ffapi.FilterResult, error) { ret := _m.Called(ctx, key, filter) var r0 []*core.TokenAccountPool - if rf, ok := ret.Get(0).(func(context.Context, string, database.AndFilter) []*core.TokenAccountPool); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.AndFilter) []*core.TokenAccountPool); ok { r0 = rf(ctx, key, filter) } else { if ret.Get(0) != nil { @@ -93,17 +93,17 @@ func (_m *Manager) GetTokenAccountPools(ctx context.Context, key string, filter } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, key, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.AndFilter) error); ok { r2 = rf(ctx, key, filter) } else { r2 = ret.Error(2) @@ -113,11 +113,11 @@ func (_m *Manager) 
GetTokenAccountPools(ctx context.Context, key string, filter } // GetTokenAccounts provides a mock function with given fields: ctx, filter -func (_m *Manager) GetTokenAccounts(ctx context.Context, filter database.AndFilter) ([]*core.TokenAccount, *database.FilterResult, error) { +func (_m *Manager) GetTokenAccounts(ctx context.Context, filter ffapi.AndFilter) ([]*core.TokenAccount, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.TokenAccount - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*core.TokenAccount); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.AndFilter) []*core.TokenAccount); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -125,17 +125,17 @@ func (_m *Manager) GetTokenAccounts(ctx context.Context, filter database.AndFilt } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) @@ -145,11 +145,11 @@ func (_m *Manager) GetTokenAccounts(ctx context.Context, filter database.AndFilt } // GetTokenApprovals provides a mock function with given fields: ctx, filter -func (_m *Manager) GetTokenApprovals(ctx context.Context, filter database.AndFilter) ([]*core.TokenApproval, *database.FilterResult, error) { +func (_m *Manager) GetTokenApprovals(ctx context.Context, filter ffapi.AndFilter) ([]*core.TokenApproval, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.TokenApproval - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*core.TokenApproval); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.AndFilter) []*core.TokenApproval); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -157,17 +157,17 @@ func (_m *Manager) GetTokenApprovals(ctx context.Context, filter database.AndFil } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) @@ -177,11 +177,11 @@ func (_m *Manager) GetTokenApprovals(ctx context.Context, filter database.AndFil } // GetTokenBalances provides a mock function with given fields: ctx, filter -func (_m *Manager) GetTokenBalances(ctx context.Context, filter database.AndFilter) ([]*core.TokenBalance, *database.FilterResult, error) { +func (_m *Manager) GetTokenBalances(ctx context.Context, filter ffapi.AndFilter) ([]*core.TokenBalance, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.TokenBalance - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*core.TokenBalance); ok { + if rf, ok := 
ret.Get(0).(func(context.Context, ffapi.AndFilter) []*core.TokenBalance); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -189,17 +189,17 @@ func (_m *Manager) GetTokenBalances(ctx context.Context, filter database.AndFilt } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) @@ -271,11 +271,11 @@ func (_m *Manager) GetTokenPoolByNameOrID(ctx context.Context, poolNameOrID stri } // GetTokenPools provides a mock function with given fields: ctx, filter -func (_m *Manager) GetTokenPools(ctx context.Context, filter database.AndFilter) ([]*core.TokenPool, *database.FilterResult, error) { +func (_m *Manager) GetTokenPools(ctx context.Context, filter ffapi.AndFilter) ([]*core.TokenPool, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.TokenPool - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*core.TokenPool); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.AndFilter) []*core.TokenPool); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -283,17 +283,17 @@ func (_m *Manager) GetTokenPools(ctx context.Context, filter database.AndFilter) } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) @@ -326,11 +326,11 @@ func (_m *Manager) GetTokenTransferByID(ctx context.Context, id string) (*core.T } // GetTokenTransfers provides a mock function with given fields: ctx, filter -func (_m *Manager) GetTokenTransfers(ctx context.Context, filter database.AndFilter) ([]*core.TokenTransfer, *database.FilterResult, error) { +func (_m *Manager) GetTokenTransfers(ctx context.Context, filter ffapi.AndFilter) ([]*core.TokenTransfer, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.TokenTransfer - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*core.TokenTransfer); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.AndFilter) []*core.TokenTransfer); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -338,17 +338,17 @@ func (_m *Manager) GetTokenTransfers(ctx context.Context, filter database.AndFil } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = 
ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) diff --git a/mocks/contractmocks/manager.go b/mocks/contractmocks/manager.go index 57472ae13c..298fe7e13a 100644 --- a/mocks/contractmocks/manager.go +++ b/mocks/contractmocks/manager.go @@ -7,7 +7,7 @@ import ( core "github.com/hyperledger/firefly/pkg/core" - database "github.com/hyperledger/firefly/pkg/database" + ffapi "github.com/hyperledger/firefly-common/pkg/ffapi" fftypes "github.com/hyperledger/firefly-common/pkg/fftypes" @@ -172,11 +172,11 @@ func (_m *Manager) GetContractAPIInterface(ctx context.Context, apiName string) } // GetContractAPIListeners provides a mock function with given fields: ctx, apiName, eventPath, filter -func (_m *Manager) GetContractAPIListeners(ctx context.Context, apiName string, eventPath string, filter database.AndFilter) ([]*core.ContractListener, *database.FilterResult, error) { +func (_m *Manager) GetContractAPIListeners(ctx context.Context, apiName string, eventPath string, filter ffapi.AndFilter) ([]*core.ContractListener, *ffapi.FilterResult, error) { ret := _m.Called(ctx, apiName, eventPath, filter) var r0 []*core.ContractListener - if rf, ok := ret.Get(0).(func(context.Context, string, string, database.AndFilter) []*core.ContractListener); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, string, ffapi.AndFilter) []*core.ContractListener); ok { r0 = rf(ctx, apiName, eventPath, filter) } else { if ret.Get(0) != nil { @@ -184,17 +184,17 @@ func (_m *Manager) GetContractAPIListeners(ctx context.Context, apiName string, } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, string, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, string, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, apiName, eventPath, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, string, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, string, ffapi.AndFilter) error); ok { r2 = rf(ctx, apiName, eventPath, filter) } else { r2 = ret.Error(2) @@ -204,11 +204,11 @@ func (_m *Manager) GetContractAPIListeners(ctx context.Context, apiName string, } // GetContractAPIs provides a mock function with given fields: ctx, httpServerURL, filter -func (_m *Manager) GetContractAPIs(ctx context.Context, httpServerURL string, filter database.AndFilter) ([]*core.ContractAPI, *database.FilterResult, error) { +func (_m *Manager) GetContractAPIs(ctx context.Context, httpServerURL string, filter ffapi.AndFilter) ([]*core.ContractAPI, *ffapi.FilterResult, error) { ret := _m.Called(ctx, httpServerURL, filter) var r0 []*core.ContractAPI - if rf, ok := ret.Get(0).(func(context.Context, string, database.AndFilter) []*core.ContractAPI); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.AndFilter) []*core.ContractAPI); ok { r0 = rf(ctx, httpServerURL, filter) } else { if ret.Get(0) != nil { @@ -216,17 +216,17 @@ func (_m *Manager) GetContractAPIs(ctx context.Context, httpServerURL string, fi } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.AndFilter) 
*database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, httpServerURL, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.AndFilter) error); ok { r2 = rf(ctx, httpServerURL, filter) } else { r2 = ret.Error(2) @@ -282,11 +282,11 @@ func (_m *Manager) GetContractListenerByNameOrIDWithStatus(ctx context.Context, } // GetContractListeners provides a mock function with given fields: ctx, filter -func (_m *Manager) GetContractListeners(ctx context.Context, filter database.AndFilter) ([]*core.ContractListener, *database.FilterResult, error) { +func (_m *Manager) GetContractListeners(ctx context.Context, filter ffapi.AndFilter) ([]*core.ContractListener, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.ContractListener - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*core.ContractListener); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.AndFilter) []*core.ContractListener); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -294,17 +294,17 @@ func (_m *Manager) GetContractListeners(ctx context.Context, filter database.And } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) @@ -406,11 +406,11 @@ func (_m *Manager) GetFFIWithChildren(ctx context.Context, name string, version } // GetFFIs provides a mock function with given fields: ctx, filter -func (_m *Manager) GetFFIs(ctx context.Context, filter database.AndFilter) ([]*fftypes.FFI, *database.FilterResult, error) { +func (_m *Manager) GetFFIs(ctx context.Context, filter ffapi.AndFilter) ([]*fftypes.FFI, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*fftypes.FFI - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*fftypes.FFI); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.AndFilter) []*fftypes.FFI); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -418,17 +418,17 @@ func (_m *Manager) GetFFIs(ctx context.Context, filter database.AndFilter) ([]*f } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) diff --git a/mocks/networkmapmocks/manager.go 
b/mocks/networkmapmocks/manager.go index 02ac29fb42..5d9f7dbaf3 100644 --- a/mocks/networkmapmocks/manager.go +++ b/mocks/networkmapmocks/manager.go @@ -5,8 +5,8 @@ package networkmapmocks import ( context "context" + ffapi "github.com/hyperledger/firefly-common/pkg/ffapi" core "github.com/hyperledger/firefly/pkg/core" - database "github.com/hyperledger/firefly/pkg/database" mock "github.com/stretchr/testify/mock" @@ -65,11 +65,11 @@ func (_m *Manager) GetDIDDocForIndentityByID(ctx context.Context, id string) (*n } // GetIdentities provides a mock function with given fields: ctx, filter -func (_m *Manager) GetIdentities(ctx context.Context, filter database.AndFilter) ([]*core.Identity, *database.FilterResult, error) { +func (_m *Manager) GetIdentities(ctx context.Context, filter ffapi.AndFilter) ([]*core.Identity, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.Identity - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*core.Identity); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.AndFilter) []*core.Identity); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -77,17 +77,17 @@ func (_m *Manager) GetIdentities(ctx context.Context, filter database.AndFilter) } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) @@ -97,11 +97,11 @@ func (_m *Manager) GetIdentities(ctx context.Context, filter database.AndFilter) } // GetIdentitiesWithVerifiers provides a mock function with given fields: ctx, filter -func (_m *Manager) GetIdentitiesWithVerifiers(ctx context.Context, filter database.AndFilter) ([]*core.IdentityWithVerifiers, *database.FilterResult, error) { +func (_m *Manager) GetIdentitiesWithVerifiers(ctx context.Context, filter ffapi.AndFilter) ([]*core.IdentityWithVerifiers, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.IdentityWithVerifiers - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*core.IdentityWithVerifiers); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.AndFilter) []*core.IdentityWithVerifiers); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -109,17 +109,17 @@ func (_m *Manager) GetIdentitiesWithVerifiers(ctx context.Context, filter databa } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) @@ -221,11 +221,11 @@ func (_m *Manager) GetIdentityByIDWithVerifiers(ctx context.Context, id string) } // GetIdentityVerifiers 
provides a mock function with given fields: ctx, id, filter -func (_m *Manager) GetIdentityVerifiers(ctx context.Context, id string, filter database.AndFilter) ([]*core.Verifier, *database.FilterResult, error) { +func (_m *Manager) GetIdentityVerifiers(ctx context.Context, id string, filter ffapi.AndFilter) ([]*core.Verifier, *ffapi.FilterResult, error) { ret := _m.Called(ctx, id, filter) var r0 []*core.Verifier - if rf, ok := ret.Get(0).(func(context.Context, string, database.AndFilter) []*core.Verifier); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.AndFilter) []*core.Verifier); ok { r0 = rf(ctx, id, filter) } else { if ret.Get(0) != nil { @@ -233,17 +233,17 @@ func (_m *Manager) GetIdentityVerifiers(ctx context.Context, id string, filter d } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, id, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.AndFilter) error); ok { r2 = rf(ctx, id, filter) } else { r2 = ret.Error(2) @@ -276,11 +276,11 @@ func (_m *Manager) GetNodeByNameOrID(ctx context.Context, nameOrID string) (*cor } // GetNodes provides a mock function with given fields: ctx, filter -func (_m *Manager) GetNodes(ctx context.Context, filter database.AndFilter) ([]*core.Identity, *database.FilterResult, error) { +func (_m *Manager) GetNodes(ctx context.Context, filter ffapi.AndFilter) ([]*core.Identity, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.Identity - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*core.Identity); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.AndFilter) []*core.Identity); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -288,17 +288,17 @@ func (_m *Manager) GetNodes(ctx context.Context, filter database.AndFilter) ([]* } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) @@ -331,11 +331,11 @@ func (_m *Manager) GetOrganizationByNameOrID(ctx context.Context, nameOrID strin } // GetOrganizations provides a mock function with given fields: ctx, filter -func (_m *Manager) GetOrganizations(ctx context.Context, filter database.AndFilter) ([]*core.Identity, *database.FilterResult, error) { +func (_m *Manager) GetOrganizations(ctx context.Context, filter ffapi.AndFilter) ([]*core.Identity, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.Identity - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*core.Identity); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.AndFilter) []*core.Identity); ok 
{ r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -343,17 +343,17 @@ func (_m *Manager) GetOrganizations(ctx context.Context, filter database.AndFilt } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) @@ -363,11 +363,11 @@ func (_m *Manager) GetOrganizations(ctx context.Context, filter database.AndFilt } // GetOrganizationsWithVerifiers provides a mock function with given fields: ctx, filter -func (_m *Manager) GetOrganizationsWithVerifiers(ctx context.Context, filter database.AndFilter) ([]*core.IdentityWithVerifiers, *database.FilterResult, error) { +func (_m *Manager) GetOrganizationsWithVerifiers(ctx context.Context, filter ffapi.AndFilter) ([]*core.IdentityWithVerifiers, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.IdentityWithVerifiers - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*core.IdentityWithVerifiers); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.AndFilter) []*core.IdentityWithVerifiers); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -375,17 +375,17 @@ func (_m *Manager) GetOrganizationsWithVerifiers(ctx context.Context, filter dat } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) @@ -418,11 +418,11 @@ func (_m *Manager) GetVerifierByHash(ctx context.Context, hash string) (*core.Ve } // GetVerifiers provides a mock function with given fields: ctx, filter -func (_m *Manager) GetVerifiers(ctx context.Context, filter database.AndFilter) ([]*core.Verifier, *database.FilterResult, error) { +func (_m *Manager) GetVerifiers(ctx context.Context, filter ffapi.AndFilter) ([]*core.Verifier, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.Verifier - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*core.Verifier); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.AndFilter) []*core.Verifier); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -430,17 +430,17 @@ func (_m *Manager) GetVerifiers(ctx context.Context, filter database.AndFilter) } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = 
ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) diff --git a/mocks/orchestratormocks/orchestrator.go b/mocks/orchestratormocks/orchestrator.go index 675500f659..af2668652a 100644 --- a/mocks/orchestratormocks/orchestrator.go +++ b/mocks/orchestratormocks/orchestrator.go @@ -22,6 +22,8 @@ import ( events "github.com/hyperledger/firefly/internal/events" + ffapi "github.com/hyperledger/firefly-common/pkg/ffapi" + fftypes "github.com/hyperledger/firefly-common/pkg/fftypes" identity "github.com/hyperledger/firefly/internal/identity" @@ -252,11 +254,11 @@ func (_m *Orchestrator) GetBatchByID(ctx context.Context, id string) (*core.Batc } // GetBatches provides a mock function with given fields: ctx, filter -func (_m *Orchestrator) GetBatches(ctx context.Context, filter database.AndFilter) ([]*core.BatchPersisted, *database.FilterResult, error) { +func (_m *Orchestrator) GetBatches(ctx context.Context, filter ffapi.AndFilter) ([]*core.BatchPersisted, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.BatchPersisted - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*core.BatchPersisted); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.AndFilter) []*core.BatchPersisted); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -264,17 +266,17 @@ func (_m *Orchestrator) GetBatches(ctx context.Context, filter database.AndFilte } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) @@ -307,11 +309,11 @@ func (_m *Orchestrator) GetBlockchainEventByID(ctx context.Context, id string) ( } // GetBlockchainEvents provides a mock function with given fields: ctx, filter -func (_m *Orchestrator) GetBlockchainEvents(ctx context.Context, filter database.AndFilter) ([]*core.BlockchainEvent, *database.FilterResult, error) { +func (_m *Orchestrator) GetBlockchainEvents(ctx context.Context, filter ffapi.AndFilter) ([]*core.BlockchainEvent, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.BlockchainEvent - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*core.BlockchainEvent); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.AndFilter) []*core.BlockchainEvent); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -319,17 +321,17 @@ func (_m *Orchestrator) GetBlockchainEvents(ctx context.Context, filter database } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := 
ret.Get(2).(func(context.Context, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) @@ -362,11 +364,11 @@ func (_m *Orchestrator) GetChartHistogram(ctx context.Context, startTime int64, } // GetData provides a mock function with given fields: ctx, filter -func (_m *Orchestrator) GetData(ctx context.Context, filter database.AndFilter) (core.DataArray, *database.FilterResult, error) { +func (_m *Orchestrator) GetData(ctx context.Context, filter ffapi.AndFilter) (core.DataArray, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 core.DataArray - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) core.DataArray); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.AndFilter) core.DataArray); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -374,17 +376,17 @@ func (_m *Orchestrator) GetData(ctx context.Context, filter database.AndFilter) } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) @@ -463,11 +465,11 @@ func (_m *Orchestrator) GetDatatypeByName(ctx context.Context, name string, vers } // GetDatatypes provides a mock function with given fields: ctx, filter -func (_m *Orchestrator) GetDatatypes(ctx context.Context, filter database.AndFilter) ([]*core.Datatype, *database.FilterResult, error) { +func (_m *Orchestrator) GetDatatypes(ctx context.Context, filter ffapi.AndFilter) ([]*core.Datatype, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.Datatype - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*core.Datatype); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.AndFilter) []*core.Datatype); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -475,17 +477,17 @@ func (_m *Orchestrator) GetDatatypes(ctx context.Context, filter database.AndFil } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) @@ -541,11 +543,11 @@ func (_m *Orchestrator) GetEventByIDWithReference(ctx context.Context, id string } // GetEvents provides a mock function with given fields: ctx, filter -func (_m *Orchestrator) GetEvents(ctx context.Context, filter database.AndFilter) ([]*core.Event, *database.FilterResult, error) { +func (_m *Orchestrator) GetEvents(ctx context.Context, filter ffapi.AndFilter) ([]*core.Event, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 
[]*core.Event - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*core.Event); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.AndFilter) []*core.Event); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -553,17 +555,17 @@ func (_m *Orchestrator) GetEvents(ctx context.Context, filter database.AndFilter } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) @@ -573,11 +575,11 @@ func (_m *Orchestrator) GetEvents(ctx context.Context, filter database.AndFilter } // GetEventsWithReferences provides a mock function with given fields: ctx, filter -func (_m *Orchestrator) GetEventsWithReferences(ctx context.Context, filter database.AndFilter) ([]*core.EnrichedEvent, *database.FilterResult, error) { +func (_m *Orchestrator) GetEventsWithReferences(ctx context.Context, filter ffapi.AndFilter) ([]*core.EnrichedEvent, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.EnrichedEvent - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*core.EnrichedEvent); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.AndFilter) []*core.EnrichedEvent); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -585,17 +587,17 @@ func (_m *Orchestrator) GetEventsWithReferences(ctx context.Context, filter data } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) @@ -674,11 +676,11 @@ func (_m *Orchestrator) GetMessageData(ctx context.Context, id string) (core.Dat } // GetMessageEvents provides a mock function with given fields: ctx, id, filter -func (_m *Orchestrator) GetMessageEvents(ctx context.Context, id string, filter database.AndFilter) ([]*core.Event, *database.FilterResult, error) { +func (_m *Orchestrator) GetMessageEvents(ctx context.Context, id string, filter ffapi.AndFilter) ([]*core.Event, *ffapi.FilterResult, error) { ret := _m.Called(ctx, id, filter) var r0 []*core.Event - if rf, ok := ret.Get(0).(func(context.Context, string, database.AndFilter) []*core.Event); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.AndFilter) []*core.Event); ok { r0 = rf(ctx, id, filter) } else { if ret.Get(0) != nil { @@ -686,17 +688,17 @@ func (_m *Orchestrator) GetMessageEvents(ctx context.Context, id string, filter } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok 
:= ret.Get(1).(func(context.Context, string, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, id, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.AndFilter) error); ok { r2 = rf(ctx, id, filter) } else { r2 = ret.Error(2) @@ -729,11 +731,11 @@ func (_m *Orchestrator) GetMessageTransaction(ctx context.Context, id string) (* } // GetMessages provides a mock function with given fields: ctx, filter -func (_m *Orchestrator) GetMessages(ctx context.Context, filter database.AndFilter) ([]*core.Message, *database.FilterResult, error) { +func (_m *Orchestrator) GetMessages(ctx context.Context, filter ffapi.AndFilter) ([]*core.Message, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.Message - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*core.Message); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.AndFilter) []*core.Message); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -741,17 +743,17 @@ func (_m *Orchestrator) GetMessages(ctx context.Context, filter database.AndFilt } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) @@ -761,11 +763,11 @@ func (_m *Orchestrator) GetMessages(ctx context.Context, filter database.AndFilt } // GetMessagesForData provides a mock function with given fields: ctx, dataID, filter -func (_m *Orchestrator) GetMessagesForData(ctx context.Context, dataID string, filter database.AndFilter) ([]*core.Message, *database.FilterResult, error) { +func (_m *Orchestrator) GetMessagesForData(ctx context.Context, dataID string, filter ffapi.AndFilter) ([]*core.Message, *ffapi.FilterResult, error) { ret := _m.Called(ctx, dataID, filter) var r0 []*core.Message - if rf, ok := ret.Get(0).(func(context.Context, string, database.AndFilter) []*core.Message); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.AndFilter) []*core.Message); ok { r0 = rf(ctx, dataID, filter) } else { if ret.Get(0) != nil { @@ -773,17 +775,17 @@ func (_m *Orchestrator) GetMessagesForData(ctx context.Context, dataID string, f } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, dataID, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, string, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, string, ffapi.AndFilter) error); ok { r2 = rf(ctx, dataID, filter) } else { r2 = ret.Error(2) @@ -793,11 +795,11 @@ func (_m 
*Orchestrator) GetMessagesForData(ctx context.Context, dataID string, f } // GetMessagesWithData provides a mock function with given fields: ctx, filter -func (_m *Orchestrator) GetMessagesWithData(ctx context.Context, filter database.AndFilter) ([]*core.MessageInOut, *database.FilterResult, error) { +func (_m *Orchestrator) GetMessagesWithData(ctx context.Context, filter ffapi.AndFilter) ([]*core.MessageInOut, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.MessageInOut - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*core.MessageInOut); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.AndFilter) []*core.MessageInOut); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -805,17 +807,17 @@ func (_m *Orchestrator) GetMessagesWithData(ctx context.Context, filter database } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) @@ -864,11 +866,11 @@ func (_m *Orchestrator) GetOperationByID(ctx context.Context, id string) (*core. } // GetOperations provides a mock function with given fields: ctx, filter -func (_m *Orchestrator) GetOperations(ctx context.Context, filter database.AndFilter) ([]*core.Operation, *database.FilterResult, error) { +func (_m *Orchestrator) GetOperations(ctx context.Context, filter ffapi.AndFilter) ([]*core.Operation, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.Operation - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*core.Operation); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.AndFilter) []*core.Operation); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -876,17 +878,17 @@ func (_m *Orchestrator) GetOperations(ctx context.Context, filter database.AndFi } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) @@ -896,11 +898,11 @@ func (_m *Orchestrator) GetOperations(ctx context.Context, filter database.AndFi } // GetPins provides a mock function with given fields: ctx, filter -func (_m *Orchestrator) GetPins(ctx context.Context, filter database.AndFilter) ([]*core.Pin, *database.FilterResult, error) { +func (_m *Orchestrator) GetPins(ctx context.Context, filter ffapi.AndFilter) ([]*core.Pin, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.Pin - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*core.Pin); ok { + if rf, ok := ret.Get(0).(func(context.Context, 
ffapi.AndFilter) []*core.Pin); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -908,17 +910,17 @@ func (_m *Orchestrator) GetPins(ctx context.Context, filter database.AndFilter) } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) @@ -997,11 +999,11 @@ func (_m *Orchestrator) GetSubscriptionByIDWithStatus(ctx context.Context, id st } // GetSubscriptions provides a mock function with given fields: ctx, filter -func (_m *Orchestrator) GetSubscriptions(ctx context.Context, filter database.AndFilter) ([]*core.Subscription, *database.FilterResult, error) { +func (_m *Orchestrator) GetSubscriptions(ctx context.Context, filter ffapi.AndFilter) ([]*core.Subscription, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.Subscription - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*core.Subscription); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.AndFilter) []*core.Subscription); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -1009,17 +1011,17 @@ func (_m *Orchestrator) GetSubscriptions(ctx context.Context, filter database.An } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) @@ -1029,7 +1031,7 @@ func (_m *Orchestrator) GetSubscriptions(ctx context.Context, filter database.An } // GetTransactionBlockchainEvents provides a mock function with given fields: ctx, id -func (_m *Orchestrator) GetTransactionBlockchainEvents(ctx context.Context, id string) ([]*core.BlockchainEvent, *database.FilterResult, error) { +func (_m *Orchestrator) GetTransactionBlockchainEvents(ctx context.Context, id string) ([]*core.BlockchainEvent, *ffapi.FilterResult, error) { ret := _m.Called(ctx, id) var r0 []*core.BlockchainEvent @@ -1041,12 +1043,12 @@ func (_m *Orchestrator) GetTransactionBlockchainEvents(ctx context.Context, id s } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string) *ffapi.FilterResult); ok { r1 = rf(ctx, id) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } @@ -1084,7 +1086,7 @@ func (_m *Orchestrator) GetTransactionByID(ctx context.Context, id string) (*cor } // GetTransactionOperations provides a mock function with given fields: ctx, id -func (_m *Orchestrator) 
GetTransactionOperations(ctx context.Context, id string) ([]*core.Operation, *database.FilterResult, error) { +func (_m *Orchestrator) GetTransactionOperations(ctx context.Context, id string) ([]*core.Operation, *ffapi.FilterResult, error) { ret := _m.Called(ctx, id) var r0 []*core.Operation @@ -1096,12 +1098,12 @@ func (_m *Orchestrator) GetTransactionOperations(ctx context.Context, id string) } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, string) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string) *ffapi.FilterResult); ok { r1 = rf(ctx, id) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } @@ -1139,11 +1141,11 @@ func (_m *Orchestrator) GetTransactionStatus(ctx context.Context, id string) (*c } // GetTransactions provides a mock function with given fields: ctx, filter -func (_m *Orchestrator) GetTransactions(ctx context.Context, filter database.AndFilter) ([]*core.Transaction, *database.FilterResult, error) { +func (_m *Orchestrator) GetTransactions(ctx context.Context, filter ffapi.AndFilter) ([]*core.Transaction, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.Transaction - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*core.Transaction); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.AndFilter) []*core.Transaction); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -1151,17 +1153,17 @@ func (_m *Orchestrator) GetTransactions(ctx context.Context, filter database.And } } - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok { + var r1 *ffapi.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok { r1 = rf(ctx, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) + r1 = ret.Get(1).(*ffapi.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.AndFilter) error); ok { + if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok { r2 = rf(ctx, filter) } else { r2 = ret.Error(2) diff --git a/mocks/privatemessagingmocks/manager.go b/mocks/privatemessagingmocks/manager.go index 5f3d88fc84..7400ed42df 100644 --- a/mocks/privatemessagingmocks/manager.go +++ b/mocks/privatemessagingmocks/manager.go @@ -5,8 +5,8 @@ package privatemessagingmocks import ( context "context" + ffapi "github.com/hyperledger/firefly-common/pkg/ffapi" core "github.com/hyperledger/firefly/pkg/core" - database "github.com/hyperledger/firefly/pkg/database" fftypes "github.com/hyperledger/firefly-common/pkg/fftypes" @@ -65,11 +65,11 @@ func (_m *Manager) GetGroupByID(ctx context.Context, id string) (*core.Group, er } // GetGroups provides a mock function with given fields: ctx, filter -func (_m *Manager) GetGroups(ctx context.Context, filter database.AndFilter) ([]*core.Group, *database.FilterResult, error) { +func (_m *Manager) GetGroups(ctx context.Context, filter ffapi.AndFilter) ([]*core.Group, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) var r0 []*core.Group - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*core.Group); ok { + if rf, ok := ret.Get(0).(func(context.Context, ffapi.AndFilter) []*core.Group); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { @@ -77,17 +77,17 @@ func (_m *Manager) GetGroups(ctx context.Context, filter 
database.AndFilter) ([]
         }
     }
 
-    var r1 *database.FilterResult
-    if rf, ok := ret.Get(1).(func(context.Context, database.AndFilter) *database.FilterResult); ok {
+    var r1 *ffapi.FilterResult
+    if rf, ok := ret.Get(1).(func(context.Context, ffapi.AndFilter) *ffapi.FilterResult); ok {
         r1 = rf(ctx, filter)
     } else {
         if ret.Get(1) != nil {
-            r1 = ret.Get(1).(*database.FilterResult)
+            r1 = ret.Get(1).(*ffapi.FilterResult)
         }
     }
 
     var r2 error
-    if rf, ok := ret.Get(2).(func(context.Context, database.AndFilter) error); ok {
+    if rf, ok := ret.Get(2).(func(context.Context, ffapi.AndFilter) error); ok {
         r2 = rf(ctx, filter)
     } else {
         r2 = ret.Error(2)

From 24c648045d5458215ce0f6d08f296b21a6929deb Mon Sep 17 00:00:00 2001
From: Peter Broadhurst
Date: Sun, 4 Dec 2022 22:05:25 -0500
Subject: [PATCH 3/5] Ensure no net changes to swagger

Signed-off-by: Peter Broadhurst
---
 .../apiserver/route_get_token_transfers.go  |  2 +-
 .../route_get_token_transfers_test.go       |  6 ++--
 internal/apiserver/server.go                | 28 +++++++++----------
 internal/apiserver/server_test.go           |  8 +++---
 internal/apiserver/swagger_check_test.go    |  1 +
 internal/apiserver/swagger_generate_test.go |  1 +
 internal/broadcast/operations_test.go       |  6 ++--
 internal/data/message_writer_test.go        | 13 +++++----
 internal/database/postgres/postgres.go      |  9 ++++--
 internal/database/postgres/postgres_test.go |  1 +
 internal/database/sqlite3/sqlite3.go        |  9 ++++--
 internal/database/sqlite3/sqlite3_test.go   |  1 +
 internal/events/aggregator_test.go          |  5 ++--
 internal/events/event_poller_test.go        |  7 +++--
 14 files changed, 58 insertions(+), 39 deletions(-)

diff --git a/internal/apiserver/route_get_token_transfers.go b/internal/apiserver/route_get_token_transfers.go
index 6d8f392215..4f31c1e2e7 100644
--- a/internal/apiserver/route_get_token_transfers.go
+++ b/internal/apiserver/route_get_token_transfers.go
@@ -43,7 +43,7 @@ var getTokenTransfers = &ffapi.Route{
             filter := r.Filter
             if fromOrTo, ok := r.QP["fromOrTo"]; ok {
                 fb := database.TokenTransferQueryFactory.NewFilter(cr.ctx)
-                filter.Condition(
+                filter = filter.Condition(
                     fb.Or().
                         Condition(fb.Eq("from", fromOrTo)).
                         Condition(fb.Eq("to", fromOrTo)))
diff --git a/internal/apiserver/route_get_token_transfers_test.go b/internal/apiserver/route_get_token_transfers_test.go
index d00a379e73..20481b286b 100644
--- a/internal/apiserver/route_get_token_transfers_test.go
+++ b/internal/apiserver/route_get_token_transfers_test.go
@@ -18,6 +18,7 @@ package apiserver
 
 import (
     "net/http/httptest"
+    "strings"
     "testing"
 
     "github.com/hyperledger/firefly-common/pkg/ffapi"
@@ -53,8 +54,9 @@ func TestGetTokenTransfersFromOrTo(t *testing.T) {
     res := httptest.NewRecorder()
 
     mam.On("GetTokenTransfers", mock.Anything, mock.MatchedBy(func(filter ffapi.AndFilter) bool {
-        info, _ := filter.Finalize()
-        return info.String() == "( ( from == '0x1' ) || ( to == '0x1' ) )"
+        f, _ := filter.Finalize()
+        filterStr := f.String()
+        return strings.Contains(filterStr, "( ( from == '0x1' ) || ( to == '0x1' ) )")
     })).Return([]*core.TokenTransfer{}, nil, nil)
     r.ServeHTTP(res, req)
diff --git a/internal/apiserver/server.go b/internal/apiserver/server.go
index 1b9ee7b37d..e4efb3ef8d 100644
--- a/internal/apiserver/server.go
+++ b/internal/apiserver/server.go
@@ -57,13 +57,10 @@ type Server interface {
 
 type apiServer struct {
     // Defaults set with config
-    defaultFilterLimit uint64
-    maxFilterLimit uint64
-    maxFilterSkip uint64
-    apiTimeout time.Duration
-    apiMaxTimeout time.Duration
-    metricsEnabled bool
-    ffiSwaggerGen FFISwaggerGen
+    apiTimeout time.Duration
+    apiMaxTimeout time.Duration
+    metricsEnabled bool
+    ffiSwaggerGen FFISwaggerGen
 }
 
 func InitConfig() {
@@ -76,13 +73,10 @@ func InitConfig() {
 
 func NewAPIServer() Server {
     return &apiServer{
-        defaultFilterLimit: uint64(config.GetUint(coreconfig.APIDefaultFilterLimit)),
-        maxFilterLimit: uint64(config.GetUint(coreconfig.APIMaxFilterLimit)),
-        maxFilterSkip: uint64(config.GetUint(coreconfig.APIMaxFilterSkip)),
-        apiTimeout: config.GetDuration(coreconfig.APIRequestTimeout),
-        apiMaxTimeout: config.GetDuration(coreconfig.APIRequestMaxTimeout),
-        metricsEnabled: config.GetBool(coreconfig.MetricsEnabled),
-        ffiSwaggerGen: NewFFISwaggerGen(),
+        apiTimeout: config.GetDuration(coreconfig.APIRequestTimeout),
+        apiMaxTimeout: config.GetDuration(coreconfig.APIRequestMaxTimeout),
+        metricsEnabled: config.GetBool(coreconfig.MetricsEnabled),
+        ffiSwaggerGen: NewFFISwaggerGen(),
     }
 }
 
@@ -158,6 +152,9 @@ func (as *apiServer) swaggerGenConf(apiBaseURL string) *ffapi.Options {
         Version: "1.0",
         PanicOnMissingDescription: config.GetBool(coreconfig.APIOASPanicOnMissingDescription),
         DefaultRequestTimeout: config.GetDuration(coreconfig.APIRequestTimeout),
+        APIDefaultFilterLimit: config.GetString(coreconfig.APIDefaultFilterLimit),
+        APIMaxFilterLimit: config.GetUint(coreconfig.APIMaxFilterLimit),
+        APIMaxFilterSkip: config.GetUint(coreconfig.APIMaxFilterSkip),
     }
 }
 
@@ -289,6 +286,9 @@ func (as *apiServer) routeHandler(hf *ffapi.HandlerFactory, mgr namespace.Manage
 
 func (as *apiServer) handlerFactory() *ffapi.HandlerFactory {
     return &ffapi.HandlerFactory{
+        DefaultFilterLimit: uint64(config.GetUint(coreconfig.APIDefaultFilterLimit)),
+        MaxFilterLimit: uint64(config.GetUint(coreconfig.APIMaxFilterLimit)),
+        MaxFilterSkip: uint64(config.GetUint(coreconfig.APIMaxFilterSkip)),
         DefaultRequestTimeout: config.GetDuration(coreconfig.APIRequestTimeout),
         MaxTimeout: config.GetDuration(coreconfig.APIRequestMaxTimeout),
     }
diff --git a/internal/apiserver/server_test.go b/internal/apiserver/server_test.go
index e0a9fcf5bb..04dab4ae28 100644
--- a/internal/apiserver/server_test.go
+++ b/internal/apiserver/server_test.go
@@ -56,10
+56,10 @@ func newTestServer() (*namespacemocks.Manager, *orchestratormocks.Orchestrator, mgr.On("Orchestrator", "default").Return(o).Maybe() mgr.On("Orchestrator", "mynamespace").Return(o).Maybe() mgr.On("Orchestrator", "ns1").Return(o).Maybe() + config.Set(coreconfig.APIMaxFilterLimit, 100) as := &apiServer{ - apiTimeout: 5 * time.Second, - maxFilterLimit: 100, - ffiSwaggerGen: &apiservermocks.FFISwaggerGen{}, + apiTimeout: 5 * time.Second, + ffiSwaggerGen: &apiservermocks.FFISwaggerGen{}, } return mgr, o, as } @@ -204,7 +204,7 @@ func TestFilterTooMany(t *testing.T) { assert.Equal(t, 400, res.Result().StatusCode) var resJSON map[string]interface{} json.NewDecoder(res.Body).Decode(&resJSON) - assert.Regexp(t, "FF10184", resJSON["error"]) + assert.Regexp(t, "FF00192", resJSON["error"]) } func TestUnauthorized(t *testing.T) { diff --git a/internal/apiserver/swagger_check_test.go b/internal/apiserver/swagger_check_test.go index 1246a6022c..fb444326dd 100644 --- a/internal/apiserver/swagger_check_test.go +++ b/internal/apiserver/swagger_check_test.go @@ -38,6 +38,7 @@ import ( ) func TestDiffSwaggerYAML(t *testing.T) { + coreconfig.Reset() config.Set(coreconfig.APIOASPanicOnMissingDescription, true) as := &apiServer{} hf := as.handlerFactory() diff --git a/internal/apiserver/swagger_generate_test.go b/internal/apiserver/swagger_generate_test.go index a313a2ab11..bd03ce5ff9 100644 --- a/internal/apiserver/swagger_generate_test.go +++ b/internal/apiserver/swagger_generate_test.go @@ -36,6 +36,7 @@ import ( ) func TestDownloadSwaggerYAML(t *testing.T) { + coreconfig.Reset() config.Set(coreconfig.APIOASPanicOnMissingDescription, true) as := &apiServer{} hf := as.handlerFactory() diff --git a/internal/broadcast/operations_test.go b/internal/broadcast/operations_test.go index d3cba856c4..f94af83a25 100644 --- a/internal/broadcast/operations_test.go +++ b/internal/broadcast/operations_test.go @@ -22,13 +22,13 @@ import ( "strings" "testing" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly/mocks/databasemocks" "github.com/hyperledger/firefly/mocks/dataexchangemocks" "github.com/hyperledger/firefly/mocks/datamocks" "github.com/hyperledger/firefly/mocks/sharedstoragemocks" "github.com/hyperledger/firefly/pkg/core" - "github.com/hyperledger/firefly/pkg/database" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" ) @@ -258,7 +258,7 @@ func TestPrepareAndRunUploadBlob(t *testing.T) { mdi.On("GetBlobMatchingHash", mock.Anything, blob.Hash).Return(blob, nil) mps.On("UploadData", context.Background(), mock.Anything).Return("123", nil) mdx.On("DownloadBlob", context.Background(), mock.Anything).Return(reader, nil) - mdi.On("UpdateData", context.Background(), "ns1", data.ID, mock.MatchedBy(func(update database.Update) bool { + mdi.On("UpdateData", context.Background(), "ns1", data.ID, mock.MatchedBy(func(update ffapi.Update) bool { info, _ := update.Finalize() assert.Equal(t, 1, len(info.SetOperations)) assert.Equal(t, "blob.public", info.SetOperations[0].Field) @@ -302,7 +302,7 @@ func TestPrepareAndRunValue(t *testing.T) { mdi.On("GetDataByID", mock.Anything, "ns1", data.ID, false).Return(data, nil) mps.On("UploadData", context.Background(), mock.Anything).Return("123", nil) - mdi.On("UpdateData", context.Background(), "ns1", data.ID, mock.MatchedBy(func(update database.Update) bool { + mdi.On("UpdateData", context.Background(), "ns1", data.ID, mock.MatchedBy(func(update ffapi.Update) bool { info, _ 
:= update.Finalize() assert.Equal(t, 1, len(info.SetOperations)) assert.Equal(t, "public", info.SetOperations[0].Field) diff --git a/internal/data/message_writer_test.go b/internal/data/message_writer_test.go index d162b0364f..9604877b17 100644 --- a/internal/data/message_writer_test.go +++ b/internal/data/message_writer_test.go @@ -23,6 +23,7 @@ import ( "testing" "time" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly/mocks/databasemocks" "github.com/hyperledger/firefly/pkg/core" @@ -178,11 +179,11 @@ func TestPersistMWBatchIdempotencyAllFail(t *testing.T) { }).Return(nil) mdi.On("InsertDataArray", customCtx, append(m1d, m2d...)).Return(nil) mdi.On("InsertMessages", customCtx, []*core.Message{m1, m2}).Return(fmt.Errorf("various keys were not unique")) - mdi.On("GetMessages", mock.Anything, "ns1", mock.MatchedBy(func(f database.Filter) bool { + mdi.On("GetMessages", mock.Anything, "ns1", mock.MatchedBy(func(f ffapi.Filter) bool { ff, _ := f.Finalize() return strings.Contains(ff.String(), "idem1") })).Return([]*core.Message{m1}, nil, nil) - mdi.On("GetMessages", mock.Anything, "ns1", mock.MatchedBy(func(f database.Filter) bool { + mdi.On("GetMessages", mock.Anything, "ns1", mock.MatchedBy(func(f ffapi.Filter) bool { ff, _ := f.Finalize() return strings.Contains(ff.String(), "idem2") })).Return([]*core.Message{m2}, nil, nil) @@ -237,11 +238,11 @@ func TestPersistMWBatchHalfFailResubmit(t *testing.T) { }).Return(nil) mdi.On("InsertDataArray", customCtx, append(m1d, m2d...)).Return(nil) mdi.On("InsertMessages", customCtx, []*core.Message{m1, m2}).Return(fmt.Errorf("various keys were not unique")) - mdi.On("GetMessages", mock.Anything, "ns1", mock.MatchedBy(func(f database.Filter) bool { + mdi.On("GetMessages", mock.Anything, "ns1", mock.MatchedBy(func(f ffapi.Filter) bool { ff, _ := f.Finalize() return strings.Contains(ff.String(), "idem1") })).Return([]*core.Message{}, nil, nil) // no result - mdi.On("GetMessages", mock.Anything, "ns1", mock.MatchedBy(func(f database.Filter) bool { + mdi.On("GetMessages", mock.Anything, "ns1", mock.MatchedBy(func(f ffapi.Filter) bool { ff, _ := f.Finalize() return strings.Contains(ff.String(), "idem2") })).Return([]*core.Message{m2}, nil, nil) // found result @@ -286,7 +287,7 @@ func TestPersistMWBatchFailIdemCheck(t *testing.T) { }).Return(nil) mdi.On("InsertDataArray", customCtx, m1d).Return(nil) mdi.On("InsertMessages", customCtx, []*core.Message{m1}).Return(fmt.Errorf("failure1... which leads to...")) - mdi.On("GetMessages", mock.Anything, "ns1", mock.MatchedBy(func(f database.Filter) bool { + mdi.On("GetMessages", mock.Anything, "ns1", mock.MatchedBy(func(f ffapi.Filter) bool { ff, _ := f.Finalize() return strings.Contains(ff.String(), "idem1") })).Return([]*core.Message{}, nil, fmt.Errorf("failure1")) @@ -359,7 +360,7 @@ func TestWriteMessageNoWorkersIdempotencyDuplicate(t *testing.T) { rag.Return(args[1].(func(context.Context) error)(customCtx)) }).Return(nil) mdi.On("InsertMessages", customCtx, []*core.Message{&m1.Message}).Return(fmt.Errorf("failure1... 
which leads to...")) - mdi.On("GetMessages", mock.Anything, "ns1", mock.MatchedBy(func(f database.Filter) bool { + mdi.On("GetMessages", mock.Anything, "ns1", mock.MatchedBy(func(f ffapi.Filter) bool { ff, _ := f.Finalize() return strings.Contains(ff.String(), "idem1") })).Return([]*core.Message{ diff --git a/internal/database/postgres/postgres.go b/internal/database/postgres/postgres.go index b309be23f8..812cefef2b 100644 --- a/internal/database/postgres/postgres.go +++ b/internal/database/postgres/postgres.go @@ -27,6 +27,7 @@ import ( migratedb "github.com/golang-migrate/migrate/v4/database" "github.com/golang-migrate/migrate/v4/database/postgres" "github.com/hyperledger/firefly-common/pkg/config" + "github.com/hyperledger/firefly-common/pkg/dbsql" "github.com/hyperledger/firefly/internal/database/sqlcommon" "github.com/hyperledger/firefly/pkg/database" @@ -51,6 +52,10 @@ func (psql *Postgres) Name() string { return "postgres" } +func (psql *Postgres) SequenceColumn() string { + return "seq" +} + func (psql *Postgres) MigrationsDir() string { return psql.Name() } @@ -64,8 +69,8 @@ func lockIndex(lockName string) int64 { return big.NewInt(0).SetBytes([]byte(lockName)).Int64() } -func (psql *Postgres) Features() sqlcommon.SQLFeatures { - features := sqlcommon.DefaultSQLProviderFeatures() +func (psql *Postgres) Features() dbsql.SQLFeatures { + features := dbsql.DefaultSQLProviderFeatures() features.PlaceholderFormat = sq.Dollar features.UseILIKE = false // slower than lower() features.AcquireLock = func(lockName string) string { diff --git a/internal/database/postgres/postgres_test.go b/internal/database/postgres/postgres_test.go index d27284732a..18ff0c23f4 100644 --- a/internal/database/postgres/postgres_test.go +++ b/internal/database/postgres/postgres_test.go @@ -39,6 +39,7 @@ func TestPostgresProvider(t *testing.T) { assert.Error(t, err) assert.Equal(t, "postgres", psql.Name()) + assert.Equal(t, "seq", psql.SequenceColumn()) assert.Equal(t, sq.Dollar, psql.Features().PlaceholderFormat) assert.Equal(t, `SELECT pg_advisory_xact_lock(8387236824920056683);`, psql.Features().AcquireLock("test-lock")) assert.Equal(t, `SELECT pg_advisory_xact_lock(116);`, psql.Features().AcquireLock("t")) diff --git a/internal/database/sqlite3/sqlite3.go b/internal/database/sqlite3/sqlite3.go index b8624b593d..3c1680383a 100644 --- a/internal/database/sqlite3/sqlite3.go +++ b/internal/database/sqlite3/sqlite3.go @@ -28,6 +28,7 @@ import ( migratedb "github.com/golang-migrate/migrate/v4/database" migratesqlite3 "github.com/golang-migrate/migrate/v4/database/sqlite3" "github.com/hyperledger/firefly-common/pkg/config" + "github.com/hyperledger/firefly-common/pkg/dbsql" "github.com/hyperledger/firefly/internal/database/sqlcommon" "github.com/hyperledger/firefly/pkg/database" @@ -73,8 +74,12 @@ func (sqlite *SQLite3) MigrationsDir() string { return "sqlite" } -func (sqlite *SQLite3) Features() sqlcommon.SQLFeatures { - features := sqlcommon.DefaultSQLProviderFeatures() +func (sqlite *SQLite3) SequenceColumn() string { + return "seq" +} + +func (sqlite *SQLite3) Features() dbsql.SQLFeatures { + features := dbsql.DefaultSQLProviderFeatures() features.PlaceholderFormat = sq.Dollar features.UseILIKE = false // Not supported return features diff --git a/internal/database/sqlite3/sqlite3_test.go b/internal/database/sqlite3/sqlite3_test.go index 56c007cffd..09edfc2dd1 100644 --- a/internal/database/sqlite3/sqlite3_test.go +++ b/internal/database/sqlite3/sqlite3_test.go @@ -51,6 +51,7 @@ func TestSQLite3GoProvider(t 
*testing.T) { conn.Close() assert.Equal(t, "sqlite3", sqlite.Name()) + assert.Equal(t, "seq", sqlite.SequenceColumn()) assert.Equal(t, sq.Dollar, sqlite.Features().PlaceholderFormat) insert := sq.Insert("test").Columns("col1").Values("val1") diff --git a/internal/events/aggregator_test.go b/internal/events/aggregator_test.go index f377924e34..9d48979bb7 100644 --- a/internal/events/aggregator_test.go +++ b/internal/events/aggregator_test.go @@ -25,6 +25,7 @@ import ( "time" "github.com/hyperledger/firefly-common/pkg/config" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/log" "github.com/hyperledger/firefly/internal/cache" @@ -312,7 +313,7 @@ func TestAggregationMaskedZeroNonceMatch(t *testing.T) { // Set the pin to dispatched ag.mdi.On("UpdatePins", ag.ctx, "ns1", mock.Anything, mock.Anything).Return(nil) // Update the message - ag.mdi.On("UpdateMessages", ag.ctx, "ns1", mock.Anything, mock.MatchedBy(func(u database.Update) bool { + ag.mdi.On("UpdateMessages", ag.ctx, "ns1", mock.Anything, mock.MatchedBy(func(u ffapi.Update) bool { update, err := u.Finalize() assert.NoError(t, err) assert.Len(t, update.SetOperations, 2) @@ -430,7 +431,7 @@ func TestAggregationMaskedNextSequenceMatch(t *testing.T) { // Update member2 to nonce 1 ag.mdi.On("UpdateNextPin", ag.ctx, "ns1", mock.MatchedBy(func(seq int64) bool { return seq == 424 - }), mock.MatchedBy(func(update database.Update) bool { + }), mock.MatchedBy(func(update ffapi.Update) bool { ui, _ := update.Finalize() assert.Equal(t, "nonce", ui.SetOperations[0].Field) v, _ := ui.SetOperations[0].Value.Value() diff --git a/internal/events/event_poller_test.go b/internal/events/event_poller_test.go index f7d633a8b7..68f39047fc 100644 --- a/internal/events/event_poller_test.go +++ b/internal/events/event_poller_test.go @@ -22,6 +22,7 @@ import ( "testing" "time" + "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/retry" "github.com/hyperledger/firefly/mocks/databasemocks" @@ -48,7 +49,7 @@ func newTestEventPoller(t *testing.T, mdi *databasemocks.Plugin, neh newEventsHa namespace: "unit", offsetName: "test", queryFactory: database.EventQueryFactory, - getItems: func(c context.Context, f database.Filter, o int64) ([]core.LocallySequenced, error) { + getItems: func(c context.Context, f ffapi.Filter, o int64) ([]core.LocallySequenced, error) { events, _, err := mdi.GetEvents(c, "unit", f) ls := make([]core.LocallySequenced, len(events)) for i, e := range events { @@ -57,7 +58,7 @@ func newTestEventPoller(t *testing.T, mdi *databasemocks.Plugin, neh newEventsHa return ls, err }, maybeRewind: rewinder, - addCriteria: func(af database.AndFilter) database.AndFilter { return af }, + addCriteria: func(af ffapi.AndFilter) ffapi.AndFilter { return af }, }) return ep, cancel } @@ -230,7 +231,7 @@ func TestReadPageRewind(t *testing.T) { ep.pollingOffset = 23456 cancel() ev1 := core.NewEvent(core.EventTypeMessageConfirmed, "ns1", fftypes.NewUUID(), nil, "") - mdi.On("GetEvents", mock.Anything, "unit", mock.MatchedBy(func(filter database.Filter) bool { + mdi.On("GetEvents", mock.Anything, "unit", mock.MatchedBy(func(filter ffapi.Filter) bool { f, err := filter.Finalize() assert.NoError(t, err) assert.Equal(t, "sequence", f.Children[0].Field) From 154377ee85b3b5bab3d418de0cb3660f9f6fe779 Mon Sep 17 00:00:00 2001 From: Peter Broadhurst Date: Sun, 4 Dec 2022 22:24:20 -0500 
Subject: [PATCH 4/5] Work through all UTs Signed-off-by: Peter Broadhurst --- go.sum | 14 ----------- internal/blockchain/ethereum/ethereum_test.go | 24 +++++++++++++++++++ internal/data/data_manager_test.go | 4 +++- 3 files changed, 27 insertions(+), 15 deletions(-) diff --git a/go.sum b/go.sum index e8282e940b..9f251bed65 100644 --- a/go.sum +++ b/go.sum @@ -675,20 +675,6 @@ github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0m github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/hyperledger/firefly-common v1.1.4 h1:7Oqe7FFOSjt8Uo3/i/ujD4wke2kD2Xr2Kouq4QmgWns= -github.com/hyperledger/firefly-common v1.1.4/go.mod h1:taWRM7vsramcM7iWjeOmIgm3yd+RoWvaXSWxZKpSXM8= -github.com/hyperledger/firefly-common v1.1.5-0.20221203042833-800b9fd4d10a h1:QQj909iAkIYl+eT7hJSzj2+ICo84i3coBc3JrY7Mylg= -github.com/hyperledger/firefly-common v1.1.5-0.20221203042833-800b9fd4d10a/go.mod h1:3ubN46/dB+xurCPvdfqMKjB/CJU3I/DsfOoS7dY2SyQ= -github.com/hyperledger/firefly-common v1.1.5-0.20221205003343-e0ddbf5dc33d h1:VuGCjaBtRvhUg/m9Sl5T1jvIpU6QsugBg6sNucugjGU= -github.com/hyperledger/firefly-common v1.1.5-0.20221205003343-e0ddbf5dc33d/go.mod h1:3ubN46/dB+xurCPvdfqMKjB/CJU3I/DsfOoS7dY2SyQ= -github.com/hyperledger/firefly-common v1.1.5-0.20221205003709-9aa8a5f5bd55 h1:CPWQEodCUg9rSWUj9RTeY6b86SMqo0aaI2mve/zPzAw= -github.com/hyperledger/firefly-common v1.1.5-0.20221205003709-9aa8a5f5bd55/go.mod h1:3ubN46/dB+xurCPvdfqMKjB/CJU3I/DsfOoS7dY2SyQ= -github.com/hyperledger/firefly-common v1.1.5-0.20221205005833-d82dd5500c51 h1:8ZXqf9MvInNdCMjSlXlMUT379VuKyc1dH1t9facqGaA= -github.com/hyperledger/firefly-common v1.1.5-0.20221205005833-d82dd5500c51/go.mod h1:3ubN46/dB+xurCPvdfqMKjB/CJU3I/DsfOoS7dY2SyQ= -github.com/hyperledger/firefly-common v1.1.5-0.20221205010550-085676278a41 h1:Tbz2Fbyq4fTk5rNXwiHk2W7wqZt1lrf/8wV4KGou2/w= -github.com/hyperledger/firefly-common v1.1.5-0.20221205010550-085676278a41/go.mod h1:3ubN46/dB+xurCPvdfqMKjB/CJU3I/DsfOoS7dY2SyQ= -github.com/hyperledger/firefly-common v1.1.5-0.20221205013211-db8cd88a141f h1:EfkvaVkVldy0tmJUjN0pnR83DpZYfG1ARR60c+Q4NJ4= -github.com/hyperledger/firefly-common v1.1.5-0.20221205013211-db8cd88a141f/go.mod h1:3ubN46/dB+xurCPvdfqMKjB/CJU3I/DsfOoS7dY2SyQ= github.com/hyperledger/firefly-common v1.1.5-0.20221205020100-299c0d5b3396 h1:iGT7SpBMGFR2+WlEXa3kYMeIQAFMbdsp9rRWwBk1E4o= github.com/hyperledger/firefly-common v1.1.5-0.20221205020100-299c0d5b3396/go.mod h1:3ubN46/dB+xurCPvdfqMKjB/CJU3I/DsfOoS7dY2SyQ= github.com/hyperledger/firefly-signer v1.1.2 h1:QuS3M5w9px3BnPa4jIWMDg+z2ySK76MoO5Egh0G+tFg= diff --git a/internal/blockchain/ethereum/ethereum_test.go b/internal/blockchain/ethereum/ethereum_test.go index 0c205f344d..5c21803982 100644 --- a/internal/blockchain/ethereum/ethereum_test.go +++ b/internal/blockchain/ethereum/ethereum_test.go @@ -2256,6 +2256,30 @@ func TestDeployContractOK(t *testing.T) { assert.NoError(t, err) } +func TestDeployContractFFEC100130(t *testing.T) { + e, cancel := newTestEthereum() + defer cancel() + httpmock.ActivateNonDefault(e.client.GetClient()) + defer httpmock.DeactivateAndReset() + signingKey := ethHexFormatB32(fftypes.NewRandB32()) + input := []interface{}{ + float64(1), + "1000000000000000000000000", + } + options := map[string]interface{}{ + "customOption": "customValue", + } + definitionBytes, err 
:= json.Marshal([]interface{}{}) + contractBytes, err := json.Marshal("0x123456") + assert.NoError(t, err) + httpmock.RegisterResponder("POST", `http://localhost:12345/`, + func(req *http.Request) (*http.Response, error) { + return httpmock.NewJsonResponderOrPanic(500, `{"error":"FFEC100130: failure"}`)(req) + }) + err = e.DeployContract(context.Background(), "", signingKey, fftypes.JSONAnyPtrBytes(definitionBytes), fftypes.JSONAnyPtrBytes(contractBytes), input, options) + assert.Regexp(t, "FF10429", err) +} + func TestDeployContractInvalidOption(t *testing.T) { e, cancel := newTestEthereum() defer cancel() diff --git a/internal/data/data_manager_test.go b/internal/data/data_manager_test.go index 0fb46f9ff7..b01804fc67 100644 --- a/internal/data/data_manager_test.go +++ b/internal/data/data_manager_test.go @@ -32,6 +32,7 @@ import ( "github.com/hyperledger/firefly/mocks/dataexchangemocks" "github.com/hyperledger/firefly/pkg/core" "github.com/hyperledger/firefly/pkg/database" + "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" ) @@ -89,7 +90,7 @@ func newTestDataManager(t *testing.T) (*dataManager, context.Context, func()) { ns := &core.Namespace{Name: "ns1", NetworkName: "ns1"} cmi := &cachemocks.Manager{} - cmi.On("GetCache", mock.Anything).Return(cache.NewUmanagedCache(ctx, 100, 5*time.Minute), nil) + cmi.On("GetCache", mock.Anything).Return(cache.NewUmanagedCache(ctx, 10000, 5*time.Minute), nil) dm, err := NewDataManager(ctx, ns, mdi, mdx, cmi) cmi.AssertCalled(t, "GetCache", cache.NewCacheConfig( ctx, @@ -961,6 +962,7 @@ func TestHydrateBatchMsgBadManifest(t *testing.T) { } func TestGetMessageWithDataOk(t *testing.T) { + logrus.SetLevel(logrus.DebugLevel) dm, ctx, cancel := newTestDataManager(t) defer cancel() From 37aed427c25f56eb291314e1a39a06870a088ddc Mon Sep 17 00:00:00 2001 From: Peter Broadhurst Date: Mon, 5 Dec 2022 20:53:00 -0500 Subject: [PATCH 5/5] Update to pull in FF common v1.1.5 Signed-off-by: Peter Broadhurst --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 156bd4f960..6cf739a499 100644 --- a/go.mod +++ b/go.mod @@ -14,7 +14,7 @@ require ( github.com/golang-migrate/migrate/v4 v4.15.2 github.com/gorilla/mux v1.8.0 github.com/gorilla/websocket v1.5.0 - github.com/hyperledger/firefly-common v1.1.5-0.20221205020100-299c0d5b3396 + github.com/hyperledger/firefly-common v1.1.5 github.com/hyperledger/firefly-signer v1.1.2 github.com/jarcoal/httpmock v1.2.0 github.com/karlseguin/ccache v2.0.3+incompatible diff --git a/go.sum b/go.sum index 9f251bed65..abfd45b255 100644 --- a/go.sum +++ b/go.sum @@ -675,8 +675,8 @@ github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0m github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/hyperledger/firefly-common v1.1.5-0.20221205020100-299c0d5b3396 h1:iGT7SpBMGFR2+WlEXa3kYMeIQAFMbdsp9rRWwBk1E4o= -github.com/hyperledger/firefly-common v1.1.5-0.20221205020100-299c0d5b3396/go.mod h1:3ubN46/dB+xurCPvdfqMKjB/CJU3I/DsfOoS7dY2SyQ= +github.com/hyperledger/firefly-common v1.1.5 h1:yVOsUqZQoV/iCc/PxYFGdq/438JDdR/TdlCpMOv3XvM= +github.com/hyperledger/firefly-common v1.1.5/go.mod h1:3ubN46/dB+xurCPvdfqMKjB/CJU3I/DsfOoS7dY2SyQ= github.com/hyperledger/firefly-signer v1.1.2 
h1:QuS3M5w9px3BnPa4jIWMDg+z2ySK76MoO5Egh0G+tFg= github.com/hyperledger/firefly-signer v1.1.2/go.mod h1:4h2MN910A2knrWGYCT+aWjBDlhptgQn/9WcT1N/Ct8s= github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho=
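
The postgres and sqlite3 hunks above show the provider-side shape of this refactor: the dialect feature flags now come from firefly-common's dbsql package rather than the in-tree sqlcommon package, and each provider gains a SequenceColumn() accessor alongside its existing Name() and MigrationsDir() methods. The sketch below restates that pattern for a hypothetical dialect so the contract is visible in one place; the package and type names ("exampledb", "ExampleDB") are illustrative assumptions and are not part of this patch series.

// Minimal sketch of the provider-side pattern from the postgres and
// sqlite3 hunks above. Package and type names are assumptions for
// illustration only; a real provider also implements the rest of the
// dbsql provider contract (Name(), MigrationsDir(), etc.).
package exampledb

import (
	sq "github.com/Masterminds/squirrel"
	"github.com/hyperledger/firefly-common/pkg/dbsql"
)

type ExampleDB struct{}

// SequenceColumn reports the name of the monotonic sequence column,
// matching what both in-tree providers return after this change.
func (e *ExampleDB) SequenceColumn() string {
	return "seq"
}

// Features starts from firefly-common's defaults instead of the previous
// sqlcommon equivalent, then adjusts per-dialect flags.
func (e *ExampleDB) Features() dbsql.SQLFeatures {
	features := dbsql.DefaultSQLProviderFeatures()
	features.PlaceholderFormat = sq.Dollar // dollar placeholders, as in both hunks above
	features.UseILIKE = false              // ILIKE disabled, as in both hunks above
	return features
}

The same firefly-common release also takes ownership of the query-filter and update builders, which is why the mock expectations in the test hunks above now match ffapi.Filter and ffapi.Update from github.com/hyperledger/firefly-common/pkg/ffapi instead of the old pkg/database types.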