diff --git a/Makefile b/Makefile index 08e436d773..2336f7862b 100644 --- a/Makefile +++ b/Makefile @@ -31,43 +31,43 @@ mocks-$(strip $(1))-$(strip $(2)): ${MOCKERY} ${MOCKERY} --case underscore --dir $(1) --name $(2) --outpkg $(3) --output mocks/$(strip $(3)) endef -$(eval $(call makemock, pkg/blockchain, Plugin, blockchainmocks)) -$(eval $(call makemock, pkg/blockchain, Callbacks, blockchainmocks)) -$(eval $(call makemock, pkg/database, Plugin, databasemocks)) -$(eval $(call makemock, pkg/database, Callbacks, databasemocks)) -$(eval $(call makemock, pkg/publicstorage, Plugin, publicstoragemocks)) -$(eval $(call makemock, pkg/publicstorage, Callbacks, publicstoragemocks)) -$(eval $(call makemock, pkg/events, Plugin, eventsmocks)) -$(eval $(call makemock, pkg/events, PluginAll, eventsmocks)) -$(eval $(call makemock, pkg/events, Callbacks, eventsmocks)) -$(eval $(call makemock, pkg/identity, Plugin, identitymocks)) -$(eval $(call makemock, pkg/identity, Callbacks, identitymocks)) -$(eval $(call makemock, pkg/dataexchange, Plugin, dataexchangemocks)) -$(eval $(call makemock, pkg/dataexchange, Callbacks, dataexchangemocks)) -$(eval $(call makemock, pkg/tokens, Plugin, tokenmocks)) -$(eval $(call makemock, pkg/tokens, Callbacks, tokenmocks)) -$(eval $(call makemock, pkg/wsclient, WSClient, wsmocks)) -$(eval $(call makemock, internal/txcommon, Helper, txcommonmocks)) -$(eval $(call makemock, internal/identity, Manager, identitymanagermocks)) -$(eval $(call makemock, internal/batchpin, Submitter, batchpinmocks)) -$(eval $(call makemock, internal/sysmessaging, SystemEvents, sysmessagingmocks)) -$(eval $(call makemock, internal/sysmessaging, MessageSender, sysmessagingmocks)) -$(eval $(call makemock, internal/sysmessaging, LocalNodeInfo, sysmessagingmocks)) -$(eval $(call makemock, internal/syncasync, Bridge, syncasyncmocks)) -$(eval $(call makemock, internal/data, Manager, datamocks)) -$(eval $(call makemock, internal/batch, Manager, batchmocks)) -$(eval $(call makemock, internal/broadcast, Manager, broadcastmocks)) -$(eval $(call makemock, internal/privatemessaging, Manager, privatemessagingmocks)) -$(eval $(call makemock, internal/definitions, DefinitionHandlers, definitionsmocks)) -$(eval $(call makemock, internal/events, EventManager, eventmocks)) -$(eval $(call makemock, internal/networkmap, Manager, networkmapmocks)) -$(eval $(call makemock, internal/assets, Manager, assetmocks)) -$(eval $(call makemock, internal/contracts, Manager, contractmocks)) -$(eval $(call makemock, internal/oapiffi, FFISwaggerGen, oapiffimocks)) -$(eval $(call makemock, internal/orchestrator, Orchestrator, orchestratormocks)) -$(eval $(call makemock, internal/apiserver, Server, apiservermocks)) -$(eval $(call makemock, internal/apiserver, IServer, apiservermocks)) -$(eval $(call makemock, internal/metrics, Manager, metricsmocks)) +$(eval $(call makemock, pkg/blockchain, Plugin, blockchainmocks)) +$(eval $(call makemock, pkg/blockchain, Callbacks, blockchainmocks)) +$(eval $(call makemock, pkg/database, Plugin, databasemocks)) +$(eval $(call makemock, pkg/database, Callbacks, databasemocks)) +$(eval $(call makemock, pkg/publicstorage, Plugin, publicstoragemocks)) +$(eval $(call makemock, pkg/publicstorage, Callbacks, publicstoragemocks)) +$(eval $(call makemock, pkg/events, Plugin, eventsmocks)) +$(eval $(call makemock, pkg/events, PluginAll, eventsmocks)) +$(eval $(call makemock, pkg/events, Callbacks, eventsmocks)) +$(eval $(call makemock, pkg/identity, Plugin, identitymocks)) +$(eval $(call makemock, pkg/identity, 
Callbacks, identitymocks)) +$(eval $(call makemock, pkg/dataexchange, Plugin, dataexchangemocks)) +$(eval $(call makemock, pkg/dataexchange, Callbacks, dataexchangemocks)) +$(eval $(call makemock, pkg/tokens, Plugin, tokenmocks)) +$(eval $(call makemock, pkg/tokens, Callbacks, tokenmocks)) +$(eval $(call makemock, pkg/wsclient, WSClient, wsmocks)) +$(eval $(call makemock, internal/txcommon, Helper, txcommonmocks)) +$(eval $(call makemock, internal/identity, Manager, identitymanagermocks)) +$(eval $(call makemock, internal/batchpin, Submitter, batchpinmocks)) +$(eval $(call makemock, internal/sysmessaging, SystemEvents, sysmessagingmocks)) +$(eval $(call makemock, internal/sysmessaging, MessageSender, sysmessagingmocks)) +$(eval $(call makemock, internal/sysmessaging, LocalNodeInfo, sysmessagingmocks)) +$(eval $(call makemock, internal/syncasync, Bridge, syncasyncmocks)) +$(eval $(call makemock, internal/data, Manager, datamocks)) +$(eval $(call makemock, internal/batch, Manager, batchmocks)) +$(eval $(call makemock, internal/broadcast, Manager, broadcastmocks)) +$(eval $(call makemock, internal/privatemessaging, Manager, privatemessagingmocks)) +$(eval $(call makemock, internal/contracts, Manager, contractmocks)) +$(eval $(call makemock, internal/assets, Manager, assetmocks)) +$(eval $(call makemock, internal/definitions, DefinitionHandlers, definitionsmocks)) +$(eval $(call makemock, internal/metrics, Manager, metricsmocks)) +$(eval $(call makemock, internal/events, EventManager, eventmocks)) +$(eval $(call makemock, internal/networkmap, Manager, networkmapmocks)) +$(eval $(call makemock, internal/oapiffi, FFISwaggerGen, oapiffimocks)) +$(eval $(call makemock, internal/orchestrator, Orchestrator, orchestratormocks)) +$(eval $(call makemock, internal/apiserver, Server, apiservermocks)) +$(eval $(call makemock, internal/apiserver, IServer, apiservermocks)) firefly-nocgo: ${GOFILES} CGO_ENABLED=0 $(VGO) build -o ${BINARY_NAME}-nocgo -ldflags "-X main.buildDate=`date -u +\"%Y-%m-%dT%H:%M:%SZ\"` -X main.buildVersion=$(BUILD_VERSION)" -tags=prod -tags=prod -v diff --git a/db/migrations/postgres/000063_create_identities_table.down.sql b/db/migrations/postgres/000063_create_identities_table.down.sql new file mode 100644 index 0000000000..5fc97d9c6f --- /dev/null +++ b/db/migrations/postgres/000063_create_identities_table.down.sql @@ -0,0 +1,97 @@ +BEGIN; + +CREATE TABLE orgs ( + seq SERIAL PRIMARY KEY, + id UUID NOT NULL, + message_id UUID NOT NULL, + name VARCHAR(64) NOT NULL, + parent VARCHAR(1024), + identity VARCHAR(1024) NOT NULL, + description VARCHAR(4096) NOT NULL, + profile TEXT, + created BIGINT NOT NULL +); + +CREATE UNIQUE INDEX orgs_id ON orgs(id); +CREATE UNIQUE INDEX orgs_identity ON orgs(identity); +CREATE UNIQUE INDEX orgs_name ON orgs(name); + +CREATE TABLE nodes ( + seq SERIAL PRIMARY KEY, + id UUID NOT NULL, + message_id UUID NOT NULL, + owner VARCHAR(1024) NOT NULL, + name VARCHAR(64) NOT NULL, + description VARCHAR(4096) NOT NULL, + dx_peer VARCHAR(256), + dx_endpoint TEXT, + created BIGINT NOT NULL +); + +CREATE UNIQUE INDEX nodes_id ON nodes(id); +CREATE UNIQUE INDEX nodes_owner ON nodes(owner,name); +CREATE UNIQUE INDEX nodes_peer ON nodes(dx_peer); + +-- We only reconstitute orgs that were dropped during the original up migration. +-- These have the UUID of the verifier set to the same UUID as the org. 
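+-- As an illustrative example (the id below is hypothetical, not real data): the up migration stored each
+-- migrated org/node verifier with a hash built from the identity's UUID hex, dashes removed, written twice
+-- to fill 64 characters. An org id of '3fa85f64-5717-4562-b3fc-2c963f66afa6' therefore has the verifier hash
+-- '3fa85f6457174562b3fc2c963f66afa63fa85f6457174562b3fc2c963f66afa6', which is the value the
+-- REPLACE(i.id::text,'-','') || REPLACE(i.id::text,'-','') joins below reconstruct to find it.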
+INSERT INTO orgs ( + id, + parent, + message_id, + name, + description, + profile, + created, + identity + ) SELECT + i.id, + COALESCE(pv.value, '') as parent, + i.messages_claim, + i.name, + i.description, + i.profile, + i.created, + v.value as identity + FROM identities as i + LEFT JOIN verifiers v ON v.hash = REPLACE(i.id::text,'-','') || REPLACE(i.id::text,'-','') + LEFT JOIN verifiers pv ON pv.hash = REPLACE(i.parent::text,'-','') || REPLACE(i.parent::text,'-','') + WHERE i.did LIKE 'did:firefly:org/%' AND v.hash IS NOT NULL; + +-- We only reconstitute nodes that were dropped during the original up migration. +-- These have the Hash of the verifier set to the bytes from the UUID of the node (by taking the string and removing the dashes). +INSERT INTO nodes ( + id, + owner, + message_id, + name, + description, + dx_endpoint, + created, + dx_peer + ) SELECT + i.id, + COALESCE(pv.value, '') as owner, + i.messages_claim, + i.name, + i.description, + i.profile, + i.created, + v.value as dx_peer + FROM identities as i + LEFT JOIN verifiers v ON v.hash = REPLACE(i.id::text,'-','') || REPLACE(i.id::text,'-','') + LEFT JOIN verifiers pv ON pv.hash = REPLACE(i.parent::text,'-','') || REPLACE(i.parent::text,'-','') + WHERE i.did LIKE 'did:firefly:node/%' AND v.hash IS NOT NULL; + +DROP INDEX identities_id; +DROP INDEX identities_did; +DROP INDEX identities_name; + +DROP TABLE IF EXISTS identities; + +DROP INDEX verifiers_hash; +DROP INDEX verifiers_value; +DROP INDEX verifiers_identity; + +DROP TABLE IF EXISTS verifiers; + +COMMIT; \ No newline at end of file diff --git a/db/migrations/postgres/000063_create_identities_table.up.sql b/db/migrations/postgres/000063_create_identities_table.up.sql new file mode 100644 index 0000000000..b53c7e0b8e --- /dev/null +++ b/db/migrations/postgres/000063_create_identities_table.up.sql @@ -0,0 +1,143 @@ +BEGIN; + +CREATE TABLE identities ( + seq SERIAL PRIMARY KEY, + id UUID NOT NULL, + did VARCHAR(256) NOT NULL, + parent UUID, + messages_claim UUID NOT NULL, + messages_verification UUID, + messages_update UUID, + itype VARCHAR(64) NOT NULL, + namespace VARCHAR(64) NOT NULL, + name VARCHAR(64) NOT NULL, + description VARCHAR(4096) NOT NULL, + profile TEXT, + created BIGINT NOT NULL, + updated BIGINT NOT NULL +); + +CREATE UNIQUE INDEX identities_id ON identities(id); +CREATE UNIQUE INDEX identities_did ON identities(did); +CREATE UNIQUE INDEX identities_name ON identities(itype, namespace, name); + +CREATE TABLE verifiers ( + seq SERIAL PRIMARY KEY, + hash CHAR(64) NOT NULL, + identity UUID NOT NULL, + vtype VARCHAR(256) NOT NULL, + namespace VARCHAR(64) NOT NULL, + value TEXT NOT NULL, + created BIGINT NOT NULL +); + +CREATE UNIQUE INDEX verifiers_hash ON verifiers(hash); +CREATE UNIQUE INDEX verifiers_value ON verifiers(vtype, namespace, value); +CREATE INDEX verifiers_identity ON verifiers(identity); + +INSERT INTO identities ( + id, + did, + parent, + messages_claim, + itype, + namespace, + name, + description, + profile, + created, + updated + ) SELECT + o1.id, + 'did:firefly:org/' || o1.name, + o2.id, + o1.message_id, + 'org', + 'ff_system', + o1.name, + o1.description, + o1.profile, + o1.created, + o1.created + FROM orgs as o1 + LEFT JOIN orgs o2 ON o2.identity = o1.parent; + +INSERT INTO identities ( + id, + did, + parent, + messages_claim, + itype, + namespace, + name, + description, + profile, + created, + updated + ) SELECT + n.id, + 'did:firefly:node/' || n.name, + o.id, + n.message_id, + 'node', + 'ff_system', + n.name, + n.description, + 
n.dx_endpoint, + n.created, + n.created + FROM nodes as n + LEFT JOIN orgs o ON o.identity = n.owner; + +INSERT INTO verifiers ( + hash, + namespace, + identity, + vtype, + value, + created + ) SELECT + REPLACE(o.id::text, '-', '') || REPLACE(o.id::text, '-', ''), -- to avoid the need for hashing in the migration, use the convenient fact the UUID is known hex - have to write it twice to fill the 32B -- + 'ff_system', + o.id, + 'ethereum_address', + o.identity, + o.created + FROM orgs as o WHERE o.identity LIKE '0x%'; + +INSERT INTO verifiers ( + hash, + namespace, + identity, + vtype, + value, + created + ) SELECT + REPLACE(o.id::text, '-', '') || REPLACE(o.id::text, '-', ''), -- to avoid the need for hashing in the migration, use the convenient fact the UUID is known hex - have to write it twice to fill the 32B -- + 'ff_system', + o.id, + 'fabric_msp_id', + o.identity, + o.created + FROM orgs as o WHERE o.identity NOT LIKE '0x%'; + +INSERT INTO verifiers ( + hash, + namespace, + identity, + vtype, + value, + created + ) SELECT + REPLACE(n.id::text, '-', '') || REPLACE(n.id::text, '-', ''), -- to avoid the need for hashing in the migration, use the convenient fact the UUID is known hex - have to write it twice to fill the 32B -- + 'ff_system', + n.id, + 'dx_peer_id', + n.dx_peer, + n.created + FROM nodes as n; + +DROP TABLE orgs; +DROP TABLE nodes; + +COMMIT; \ No newline at end of file diff --git a/db/migrations/postgres/000064_allow_null_data.down.sql b/db/migrations/postgres/000064_allow_null_data.down.sql new file mode 100644 index 0000000000..d2fe991c9e --- /dev/null +++ b/db/migrations/postgres/000064_allow_null_data.down.sql @@ -0,0 +1 @@ +-- No down migration for this one diff --git a/db/migrations/postgres/000064_allow_null_data.up.sql b/db/migrations/postgres/000064_allow_null_data.up.sql new file mode 100644 index 0000000000..9c9d72001b --- /dev/null +++ b/db/migrations/postgres/000064_allow_null_data.up.sql @@ -0,0 +1,5 @@ +BEGIN; + +ALTER TABLE data ALTER COLUMN value DROP NOT NULL; + +COMMIT; \ No newline at end of file diff --git a/db/migrations/postgres/000065_pin_signer_event_correlator.down.sql b/db/migrations/postgres/000065_pin_signer_event_correlator.down.sql new file mode 100644 index 0000000000..a75a963262 --- /dev/null +++ b/db/migrations/postgres/000065_pin_signer_event_correlator.down.sql @@ -0,0 +1,4 @@ +BEGIN; +ALTER TABLE pins DROP COLUMN signer; +ALTER TABLE events DROP COLUMN cid; +COMMIT; diff --git a/db/migrations/postgres/000065_pin_signer_event_correlator.up.sql b/db/migrations/postgres/000065_pin_signer_event_correlator.up.sql new file mode 100644 index 0000000000..99d7afcbac --- /dev/null +++ b/db/migrations/postgres/000065_pin_signer_event_correlator.up.sql @@ -0,0 +1,6 @@ +BEGIN; +ALTER TABLE pins ADD COLUMN signer TEXT; +UPDATE pins SET signer = ''; + +ALTER TABLE events ADD COLUMN cid UUID; +COMMIT; diff --git a/db/migrations/sqlite/000063_create_identities_table.down.sql b/db/migrations/sqlite/000063_create_identities_table.down.sql new file mode 100644 index 0000000000..1703138561 --- /dev/null +++ b/db/migrations/sqlite/000063_create_identities_table.down.sql @@ -0,0 +1,93 @@ +CREATE TABLE orgs ( + seq INTEGER PRIMARY KEY AUTOINCREMENT, + id UUID NOT NULL, + message_id UUID NOT NULL, + name VARCHAR(64) NOT NULL, + parent VARCHAR(1024), + identity VARCHAR(1024) NOT NULL, + description VARCHAR(4096) NOT NULL, + profile TEXT, + created BIGINT NOT NULL +); + +CREATE UNIQUE INDEX orgs_id ON orgs(id); +CREATE UNIQUE INDEX orgs_identity ON 
orgs(identity); +CREATE UNIQUE INDEX orgs_name ON orgs(name); + +CREATE TABLE nodes ( + seq INTEGER PRIMARY KEY AUTOINCREMENT, + id UUID NOT NULL, + message_id UUID NOT NULL, + owner VARCHAR(1024) NOT NULL, + name VARCHAR(64) NOT NULL, + description VARCHAR(4096) NOT NULL, + dx_peer VARCHAR(256), + dx_endpoint TEXT, + created BIGINT NOT NULL +); + +CREATE UNIQUE INDEX nodes_id ON nodes(id); +CREATE UNIQUE INDEX nodes_owner ON nodes(owner,name); +CREATE UNIQUE INDEX nodes_peer ON nodes(dx_peer); + +-- We only reconstitute orgs that were dropped during the original up migration. +-- These have the UUID of the verifier set to the same UUID as the org. +INSERT INTO orgs ( + id, + parent, + message_id, + name, + description, + profile, + created, + identity + ) SELECT + i.id, + COALESCE(pv.value, '') as parent, + i.messages_claim, + i.name, + i.description, + i.profile, + i.created, + v.value as identity + FROM identities as i + LEFT JOIN verifiers v ON v.hash = REPLACE(hex(i.id),'-','') || REPLACE(hex(i.id),'-','') + LEFT JOIN verifiers pv ON pv.hash = REPLACE(hex(i.parent),'-','') || REPLACE(hex(i.parent),'-','') + WHERE i.did LIKE 'did:firefly:org/%' AND v.hash IS NOT NULL; + +-- We only reconstitute nodes that were dropped during the original up migration. +-- These have the Hash of the verifier set to the bytes from the UUID of the node (by taking the string and removing the dashes). +INSERT INTO nodes ( + id, + owner, + message_id, + name, + description, + dx_endpoint, + created, + dx_peer + ) SELECT + i.id, + COALESCE(pv.value, '') as owner, + i.messages_claim, + i.name, + i.description, + i.profile, + i.created, + v.value as dx_peer + FROM identities as i + LEFT JOIN verifiers v ON v.hash = REPLACE(hex(i.id),'-','') || REPLACE(hex(i.id),'-','') + LEFT JOIN verifiers pv ON pv.hash = REPLACE(hex(i.parent),'-','') || REPLACE(hex(i.parent),'-','') + WHERE i.did LIKE 'did:firefly:node/%' AND v.hash IS NOT NULL; + +DROP INDEX identities_id; +DROP INDEX identities_did; +DROP INDEX identities_name; + +DROP TABLE IF EXISTS identities; + +DROP INDEX verifiers_hash; +DROP INDEX verifiers_value; +DROP INDEX verifiers_identity; + +DROP TABLE IF EXISTS verifiers; diff --git a/db/migrations/sqlite/000063_create_identities_table.up.sql b/db/migrations/sqlite/000063_create_identities_table.up.sql new file mode 100644 index 0000000000..3ca8a8ce12 --- /dev/null +++ b/db/migrations/sqlite/000063_create_identities_table.up.sql @@ -0,0 +1,135 @@ +CREATE TABLE identities ( + seq INTEGER PRIMARY KEY AUTOINCREMENT, + id UUID NOT NULL, + did VARCHAR(256) NOT NULL, + parent UUID, + messages_claim UUID NOT NULL, + messages_verification UUID, + messages_update UUID, + itype VARCHAR(64) NOT NULL, + namespace VARCHAR(64) NOT NULL, + name VARCHAR(64) NOT NULL, + description VARCHAR(4096) NOT NULL, + profile TEXT, + created BIGINT NOT NULL, + updated BIGINT NOT NULL +); + +CREATE UNIQUE INDEX identities_id ON identities(id); +CREATE UNIQUE INDEX identities_did ON identities(did); +CREATE UNIQUE INDEX identities_name ON identities(itype, namespace, name); + +CREATE TABLE verifiers ( + seq INTEGER PRIMARY KEY AUTOINCREMENT, + hash CHAR(64) NOT NULL, + identity UUID NOT NULL, + vtype VARCHAR(256) NOT NULL, + namespace VARCHAR(64) NOT NULL, + value TEXT NOT NULL, + created BIGINT NOT NULL +); + +CREATE UNIQUE INDEX verifiers_hash ON verifiers(hash); +CREATE UNIQUE INDEX verifiers_value ON verifiers(vtype, value); +CREATE INDEX verifiers_identity ON verifiers(identity); + +INSERT INTO identities ( + id, + did, + parent, + 
messages_claim, + itype, + namespace, + name, + description, + profile, + created, + updated + ) SELECT + o1.id, + 'did:firefly:org/' || o1.name, + o2.id, + o1.message_id, + 'org', + 'ff_system', + o1.name, + o1.description, + o1.profile, + o1.created, + o1.created + FROM orgs as o1 + LEFT JOIN orgs o2 ON o2.identity = o1.parent; + +INSERT INTO identities ( + id, + did, + parent, + messages_claim, + itype, + namespace, + name, + description, + profile, + created, + updated + ) SELECT + n.id, + 'did:firefly:node/' || n.name, + o.id, + n.message_id, + 'node', + 'ff_system', + n.name, + n.description, + n.dx_endpoint, + n.created, + n.created + FROM nodes as n + LEFT JOIN orgs o ON o.identity = n.owner; + +INSERT INTO verifiers ( + hash, + namespace, + identity, + vtype, + value, + created + ) SELECT + REPLACE(hex(o.id), '-', '') || REPLACE(hex(o.id), '-', ''), -- to avoid the need for hashing in the migration, use the convenient fact the UUID is known hex - have to write it twice to fill the 32B -- + 'ff_system', + o.id, + 'ethereum_address', + o.identity, + o.created + FROM orgs as o WHERE o.identity LIKE '0x%'; + +INSERT INTO verifiers ( + hash, + namespace, + identity, + vtype, + value, + created + ) SELECT + REPLACE(hex(o.id), '-', '') || REPLACE(hex(o.id), '-', ''), -- to avoid the need for hashing in the migration, use the convenient fact the UUID is known hex - have to write it twice to fill the 32B -- + 'ff_system', + o.id, + 'fabric_msp_id', + o.identity, + o.created + FROM orgs as o WHERE o.identity NOT LIKE '0x%'; + +INSERT INTO verifiers ( + hash, + namespace, + identity, + vtype, + value, + created + ) SELECT + REPLACE(hex(n.id), '-', '') || REPLACE(hex(n.id), '-', ''), -- to avoid the need for hashing in the migration, use the convenient fact the UUID is known hex - have to write it twice to fill the 32B -- + 'ff_system', + n.id, + 'dx_peer_id', + n.dx_peer, + n.created + FROM nodes as n; + +DROP TABLE orgs; +DROP TABLE nodes; diff --git a/db/migrations/sqlite/000064_allow_null_data.down.sql b/db/migrations/sqlite/000064_allow_null_data.down.sql new file mode 100644 index 0000000000..d2fe991c9e --- /dev/null +++ b/db/migrations/sqlite/000064_allow_null_data.down.sql @@ -0,0 +1 @@ +-- No down migration for this one diff --git a/db/migrations/sqlite/000064_allow_null_data.up.sql b/db/migrations/sqlite/000064_allow_null_data.up.sql new file mode 100644 index 0000000000..72a3af4dfb --- /dev/null +++ b/db/migrations/sqlite/000064_allow_null_data.up.sql @@ -0,0 +1,4 @@ +ALTER TABLE data RENAME COLUMN value TO value_old; +ALTER TABLE data ADD COLUMN value TEXT; +UPDATE data SET value = value_old; +ALTER TABLE data DROP COLUMN value_old; diff --git a/db/migrations/sqlite/000065_pin_signer_event_correlator.down.sql b/db/migrations/sqlite/000065_pin_signer_event_correlator.down.sql new file mode 100644 index 0000000000..fbed51669a --- /dev/null +++ b/db/migrations/sqlite/000065_pin_signer_event_correlator.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE pins DROP COLUMN signer; +ALTER TABLE events DROP COLUMN cid; diff --git a/db/migrations/sqlite/000065_pin_signer_event_correlator.up.sql b/db/migrations/sqlite/000065_pin_signer_event_correlator.up.sql new file mode 100644 index 0000000000..9d796de004 --- /dev/null +++ b/db/migrations/sqlite/000065_pin_signer_event_correlator.up.sql @@ -0,0 +1,4 @@ +ALTER TABLE pins ADD COLUMN signer TEXT; +UPDATE pins SET signer = ''; + +ALTER TABLE events ADD COLUMN cid UUID; diff --git a/docs/swagger/swagger.yaml b/docs/swagger/swagger.yaml index 451e7740b1..0e1a7ac0d5 100644 ---
a/docs/swagger/swagger.yaml +++ b/docs/swagger/swagger.yaml @@ -3900,6 +3900,11 @@ paths: schema: default: 120s type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: correlator + schema: + type: string - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' in: query name: created @@ -3975,6 +3980,7 @@ paths: application/json: schema: properties: + correlator: {} created: {} id: {} namespace: @@ -3991,7 +3997,8 @@ paths: - message_rejected - namespace_confirmed - datatype_confirmed - - group_confirmed + - identity_confirmed + - identity_updated - token_pool_confirmed - token_transfer_confirmed - token_transfer_op_failed @@ -4036,6 +4043,7 @@ paths: application/json: schema: properties: + correlator: {} created: {} id: {} namespace: @@ -4052,7 +4060,8 @@ paths: - message_rejected - namespace_confirmed - datatype_confirmed - - group_confirmed + - identity_confirmed + - identity_updated - token_pool_confirmed - token_transfer_confirmed - token_transfer_op_failed @@ -4185,44 +4194,541 @@ paths: name: ns required: true schema: - example: default + example: default + type: string + - description: 'TODO: Description' + in: path + name: groupid + required: true + schema: + type: string + - description: Server-side request timeout (millseconds, or set a custom suffix + like 10s) + in: header + name: Request-Timeout + schema: + default: 120s + type: string + responses: + "200": + content: + application/json: + schema: + properties: + created: {} + hash: {} + ledger: {} + members: + items: + properties: + identity: + type: string + node: {} + type: object + type: array + message: {} + name: + type: string + namespace: + type: string + type: object + description: Success + default: + description: "" + /namespaces/{ns}/identities: + get: + description: 'TODO: Description' + operationId: getIdentities + parameters: + - description: 'TODO: Description' + in: path + name: ns + required: true + schema: + example: default + type: string + - description: Server-side request timeout (millseconds, or set a custom suffix + like 10s) + in: header + name: Request-Timeout + schema: + default: 120s + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: created + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: description + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: did + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: id + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: messages.claim + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: messages.update + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: messages.verification + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: name + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: namespace + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! 
!@ !^' + in: query + name: parent + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: profile + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: type + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: updated + schema: + type: string + - description: Sort field. For multi-field sort use comma separated values (or + multiple query values) with '-' prefix for descending + in: query + name: sort + schema: + type: string + - description: Ascending sort order (overrides all fields in a multi-field sort) + in: query + name: ascending + schema: + type: string + - description: Descending sort order (overrides all fields in a multi-field + sort) + in: query + name: descending + schema: + type: string + - description: 'The number of records to skip (max: 1,000). Unsuitable for bulk + operations' + in: query + name: skip + schema: + type: string + - description: 'The maximum number of records to return (max: 1,000)' + in: query + name: limit + schema: + example: "25" + type: string + - description: Return a total count as well as items (adds extra database processing) + in: query + name: count + schema: + type: string + responses: + "200": + content: + application/json: + schema: + items: + properties: + created: {} + description: + type: string + did: + type: string + id: {} + messages: + properties: + claim: {} + update: {} + verification: {} + type: object + name: + type: string + namespace: + type: string + parent: {} + profile: + additionalProperties: {} + type: object + type: + enum: + - org + - node + - custom + type: string + updated: {} + type: object + type: array + description: Success + default: + description: "" + post: + description: 'TODO: Description' + operationId: postNewIdentity + parameters: + - description: 'TODO: Description' + in: path + name: ns + required: true + schema: + example: default + type: string + - description: When true the HTTP request blocks until the message is confirmed + in: query + name: confirm + schema: + type: string + - description: Server-side request timeout (millseconds, or set a custom suffix + like 10s) + in: header + name: Request-Timeout + schema: + default: 120s + type: string + requestBody: + content: + application/json: + schema: + type: object + responses: + "200": + content: + application/json: + schema: + properties: + created: {} + description: + type: string + did: + type: string + id: {} + messages: + properties: + claim: {} + update: {} + verification: {} + type: object + name: + type: string + namespace: + type: string + parent: {} + profile: + additionalProperties: {} + type: object + type: + enum: + - org + - node + - custom + type: string + updated: {} + type: object + description: Success + "202": + content: + application/json: + schema: + properties: + created: {} + description: + type: string + did: + type: string + id: {} + messages: + properties: + claim: {} + update: {} + verification: {} + type: object + name: + type: string + namespace: + type: string + parent: {} + profile: + additionalProperties: {} + type: object + type: + enum: + - org + - node + - custom + type: string + updated: {} + type: object + description: Success + default: + description: "" + /namespaces/{ns}/identities/{iid}: + get: + description: 'TODO: Description' + operationId: getIdentityByID + parameters: + - description: 'TODO: 
Description' + in: path + name: ns + required: true + schema: + example: default + type: string + - description: 'TODO: Description' + in: path + name: iid + required: true + schema: + example: id + type: string + - description: Server-side request timeout (millseconds, or set a custom suffix + like 10s) + in: header + name: Request-Timeout + schema: + default: 120s + type: string + responses: + "200": + content: + application/json: + schema: + properties: + created: {} + description: + type: string + did: + type: string + id: {} + messages: + properties: + claim: {} + update: {} + verification: {} + type: object + name: + type: string + namespace: + type: string + parent: {} + profile: + additionalProperties: {} + type: object + type: + enum: + - org + - node + - custom + type: string + updated: {} + type: object + description: Success + default: + description: "" + /namespaces/{ns}/identities/{iid}/did: + get: + description: 'TODO: Description' + operationId: getIdentityDID + parameters: + - description: 'TODO: Description' + in: path + name: ns + required: true + schema: + example: default + type: string + - description: 'TODO: Description' + in: path + name: iid + required: true + schema: + example: id + type: string + - description: Server-side request timeout (millseconds, or set a custom suffix + like 10s) + in: header + name: Request-Timeout + schema: + default: 120s + type: string + responses: + "200": + content: + application/json: + schema: + properties: + '@context': + items: + type: string + type: array + authentication: + items: + type: string + type: array + id: + type: string + verificationMethod: + items: + properties: + blockchainAcountId: + type: string + controller: + type: string + dataExchangePeerID: + type: string + id: + type: string + mspIdentityString: + type: string + type: + type: string + type: object + type: array + type: object + description: Success + default: + description: "" + /namespaces/{ns}/identities/{iid}/verifiers: + get: + description: 'TODO: Description' + operationId: getIdentityVerifiers + parameters: + - description: 'TODO: Description' + in: path + name: ns + required: true + schema: + example: default + type: string + - description: 'TODO: Description' + in: path + name: iid + required: true + schema: + example: id + type: string + - description: Server-side request timeout (millseconds, or set a custom suffix + like 10s) + in: header + name: Request-Timeout + schema: + default: 120s + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: created + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: hash + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: identity + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: namespace + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: type + schema: type: string - - description: 'TODO: Description' - in: path - name: groupid - required: true + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: value schema: type: string - - description: Server-side request timeout (millseconds, or set a custom suffix - like 10s) - in: header - name: Request-Timeout + - description: Sort field. 
For multi-field sort use comma separated values (or + multiple query values) with '-' prefix for descending + in: query + name: sort + schema: + type: string + - description: Ascending sort order (overrides all fields in a multi-field sort) + in: query + name: ascending + schema: + type: string + - description: Descending sort order (overrides all fields in a multi-field + sort) + in: query + name: descending + schema: + type: string + - description: 'The number of records to skip (max: 1,000). Unsuitable for bulk + operations' + in: query + name: skip + schema: + type: string + - description: 'The maximum number of records to return (max: 1,000)' + in: query + name: limit + schema: + example: "25" + type: string + - description: Return a total count as well as items (adds extra database processing) + in: query + name: count schema: - default: 120s type: string responses: "200": content: application/json: schema: - properties: - created: {} - hash: {} - ledger: {} - members: - items: - properties: - identity: - type: string - node: {} - type: object - type: array - message: {} - name: - type: string - namespace: - type: string - type: object + items: + properties: + created: {} + hash: {} + identity: {} + namespace: + type: string + type: + enum: + - ethereum_address + - fabric_msp_id + - dx_peer_id + type: string + value: + type: string + type: object + type: array description: Success default: description: "" @@ -4629,6 +5135,11 @@ paths: schema: default: 120s type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: correlator + schema: + type: string - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' in: query name: created @@ -4704,6 +5215,7 @@ paths: application/json: schema: properties: + correlator: {} created: {} id: {} namespace: @@ -4720,7 +5232,8 @@ paths: - message_rejected - namespace_confirmed - datatype_confirmed - - group_confirmed + - identity_confirmed + - identity_updated - token_pool_confirmed - token_transfer_confirmed - token_transfer_op_failed @@ -8446,20 +8959,200 @@ paths: name: ns required: true schema: - example: default + example: default + type: string + - description: 'TODO: Description' + in: path + name: txnid + required: true + schema: + type: string + - description: Server-side request timeout (millseconds, or set a custom suffix + like 10s) + in: header + name: Request-Timeout + schema: + default: 120s + type: string + responses: + "200": + content: + application/json: + schema: + items: + properties: + created: {} + error: + type: string + id: {} + input: + additionalProperties: {} + type: object + namespace: + type: string + output: + additionalProperties: {} + type: object + plugin: + type: string + status: + type: string + tx: {} + type: + enum: + - blockchain_batch_pin + - blockchain_invoke + - publicstorage_batch_broadcast + - dataexchange_batch_send + - dataexchange_blob_send + - token_create_pool + - token_activate_pool + - token_transfer + - token_approval + type: string + updated: {} + type: object + type: array + description: Success + default: + description: "" + /namespaces/{ns}/transactions/{txnid}/status: + get: + description: 'TODO: Description' + operationId: getTxnStatus + parameters: + - description: 'TODO: Description' + in: path + name: ns + required: true + schema: + example: default + type: string + - description: 'TODO: Description' + in: path + name: txnid + required: true + schema: + type: string + - description: Server-side request timeout 
(millseconds, or set a custom suffix + like 10s) + in: header + name: Request-Timeout + schema: + default: 120s + type: string + responses: + "200": + content: + application/json: + schema: + properties: + details: + items: + properties: + error: + type: string + id: {} + info: + additionalProperties: {} + type: object + status: + type: string + subtype: + type: string + timestamp: {} + type: + type: string + type: object + type: array + status: + type: string + type: object + description: Success + default: + description: "" + /namespaces/{ns}/verifiers: + get: + description: 'TODO: Description' + operationId: getVerifiers + parameters: + - description: 'TODO: Description' + in: path + name: ns + required: true + schema: + example: default + type: string + - description: Server-side request timeout (millseconds, or set a custom suffix + like 10s) + in: header + name: Request-Timeout + schema: + default: 120s + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: created + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: hash + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: identity + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: namespace + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: type + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: value + schema: + type: string + - description: Sort field. For multi-field sort use comma separated values (or + multiple query values) with '-' prefix for descending + in: query + name: sort + schema: + type: string + - description: Ascending sort order (overrides all fields in a multi-field sort) + in: query + name: ascending + schema: + type: string + - description: Descending sort order (overrides all fields in a multi-field + sort) + in: query + name: descending + schema: + type: string + - description: 'The number of records to skip (max: 1,000). 
Unsuitable for bulk + operations' + in: query + name: skip + schema: type: string - - description: 'TODO: Description' - in: path - name: txnid - required: true + - description: 'The maximum number of records to return (max: 1,000)' + in: query + name: limit schema: + example: "25" type: string - - description: Server-side request timeout (millseconds, or set a custom suffix - like 10s) - in: header - name: Request-Timeout + - description: Return a total count as well as items (adds extra database processing) + in: query + name: count schema: - default: 120s type: string responses: "200": @@ -8469,44 +9162,27 @@ paths: items: properties: created: {} - error: - type: string - id: {} - input: - additionalProperties: {} - type: object + hash: {} + identity: {} namespace: type: string - output: - additionalProperties: {} - type: object - plugin: - type: string - status: - type: string - tx: {} type: enum: - - blockchain_batch_pin - - blockchain_invoke - - publicstorage_batch_broadcast - - dataexchange_batch_send - - dataexchange_blob_send - - token_create_pool - - token_activate_pool - - token_transfer - - token_approval + - ethereum_address + - fabric_msp_id + - dx_peer_id + type: string + value: type: string - updated: {} type: object type: array description: Success default: description: "" - /namespaces/{ns}/transactions/{txnid}/status: + /namespaces/{ns}/verifiers/{vid}: get: description: 'TODO: Description' - operationId: getTxnStatus + operationId: getVerifierByID parameters: - description: 'TODO: Description' in: path @@ -8517,9 +9193,10 @@ paths: type: string - description: 'TODO: Description' in: path - name: txnid + name: vid required: true schema: + example: id type: string - description: Server-side request timeout (millseconds, or set a custom suffix like 10s) @@ -8534,25 +9211,18 @@ paths: application/json: schema: properties: - details: - items: - properties: - error: - type: string - id: {} - info: - additionalProperties: {} - type: object - status: - type: string - subtype: - type: string - timestamp: {} - type: - type: string - type: object - type: array - status: + created: {} + hash: {} + identity: {} + namespace: + type: string + type: + enum: + - ethereum_address + - fabric_msp_id + - dx_peer_id + type: string + value: type: string type: object description: Success @@ -8582,22 +9252,27 @@ paths: type: string - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' in: query - name: dx.endpoint + name: did schema: type: string - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' in: query - name: dx.peer + name: id schema: type: string - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' in: query - name: id + name: messages.claim schema: type: string - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' in: query - name: message + name: messages.update + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: messages.verification schema: type: string - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' @@ -8607,7 +9282,27 @@ paths: type: string - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' in: query - name: owner + name: namespace + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: parent + schema: + type: string + - description: 'Data filter field. 
Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: profile + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: type + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: updated schema: type: string - description: Sort field. For multi-field sort use comma separated values (or @@ -8653,20 +9348,30 @@ paths: created: {} description: type: string - dx: + did: + type: string + id: {} + messages: properties: - endpoint: - additionalProperties: {} - type: object - peer: - type: string + claim: {} + update: {} + verification: {} type: object - id: {} - message: {} name: type: string - owner: + namespace: + type: string + parent: {} + profile: + additionalProperties: {} + type: object + type: + enum: + - org + - node + - custom type: string + updated: {} type: object description: Success default: @@ -8698,20 +9403,30 @@ paths: created: {} description: type: string - dx: + did: + type: string + id: {} + messages: properties: - endpoint: - additionalProperties: {} - type: object - peer: - type: string + claim: {} + update: {} + verification: {} type: object - id: {} - message: {} name: type: string - owner: + namespace: + type: string + parent: {} + profile: + additionalProperties: {} + type: object + type: + enum: + - org + - node + - custom type: string + updated: {} type: object description: Success default: @@ -8748,20 +9463,30 @@ paths: created: {} description: type: string - dx: + did: + type: string + id: {} + messages: properties: - endpoint: - additionalProperties: {} - type: object - peer: - type: string + claim: {} + update: {} + verification: {} type: object - id: {} - message: {} name: type: string - owner: + namespace: + type: string + parent: {} + profile: + additionalProperties: {} + type: object + type: + enum: + - org + - node + - custom type: string + updated: {} type: object description: Success "202": @@ -8772,20 +9497,30 @@ paths: created: {} description: type: string - dx: + did: + type: string + id: {} + messages: properties: - endpoint: - additionalProperties: {} - type: object - peer: - type: string + claim: {} + update: {} + verification: {} type: object - id: {} - message: {} name: type: string - owner: + namespace: + type: string + parent: {} + profile: + additionalProperties: {} + type: object + type: + enum: + - org + - node + - custom type: string + updated: {} type: object description: Success default: @@ -8812,6 +9547,11 @@ paths: name: description schema: type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: did + schema: + type: string - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' in: query name: id @@ -8819,12 +9559,27 @@ paths: type: string - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' in: query - name: identity + name: messages.claim schema: type: string - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' in: query - name: message + name: messages.update + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: messages.verification + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: name + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! 
!@ !^' + in: query + name: namespace schema: type: string - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' @@ -8837,6 +9592,16 @@ paths: name: profile schema: type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: type + schema: + type: string + - description: 'Data filter field. Prefixes supported: > >= < <= @ ^ ! !@ !^' + in: query + name: updated + schema: + type: string - description: Sort field. For multi-field sort use comma separated values (or multiple query values) with '-' prefix for descending in: query @@ -8880,17 +9645,30 @@ paths: created: {} description: type: string - id: {} - identity: + did: type: string - message: {} + id: {} + messages: + properties: + claim: {} + update: {} + verification: {} + type: object name: type: string - parent: + namespace: type: string + parent: {} profile: additionalProperties: {} type: object + type: + enum: + - org + - node + - custom + type: string + updated: {} type: object description: Success default: @@ -8919,12 +9697,11 @@ paths: properties: description: type: string - identity: + key: type: string name: type: string - parent: - type: string + parent: {} profile: additionalProperties: {} type: object @@ -8938,17 +9715,30 @@ paths: created: {} description: type: string - id: {} - identity: + did: type: string - message: {} + id: {} + messages: + properties: + claim: {} + update: {} + verification: {} + type: object name: type: string - parent: + namespace: type: string + parent: {} profile: additionalProperties: {} type: object + type: + enum: + - org + - node + - custom + type: string + updated: {} type: object description: Success "202": @@ -8959,17 +9749,30 @@ paths: created: {} description: type: string - id: {} - identity: + did: type: string - message: {} + id: {} + messages: + properties: + claim: {} + update: {} + verification: {} + type: object name: type: string - parent: + namespace: type: string + parent: {} profile: additionalProperties: {} type: object + type: + enum: + - org + - node + - custom + type: string + updated: {} type: object description: Success default: @@ -9001,17 +9804,30 @@ paths: created: {} description: type: string - id: {} - identity: + did: type: string - message: {} + id: {} + messages: + properties: + claim: {} + update: {} + verification: {} + type: object name: type: string - parent: + namespace: type: string + parent: {} profile: additionalProperties: {} type: object + type: + enum: + - org + - node + - custom + type: string + updated: {} type: object description: Success default: @@ -9048,17 +9864,30 @@ paths: created: {} description: type: string - id: {} - identity: + did: type: string - message: {} + id: {} + messages: + properties: + claim: {} + update: {} + verification: {} + type: object name: type: string - parent: + namespace: type: string + parent: {} profile: additionalProperties: {} type: object + type: + enum: + - org + - node + - custom + type: string + updated: {} type: object description: Success "202": @@ -9069,17 +9898,30 @@ paths: created: {} description: type: string - id: {} - identity: + did: type: string - message: {} + id: {} + messages: + properties: + claim: {} + update: {} + verification: {} + type: object name: type: string - parent: + namespace: type: string + parent: {} profile: additionalProperties: {} type: object + type: + enum: + - org + - node + - custom + type: string + updated: {} type: object description: Success default: @@ -9117,9 +9959,9 @@ paths: type: object org: 
properties: - id: {} - identity: + did: type: string + id: {} name: type: string registered: diff --git a/internal/apiserver/route_get_identities.go b/internal/apiserver/route_get_identities.go new file mode 100644 index 0000000000..329c4302f6 --- /dev/null +++ b/internal/apiserver/route_get_identities.go @@ -0,0 +1,45 @@ +// Copyright © 2022 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package apiserver + +import ( + "net/http" + + "github.com/hyperledger/firefly/internal/config" + "github.com/hyperledger/firefly/internal/i18n" + "github.com/hyperledger/firefly/internal/oapispec" + "github.com/hyperledger/firefly/pkg/database" + "github.com/hyperledger/firefly/pkg/fftypes" +) + +var getIdentities = &oapispec.Route{ + Name: "getIdentities", + Path: "namespaces/{ns}/identities", + Method: http.MethodGet, + PathParams: []*oapispec.PathParam{ + {Name: "ns", ExampleFromConf: config.NamespacesDefault, Description: i18n.MsgTBD}, + }, + QueryParams: nil, + FilterFactory: database.IdentityQueryFactory, + Description: i18n.MsgTBD, + JSONInputValue: nil, + JSONOutputValue: func() interface{} { return &[]*fftypes.Identity{} }, + JSONOutputCodes: []int{http.StatusOK}, + JSONHandler: func(r *oapispec.APIRequest) (output interface{}, err error) { + return filterResult(getOr(r.Ctx).NetworkMap().GetIdentities(r.Ctx, r.PP["ns"], r.Filter)) + }, +} diff --git a/internal/apiserver/route_get_identities_test.go b/internal/apiserver/route_get_identities_test.go new file mode 100644 index 0000000000..847ad9d3e9 --- /dev/null +++ b/internal/apiserver/route_get_identities_test.go @@ -0,0 +1,41 @@ +// Copyright © 2021 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package apiserver + +import ( + "net/http/httptest" + "testing" + + "github.com/hyperledger/firefly/mocks/networkmapmocks" + "github.com/hyperledger/firefly/pkg/fftypes" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +func TestGetIdentities(t *testing.T) { + o, r := newTestAPIServer() + mnm := &networkmapmocks.Manager{} + o.On("NetworkMap").Return(mnm) + req := httptest.NewRequest("GET", "/api/v1/namespaces/ns1/identities", nil) + req.Header.Set("Content-Type", "application/json; charset=utf-8") + res := httptest.NewRecorder() + + mnm.On("GetIdentities", mock.Anything, "ns1", mock.Anything).Return([]*fftypes.Identity{}, nil, nil) + r.ServeHTTP(res, req) + + assert.Equal(t, 200, res.Result().StatusCode) +} diff --git a/internal/apiserver/route_get_identity_by_id.go b/internal/apiserver/route_get_identity_by_id.go new file mode 100644 index 0000000000..95532e335c --- /dev/null +++ b/internal/apiserver/route_get_identity_by_id.go @@ -0,0 +1,44 @@ +// Copyright © 2022 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package apiserver + +import ( + "net/http" + + "github.com/hyperledger/firefly/internal/config" + "github.com/hyperledger/firefly/internal/i18n" + "github.com/hyperledger/firefly/internal/oapispec" + "github.com/hyperledger/firefly/pkg/fftypes" +) + +var getIdentityByID = &oapispec.Route{ + Name: "getIdentityByID", + Path: "namespaces/{ns}/identities/{iid}", + Method: http.MethodGet, + PathParams: []*oapispec.PathParam{ + {Name: "ns", ExampleFromConf: config.NamespacesDefault, Description: i18n.MsgTBD}, + {Name: "iid", Example: "id", Description: i18n.MsgTBD}, + }, + QueryParams: nil, + Description: i18n.MsgTBD, + JSONInputValue: nil, + JSONOutputValue: func() interface{} { return &fftypes.Identity{} }, + JSONOutputCodes: []int{http.StatusOK}, + JSONHandler: func(r *oapispec.APIRequest) (output interface{}, err error) { + return getOr(r.Ctx).NetworkMap().GetIdentityByID(r.Ctx, r.PP["ns"], r.PP["iid"]) + }, +} diff --git a/internal/apiserver/route_get_identity_by_id_test.go b/internal/apiserver/route_get_identity_by_id_test.go new file mode 100644 index 0000000000..83a511dc4d --- /dev/null +++ b/internal/apiserver/route_get_identity_by_id_test.go @@ -0,0 +1,41 @@ +// Copyright © 2021 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package apiserver + +import ( + "net/http/httptest" + "testing" + + "github.com/hyperledger/firefly/mocks/networkmapmocks" + "github.com/hyperledger/firefly/pkg/fftypes" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +func TestGetIdentityByID(t *testing.T) { + o, r := newTestAPIServer() + mnm := &networkmapmocks.Manager{} + o.On("NetworkMap").Return(mnm) + req := httptest.NewRequest("GET", "/api/v1/namespaces/ns1/identities/id1", nil) + req.Header.Set("Content-Type", "application/json; charset=utf-8") + res := httptest.NewRecorder() + + mnm.On("GetIdentityByID", mock.Anything, "ns1", "id1").Return(&fftypes.Identity{}, nil, nil) + r.ServeHTTP(res, req) + + assert.Equal(t, 200, res.Result().StatusCode) +} diff --git a/internal/apiserver/route_get_identity_did.go b/internal/apiserver/route_get_identity_did.go new file mode 100644 index 0000000000..6995049ad5 --- /dev/null +++ b/internal/apiserver/route_get_identity_did.go @@ -0,0 +1,44 @@ +// Copyright © 2022 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package apiserver + +import ( + "net/http" + + "github.com/hyperledger/firefly/internal/config" + "github.com/hyperledger/firefly/internal/i18n" + "github.com/hyperledger/firefly/internal/networkmap" + "github.com/hyperledger/firefly/internal/oapispec" +) + +var getIdentityDID = &oapispec.Route{ + Name: "getIdentityDID", + Path: "namespaces/{ns}/identities/{iid}/did", + Method: http.MethodGet, + PathParams: []*oapispec.PathParam{ + {Name: "ns", ExampleFromConf: config.NamespacesDefault, Description: i18n.MsgTBD}, + {Name: "iid", Example: "id", Description: i18n.MsgTBD}, + }, + QueryParams: nil, + Description: i18n.MsgTBD, + JSONInputValue: nil, + JSONOutputValue: func() interface{} { return &networkmap.DIDDocument{} }, + JSONOutputCodes: []int{http.StatusOK}, + JSONHandler: func(r *oapispec.APIRequest) (output interface{}, err error) { + return getOr(r.Ctx).NetworkMap().GetDIDDocForIndentityByID(r.Ctx, r.PP["ns"], r.PP["iid"]) + }, +} diff --git a/internal/apiserver/route_get_identity_did_test.go b/internal/apiserver/route_get_identity_did_test.go new file mode 100644 index 0000000000..1ac5b38c1e --- /dev/null +++ b/internal/apiserver/route_get_identity_did_test.go @@ -0,0 +1,41 @@ +// Copyright © 2021 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package apiserver + +import ( + "net/http/httptest" + "testing" + + "github.com/hyperledger/firefly/internal/networkmap" + "github.com/hyperledger/firefly/mocks/networkmapmocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +func TestGetIdentityDID(t *testing.T) { + o, r := newTestAPIServer() + mnm := &networkmapmocks.Manager{} + o.On("NetworkMap").Return(mnm) + req := httptest.NewRequest("GET", "/api/v1/namespaces/ns1/identities/id1/did", nil) + req.Header.Set("Content-Type", "application/json; charset=utf-8") + res := httptest.NewRecorder() + + mnm.On("GetDIDDocForIndentityByID", mock.Anything, "ns1", "id1").Return(&networkmap.DIDDocument{}, nil, nil) + r.ServeHTTP(res, req) + + assert.Equal(t, 200, res.Result().StatusCode) +} diff --git a/internal/apiserver/route_get_identity_verifiers.go b/internal/apiserver/route_get_identity_verifiers.go new file mode 100644 index 0000000000..186814e585 --- /dev/null +++ b/internal/apiserver/route_get_identity_verifiers.go @@ -0,0 +1,46 @@ +// Copyright © 2022 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package apiserver + +import ( + "net/http" + + "github.com/hyperledger/firefly/internal/config" + "github.com/hyperledger/firefly/internal/i18n" + "github.com/hyperledger/firefly/internal/oapispec" + "github.com/hyperledger/firefly/pkg/database" + "github.com/hyperledger/firefly/pkg/fftypes" +) + +var getIdentityVerifiers = &oapispec.Route{ + Name: "getIdentityVerifiers", + Path: "namespaces/{ns}/identities/{iid}/verifiers", + Method: http.MethodGet, + PathParams: []*oapispec.PathParam{ + {Name: "ns", ExampleFromConf: config.NamespacesDefault, Description: i18n.MsgTBD}, + {Name: "iid", Example: "id", Description: i18n.MsgTBD}, + }, + QueryParams: nil, + FilterFactory: database.VerifierQueryFactory, + Description: i18n.MsgTBD, + JSONInputValue: nil, + JSONOutputValue: func() interface{} { return &[]*fftypes.Verifier{} }, + JSONOutputCodes: []int{http.StatusOK}, + JSONHandler: func(r *oapispec.APIRequest) (output interface{}, err error) { + return filterResult(getOr(r.Ctx).NetworkMap().GetIdentityVerifiers(r.Ctx, r.PP["ns"], r.PP["iid"], r.Filter)) + }, +} diff --git a/internal/apiserver/route_get_identity_verifiers_test.go b/internal/apiserver/route_get_identity_verifiers_test.go new file mode 100644 index 0000000000..39453abae1 --- /dev/null +++ b/internal/apiserver/route_get_identity_verifiers_test.go @@ -0,0 +1,41 @@ +// Copyright © 2021 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package apiserver + +import ( + "net/http/httptest" + "testing" + + "github.com/hyperledger/firefly/mocks/networkmapmocks" + "github.com/hyperledger/firefly/pkg/fftypes" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +func TestGetIdentityVerifiers(t *testing.T) { + o, r := newTestAPIServer() + mnm := &networkmapmocks.Manager{} + o.On("NetworkMap").Return(mnm) + req := httptest.NewRequest("GET", "/api/v1/namespaces/ns1/identities/id1/verifiers", nil) + req.Header.Set("Content-Type", "application/json; charset=utf-8") + res := httptest.NewRecorder() + + mnm.On("GetIdentityVerifiers", mock.Anything, "ns1", "id1", mock.Anything).Return([]*fftypes.Verifier{}, nil, nil) + r.ServeHTTP(res, req) + + assert.Equal(t, 200, res.Result().StatusCode) +} diff --git a/internal/apiserver/route_get_net_node.go b/internal/apiserver/route_get_net_node.go index 521a5d4a57..3554f35a5a 100644 --- a/internal/apiserver/route_get_net_node.go +++ b/internal/apiserver/route_get_net_node.go @@ -35,7 +35,7 @@ var getNetworkNode = &oapispec.Route{ FilterFactory: nil, Description: i18n.MsgTBD, JSONInputValue: nil, - JSONOutputValue: func() interface{} { return &fftypes.Node{} }, + JSONOutputValue: func() interface{} { return &fftypes.Identity{} }, JSONOutputCodes: []int{http.StatusOK}, JSONHandler: func(r *oapispec.APIRequest) (output interface{}, err error) { output, err = getOr(r.Ctx).NetworkMap().GetNodeByID(r.Ctx, r.PP["nid"]) diff --git a/internal/apiserver/route_get_net_node_test.go b/internal/apiserver/route_get_net_node_test.go index f94baa7200..194e8cabc5 100644 --- a/internal/apiserver/route_get_net_node_test.go +++ b/internal/apiserver/route_get_net_node_test.go @@ -35,7 +35,7 @@ func TestGetNode(t *testing.T) { res := httptest.NewRecorder() nmn.On("GetNodeByID", mock.Anything, "node12345"). 
- Return(&fftypes.Node{}, nil) + Return(&fftypes.Identity{}, nil) r.ServeHTTP(res, req) assert.Equal(t, 200, res.Result().StatusCode) diff --git a/internal/apiserver/route_get_net_nodes.go b/internal/apiserver/route_get_net_nodes.go index 932db708a2..c6378b5fe9 100644 --- a/internal/apiserver/route_get_net_nodes.go +++ b/internal/apiserver/route_get_net_nodes.go @@ -31,10 +31,10 @@ var getNetworkNodes = &oapispec.Route{ Method: http.MethodGet, PathParams: nil, QueryParams: nil, - FilterFactory: database.NodeQueryFactory, + FilterFactory: database.IdentityQueryFactory, Description: i18n.MsgTBD, JSONInputValue: nil, - JSONOutputValue: func() interface{} { return []*fftypes.Node{} }, + JSONOutputValue: func() interface{} { return []*fftypes.Identity{} }, JSONOutputCodes: []int{http.StatusOK}, JSONHandler: func(r *oapispec.APIRequest) (output interface{}, err error) { return filterResult(getOr(r.Ctx).NetworkMap().GetNodes(r.Ctx, r.Filter)) diff --git a/internal/apiserver/route_get_net_nodes_test.go b/internal/apiserver/route_get_net_nodes_test.go index 187a2e7298..5660a03994 100644 --- a/internal/apiserver/route_get_net_nodes_test.go +++ b/internal/apiserver/route_get_net_nodes_test.go @@ -35,7 +35,7 @@ func TestGetNodess(t *testing.T) { res := httptest.NewRecorder() mnm.On("GetNodes", mock.Anything, mock.Anything). - Return([]*fftypes.Node{}, nil, nil) + Return([]*fftypes.Identity{}, nil, nil) r.ServeHTTP(res, req) assert.Equal(t, 200, res.Result().StatusCode) diff --git a/internal/apiserver/route_get_net_org.go b/internal/apiserver/route_get_net_org.go index 126dcd532e..2d8d1b68d3 100644 --- a/internal/apiserver/route_get_net_org.go +++ b/internal/apiserver/route_get_net_org.go @@ -35,7 +35,7 @@ var getNetworkOrg = &oapispec.Route{ FilterFactory: nil, Description: i18n.MsgTBD, JSONInputValue: nil, - JSONOutputValue: func() interface{} { return &fftypes.Organization{} }, + JSONOutputValue: func() interface{} { return &fftypes.Identity{} }, JSONOutputCodes: []int{http.StatusOK}, JSONHandler: func(r *oapispec.APIRequest) (output interface{}, err error) { output, err = getOr(r.Ctx).NetworkMap().GetOrganizationByID(r.Ctx, r.PP["oid"]) diff --git a/internal/apiserver/route_get_net_org_test.go b/internal/apiserver/route_get_net_org_test.go index abe4a2320b..9dcbebd9ad 100644 --- a/internal/apiserver/route_get_net_org_test.go +++ b/internal/apiserver/route_get_net_org_test.go @@ -35,7 +35,7 @@ func TestGetOrg(t *testing.T) { res := httptest.NewRecorder() nmn.On("GetOrganizationByID", mock.Anything, "org12345"). 
- Return(&fftypes.Organization{}, nil) + Return(&fftypes.Identity{}, nil) r.ServeHTTP(res, req) assert.Equal(t, 200, res.Result().StatusCode) diff --git a/internal/apiserver/route_get_net_orgs.go b/internal/apiserver/route_get_net_orgs.go index f6e5af80b1..7816c3b5c8 100644 --- a/internal/apiserver/route_get_net_orgs.go +++ b/internal/apiserver/route_get_net_orgs.go @@ -31,10 +31,10 @@ var getNetworkOrgs = &oapispec.Route{ Method: http.MethodGet, PathParams: nil, QueryParams: nil, - FilterFactory: database.OrganizationQueryFactory, + FilterFactory: database.IdentityQueryFactory, Description: i18n.MsgTBD, JSONInputValue: nil, - JSONOutputValue: func() interface{} { return []*fftypes.Organization{} }, + JSONOutputValue: func() interface{} { return []*fftypes.Identity{} }, JSONOutputCodes: []int{http.StatusOK}, JSONHandler: func(r *oapispec.APIRequest) (output interface{}, err error) { return filterResult(getOr(r.Ctx).NetworkMap().GetOrganizations(r.Ctx, r.Filter)) diff --git a/internal/apiserver/route_get_net_orgs_test.go b/internal/apiserver/route_get_net_orgs_test.go index e78cab227d..34c5cde74c 100644 --- a/internal/apiserver/route_get_net_orgs_test.go +++ b/internal/apiserver/route_get_net_orgs_test.go @@ -35,7 +35,7 @@ func TestGetOrganizations(t *testing.T) { res := httptest.NewRecorder() mnm.On("GetOrganizations", mock.Anything, mock.Anything). - Return([]*fftypes.Organization{}, nil, nil) + Return([]*fftypes.Identity{}, nil, nil) r.ServeHTTP(res, req) assert.Equal(t, 200, res.Result().StatusCode) diff --git a/internal/apiserver/route_get_verifier_by_id.go b/internal/apiserver/route_get_verifier_by_id.go new file mode 100644 index 0000000000..6b73ef95bb --- /dev/null +++ b/internal/apiserver/route_get_verifier_by_id.go @@ -0,0 +1,44 @@ +// Copyright © 2022 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package apiserver + +import ( + "net/http" + + "github.com/hyperledger/firefly/internal/config" + "github.com/hyperledger/firefly/internal/i18n" + "github.com/hyperledger/firefly/internal/oapispec" + "github.com/hyperledger/firefly/pkg/fftypes" +) + +var getVerifierByID = &oapispec.Route{ + Name: "getVerifierByID", + Path: "namespaces/{ns}/verifiers/{vid}", + Method: http.MethodGet, + PathParams: []*oapispec.PathParam{ + {Name: "ns", ExampleFromConf: config.NamespacesDefault, Description: i18n.MsgTBD}, + {Name: "vid", Example: "id", Description: i18n.MsgTBD}, + }, + QueryParams: nil, + Description: i18n.MsgTBD, + JSONInputValue: nil, + JSONOutputValue: func() interface{} { return &fftypes.Verifier{} }, + JSONOutputCodes: []int{http.StatusOK}, + JSONHandler: func(r *oapispec.APIRequest) (output interface{}, err error) { + return getOr(r.Ctx).NetworkMap().GetVerifierByHash(r.Ctx, r.PP["ns"], r.PP["vid"]) + }, +} diff --git a/internal/apiserver/route_get_verifier_by_id_test.go b/internal/apiserver/route_get_verifier_by_id_test.go new file mode 100644 index 0000000000..589a99cc6d --- /dev/null +++ b/internal/apiserver/route_get_verifier_by_id_test.go @@ -0,0 +1,41 @@ +// Copyright © 2021 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package apiserver + +import ( + "net/http/httptest" + "testing" + + "github.com/hyperledger/firefly/mocks/networkmapmocks" + "github.com/hyperledger/firefly/pkg/fftypes" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +func TestGetVerifierByID(t *testing.T) { + o, r := newTestAPIServer() + mnm := &networkmapmocks.Manager{} + o.On("NetworkMap").Return(mnm) + req := httptest.NewRequest("GET", "/api/v1/namespaces/ns1/verifiers/hashid1", nil) + req.Header.Set("Content-Type", "application/json; charset=utf-8") + res := httptest.NewRecorder() + + mnm.On("GetVerifierByHash", mock.Anything, "ns1", "hashid1").Return(&fftypes.Verifier{}, nil, nil) + r.ServeHTTP(res, req) + + assert.Equal(t, 200, res.Result().StatusCode) +} diff --git a/internal/apiserver/route_get_verifiers.go b/internal/apiserver/route_get_verifiers.go new file mode 100644 index 0000000000..9b86cd3c0f --- /dev/null +++ b/internal/apiserver/route_get_verifiers.go @@ -0,0 +1,45 @@ +// Copyright © 2022 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package apiserver + +import ( + "net/http" + + "github.com/hyperledger/firefly/internal/config" + "github.com/hyperledger/firefly/internal/i18n" + "github.com/hyperledger/firefly/internal/oapispec" + "github.com/hyperledger/firefly/pkg/database" + "github.com/hyperledger/firefly/pkg/fftypes" +) + +var getVerifiers = &oapispec.Route{ + Name: "getVerifiers", + Path: "namespaces/{ns}/verifiers", + Method: http.MethodGet, + PathParams: []*oapispec.PathParam{ + {Name: "ns", ExampleFromConf: config.NamespacesDefault, Description: i18n.MsgTBD}, + }, + QueryParams: nil, + FilterFactory: database.VerifierQueryFactory, + Description: i18n.MsgTBD, + JSONInputValue: nil, + JSONOutputValue: func() interface{} { return &[]*fftypes.Verifier{} }, + JSONOutputCodes: []int{http.StatusOK}, + JSONHandler: func(r *oapispec.APIRequest) (output interface{}, err error) { + return filterResult(getOr(r.Ctx).NetworkMap().GetVerifiers(r.Ctx, r.PP["ns"], r.Filter)) + }, +} diff --git a/internal/apiserver/route_get_verifiers_test.go b/internal/apiserver/route_get_verifiers_test.go new file mode 100644 index 0000000000..608f279d82 --- /dev/null +++ b/internal/apiserver/route_get_verifiers_test.go @@ -0,0 +1,41 @@ +// Copyright © 2021 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package apiserver + +import ( + "net/http/httptest" + "testing" + + "github.com/hyperledger/firefly/mocks/networkmapmocks" + "github.com/hyperledger/firefly/pkg/fftypes" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +func TestGetVerifiers(t *testing.T) { + o, r := newTestAPIServer() + mnm := &networkmapmocks.Manager{} + o.On("NetworkMap").Return(mnm) + req := httptest.NewRequest("GET", "/api/v1/namespaces/ns1/verifiers", nil) + req.Header.Set("Content-Type", "application/json; charset=utf-8") + res := httptest.NewRecorder() + + mnm.On("GetVerifiers", mock.Anything, "ns1", mock.Anything).Return([]*fftypes.Verifier{}, nil, nil) + r.ServeHTTP(res, req) + + assert.Equal(t, 200, res.Result().StatusCode) +} diff --git a/internal/apiserver/route_patch_update_identity.go b/internal/apiserver/route_patch_update_identity.go new file mode 100644 index 0000000000..cfa853e709 --- /dev/null +++ b/internal/apiserver/route_patch_update_identity.go @@ -0,0 +1,54 @@ +// Copyright © 2022 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
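
Because getIdentityVerifiers and getVerifiers both set FilterFactory to database.VerifierQueryFactory, they pick up FireFly's generic filter query parameters (field filters plus sort/limit/skip) rather than declaring explicit QueryParams. Below is a rough, hedged sketch of a filtered listing call; the "type" field filter is an assumption about the fields exposed by the verifier query factory, not something this diff confirms.

package main

import (
	"fmt"
	"net/http"
	"net/url"
)

func main() {
	// Generic filter parameters enabled by FilterFactory:
	// field filters plus paging/sorting controls.
	q := url.Values{}
	q.Set("type", "ethereum_address") // hypothetical field filter on the verifier type
	q.Set("limit", "10")
	q.Set("sort", "created")

	u := "http://localhost:5000/api/v1/namespaces/default/verifiers?" + q.Encode()
	resp, err := http.Get(u)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.StatusCode)
}
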
+ +package apiserver + +import ( + "context" + "net/http" + "strings" + + "github.com/hyperledger/firefly/internal/config" + "github.com/hyperledger/firefly/internal/i18n" + "github.com/hyperledger/firefly/internal/oapispec" + "github.com/hyperledger/firefly/pkg/fftypes" +) + +var patchUpdateIdentity = &oapispec.Route{ + Name: "patchUpdateIdentity", + Path: "namespaces/{ns}/identities/{iid}", + Method: http.MethodPatch, + PathParams: []*oapispec.PathParam{ + {Name: "ns", ExampleFromConf: config.NamespacesDefault, Description: i18n.MsgTBD}, + {Name: "iid", Example: "id", Description: i18n.MsgTBD}, + }, + QueryParams: []*oapispec.QueryParam{ + {Name: "confirm", Description: i18n.MsgConfirmQueryParam, IsBool: true}, + }, + FilterFactory: nil, + Description: i18n.MsgTBD, + JSONInputValue: func() interface{} { return &fftypes.IdentityUpdateDTO{} }, + JSONInputMask: nil, + JSONInputSchema: func(ctx context.Context) string { return emptyObjectSchema }, + JSONOutputValue: func() interface{} { return &fftypes.Identity{} }, + JSONOutputCodes: []int{http.StatusAccepted, http.StatusOK}, + JSONHandler: func(r *oapispec.APIRequest) (output interface{}, err error) { + waitConfirm := strings.EqualFold(r.QP["confirm"], "true") + r.SuccessStatus = syncRetcode(waitConfirm) + org, err := getOr(r.Ctx).NetworkMap().UpdateIdentity(r.Ctx, r.PP["ns"], r.PP["iid"], r.Input.(*fftypes.IdentityUpdateDTO), waitConfirm) + return org, err + }, +} diff --git a/internal/apiserver/route_patch_update_identity_test.go b/internal/apiserver/route_patch_update_identity_test.go new file mode 100644 index 0000000000..3c6cfd0431 --- /dev/null +++ b/internal/apiserver/route_patch_update_identity_test.go @@ -0,0 +1,47 @@ +// Copyright © 2021 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package apiserver + +import ( + "bytes" + "encoding/json" + "net/http/httptest" + "testing" + + "github.com/hyperledger/firefly/mocks/networkmapmocks" + "github.com/hyperledger/firefly/pkg/fftypes" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +func TestUpdateIdentity(t *testing.T) { + o, r := newTestAPIServer() + mnm := &networkmapmocks.Manager{} + o.On("NetworkMap").Return(mnm) + input := fftypes.Identity{} + var buf bytes.Buffer + json.NewEncoder(&buf).Encode(&input) + req := httptest.NewRequest("PATCH", "/api/v1/namespaces/ns1/identities/id1", &buf) + req.Header.Set("Content-Type", "application/json; charset=utf-8") + res := httptest.NewRecorder() + + mnm.On("UpdateIdentity", mock.Anything, "ns1", "id1", mock.AnythingOfType("*fftypes.IdentityUpdateDTO"), false). + Return(&fftypes.Identity{}, nil) + r.ServeHTTP(res, req) + + assert.Equal(t, 202, res.Result().StatusCode) +} diff --git a/internal/apiserver/route_post_new_identity.go b/internal/apiserver/route_post_new_identity.go new file mode 100644 index 0000000000..891f06df18 --- /dev/null +++ b/internal/apiserver/route_post_new_identity.go @@ -0,0 +1,53 @@ +// Copyright © 2022 Kaleido, Inc. 
+// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package apiserver + +import ( + "context" + "net/http" + "strings" + + "github.com/hyperledger/firefly/internal/config" + "github.com/hyperledger/firefly/internal/i18n" + "github.com/hyperledger/firefly/internal/oapispec" + "github.com/hyperledger/firefly/pkg/fftypes" +) + +var postNewIdentity = &oapispec.Route{ + Name: "postNewIdentity", + Path: "namespaces/{ns}/identities", + Method: http.MethodPost, + PathParams: []*oapispec.PathParam{ + {Name: "ns", ExampleFromConf: config.NamespacesDefault, Description: i18n.MsgTBD}, + }, + QueryParams: []*oapispec.QueryParam{ + {Name: "confirm", Description: i18n.MsgConfirmQueryParam, IsBool: true}, + }, + FilterFactory: nil, + Description: i18n.MsgTBD, + JSONInputValue: func() interface{} { return &fftypes.IdentityCreateDTO{} }, + JSONInputMask: nil, + JSONInputSchema: func(ctx context.Context) string { return emptyObjectSchema }, + JSONOutputValue: func() interface{} { return &fftypes.Identity{} }, + JSONOutputCodes: []int{http.StatusAccepted, http.StatusOK}, + JSONHandler: func(r *oapispec.APIRequest) (output interface{}, err error) { + waitConfirm := strings.EqualFold(r.QP["confirm"], "true") + r.SuccessStatus = syncRetcode(waitConfirm) + org, err := getOr(r.Ctx).NetworkMap().RegisterIdentity(r.Ctx, r.PP["ns"], r.Input.(*fftypes.IdentityCreateDTO), waitConfirm) + return org, err + }, +} diff --git a/internal/apiserver/route_post_new_identity_test.go b/internal/apiserver/route_post_new_identity_test.go new file mode 100644 index 0000000000..cd9783a416 --- /dev/null +++ b/internal/apiserver/route_post_new_identity_test.go @@ -0,0 +1,47 @@ +// Copyright © 2021 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
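
postNewIdentity, like patchUpdateIdentity above, lists both http.StatusAccepted and http.StatusOK and selects between them at runtime: syncRetcode(waitConfirm) makes the handler return 202 for a fire-and-forget registration and 200 when ?confirm=true asks the server to wait for confirmation. A minimal, illustrative client call follows; the request body fields are hypothetical, since fftypes.IdentityCreateDTO is not shown in this diff.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Hypothetical identity-creation payload; the real field names come from
	// fftypes.IdentityCreateDTO, which is outside this diff.
	body, _ := json.Marshal(map[string]interface{}{
		"name":   "org-a-agent-1",
		"parent": "did:firefly:org/org-a",
	})

	// confirm=true asks the server to wait for confirmation (HTTP 200);
	// omitting it submits asynchronously and returns 202 Accepted.
	u := "http://localhost:5000/api/v1/namespaces/default/identities?confirm=true"
	resp, err := http.Post(u, "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.StatusCode)
}
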
+ +package apiserver + +import ( + "bytes" + "encoding/json" + "net/http/httptest" + "testing" + + "github.com/hyperledger/firefly/mocks/networkmapmocks" + "github.com/hyperledger/firefly/pkg/fftypes" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +func TestNewIdentity(t *testing.T) { + o, r := newTestAPIServer() + mnm := &networkmapmocks.Manager{} + o.On("NetworkMap").Return(mnm) + input := fftypes.Identity{} + var buf bytes.Buffer + json.NewEncoder(&buf).Encode(&input) + req := httptest.NewRequest("POST", "/api/v1/namespaces/ns1/identities", &buf) + req.Header.Set("Content-Type", "application/json; charset=utf-8") + res := httptest.NewRecorder() + + mnm.On("RegisterIdentity", mock.Anything, "ns1", mock.AnythingOfType("*fftypes.IdentityCreateDTO"), false). + Return(&fftypes.Identity{}, nil) + r.ServeHTTP(res, req) + + assert.Equal(t, 202, res.Result().StatusCode) +} diff --git a/internal/apiserver/route_post_new_node_self.go b/internal/apiserver/route_post_new_node_self.go index 3608a75e49..8d62f9bea8 100644 --- a/internal/apiserver/route_post_new_node_self.go +++ b/internal/apiserver/route_post_new_node_self.go @@ -39,12 +39,12 @@ var postNodesSelf = &oapispec.Route{ JSONInputValue: func() interface{} { return &fftypes.EmptyInput{} }, JSONInputMask: nil, JSONInputSchema: func(ctx context.Context) string { return emptyObjectSchema }, - JSONOutputValue: func() interface{} { return &fftypes.Node{} }, + JSONOutputValue: func() interface{} { return &fftypes.Identity{} }, JSONOutputCodes: []int{http.StatusAccepted, http.StatusOK}, JSONHandler: func(r *oapispec.APIRequest) (output interface{}, err error) { waitConfirm := strings.EqualFold(r.QP["confirm"], "true") r.SuccessStatus = syncRetcode(waitConfirm) - node, _, err := getOr(r.Ctx).NetworkMap().RegisterNode(r.Ctx, waitConfirm) + node, err := getOr(r.Ctx).NetworkMap().RegisterNode(r.Ctx, waitConfirm) return node, err }, } diff --git a/internal/apiserver/route_post_new_node_self_test.go b/internal/apiserver/route_post_new_node_self_test.go index f1fc28e1f3..ee313b840a 100644 --- a/internal/apiserver/route_post_new_node_self_test.go +++ b/internal/apiserver/route_post_new_node_self_test.go @@ -40,7 +40,7 @@ func TestPostNewNodeSelf(t *testing.T) { res := httptest.NewRecorder() mnm.On("RegisterNode", mock.Anything, false). 
- Return(&fftypes.Node{}, &fftypes.Message{}, nil) + Return(&fftypes.Identity{}, nil) r.ServeHTTP(res, req) assert.Equal(t, 202, res.Result().StatusCode) diff --git a/internal/apiserver/route_post_new_organization.go b/internal/apiserver/route_post_new_organization.go index e9133f149d..a9bff5b8d4 100644 --- a/internal/apiserver/route_post_new_organization.go +++ b/internal/apiserver/route_post_new_organization.go @@ -35,14 +35,14 @@ var postNewOrganization = &oapispec.Route{ }, FilterFactory: nil, Description: i18n.MsgTBD, - JSONInputValue: func() interface{} { return &fftypes.Organization{} }, + JSONInputValue: func() interface{} { return &fftypes.IdentityCreateDTO{} }, JSONInputMask: []string{"ID", "Created", "Message", "Type"}, - JSONOutputValue: func() interface{} { return &fftypes.Organization{} }, + JSONOutputValue: func() interface{} { return &fftypes.Identity{} }, JSONOutputCodes: []int{http.StatusAccepted, http.StatusOK}, JSONHandler: func(r *oapispec.APIRequest) (output interface{}, err error) { waitConfirm := strings.EqualFold(r.QP["confirm"], "true") r.SuccessStatus = syncRetcode(waitConfirm) - _, err = getOr(r.Ctx).NetworkMap().RegisterOrganization(r.Ctx, r.Input.(*fftypes.Organization), waitConfirm) + _, err = getOr(r.Ctx).NetworkMap().RegisterOrganization(r.Ctx, r.Input.(*fftypes.IdentityCreateDTO), waitConfirm) return r.Input, err }, } diff --git a/internal/apiserver/route_post_new_organization_self.go b/internal/apiserver/route_post_new_organization_self.go index fde41a0640..607b31f798 100644 --- a/internal/apiserver/route_post_new_organization_self.go +++ b/internal/apiserver/route_post_new_organization_self.go @@ -39,12 +39,12 @@ var postNewOrganizationSelf = &oapispec.Route{ JSONInputValue: func() interface{} { return &fftypes.EmptyInput{} }, JSONInputMask: nil, JSONInputSchema: func(ctx context.Context) string { return emptyObjectSchema }, - JSONOutputValue: func() interface{} { return &fftypes.Organization{} }, + JSONOutputValue: func() interface{} { return &fftypes.Identity{} }, JSONOutputCodes: []int{http.StatusAccepted, http.StatusOK}, JSONHandler: func(r *oapispec.APIRequest) (output interface{}, err error) { waitConfirm := strings.EqualFold(r.QP["confirm"], "true") r.SuccessStatus = syncRetcode(waitConfirm) - org, _, err := getOr(r.Ctx).NetworkMap().RegisterNodeOrganization(r.Ctx, waitConfirm) + org, err := getOr(r.Ctx).NetworkMap().RegisterNodeOrganization(r.Ctx, waitConfirm) return org, err }, } diff --git a/internal/apiserver/route_post_new_organization_self_test.go b/internal/apiserver/route_post_new_organization_self_test.go index a33c0f4343..961e2ae410 100644 --- a/internal/apiserver/route_post_new_organization_self_test.go +++ b/internal/apiserver/route_post_new_organization_self_test.go @@ -40,7 +40,7 @@ func TestNewOrganizationSelf(t *testing.T) { res := httptest.NewRecorder() mnm.On("RegisterNodeOrganization", mock.Anything, false). 
- Return(&fftypes.Organization{}, &fftypes.Message{}, nil) + Return(&fftypes.Identity{}, nil) r.ServeHTTP(res, req) assert.Equal(t, 202, res.Result().StatusCode) diff --git a/internal/apiserver/route_post_new_organization_test.go b/internal/apiserver/route_post_new_organization_test.go index ecd8035238..464022d965 100644 --- a/internal/apiserver/route_post_new_organization_test.go +++ b/internal/apiserver/route_post_new_organization_test.go @@ -32,15 +32,15 @@ func TestNewOrganization(t *testing.T) { o, r := newTestAPIServer() mnm := &networkmapmocks.Manager{} o.On("NetworkMap").Return(mnm) - input := fftypes.Organization{} + input := fftypes.Identity{} var buf bytes.Buffer json.NewEncoder(&buf).Encode(&input) req := httptest.NewRequest("POST", "/api/v1/network/organizations", &buf) req.Header.Set("Content-Type", "application/json; charset=utf-8") res := httptest.NewRecorder() - mnm.On("RegisterOrganization", mock.Anything, mock.AnythingOfType("*fftypes.Organization"), false). - Return(&fftypes.Message{}, nil) + mnm.On("RegisterOrganization", mock.Anything, mock.AnythingOfType("*fftypes.IdentityCreateDTO"), false). + Return(&fftypes.Identity{}, nil) r.ServeHTTP(res, req) assert.Equal(t, 202, res.Result().StatusCode) diff --git a/internal/apiserver/routes.go b/internal/apiserver/routes.go index 7372a122ea..7c9a98b913 100644 --- a/internal/apiserver/routes.go +++ b/internal/apiserver/routes.go @@ -29,6 +29,8 @@ var routes = []*oapispec.Route{ postNodesSelf, postNewOrganization, postNewOrganizationSelf, + postNewIdentity, + patchUpdateIdentity, postData, postNewSubscription, @@ -49,6 +51,10 @@ var routes = []*oapispec.Route{ getEvents, getGroups, getGroupByHash, + getIdentities, + getIdentityByID, + getIdentityDID, + getIdentityVerifiers, getMsgByID, getMsgData, getMsgEvents, @@ -72,6 +78,8 @@ var routes = []*oapispec.Route{ getTxnBlockchainEvents, getTxnStatus, getTxns, + getVerifiers, + getVerifierByID, getChartHistogram, diff --git a/internal/assets/manager.go b/internal/assets/manager.go index 33bdccb5f7..e9be352892 100644 --- a/internal/assets/manager.go +++ b/internal/assets/manager.go @@ -65,17 +65,18 @@ type Manager interface { } type assetManager struct { - ctx context.Context - database database.Plugin - txHelper txcommon.Helper - identity identity.Manager - data data.Manager - syncasync syncasync.Bridge - broadcast broadcast.Manager - messaging privatemessaging.Manager - tokens map[string]tokens.Plugin - retry retry.Retry - metrics metrics.Manager + ctx context.Context + database database.Plugin + txHelper txcommon.Helper + identity identity.Manager + data data.Manager + syncasync syncasync.Bridge + broadcast broadcast.Manager + messaging privatemessaging.Manager + tokens map[string]tokens.Plugin + retry retry.Retry + metrics metrics.Manager + keyNormalization int } func NewAssetManager(ctx context.Context, di database.Plugin, im identity.Manager, dm data.Manager, sa syncasync.Bridge, bm broadcast.Manager, pm privatemessaging.Manager, ti map[string]tokens.Plugin, mm metrics.Manager) (Manager, error) { @@ -97,7 +98,8 @@ func NewAssetManager(ctx context.Context, di database.Plugin, im identity.Manage MaximumDelay: config.GetDuration(config.AssetManagerRetryMaxDelay), Factor: config.GetFloat64(config.AssetManagerRetryFactor), }, - metrics: mm, + keyNormalization: identity.ParseKeyNormalizationConfig(config.GetString(config.AssetManagerKeyNormalization)), + metrics: mm, } return am, nil } diff --git a/internal/assets/token_approval.go b/internal/assets/token_approval.go index 
5cbffc4a6c..8fd66a0550 100644 --- a/internal/assets/token_approval.go +++ b/internal/assets/token_approval.go @@ -136,7 +136,7 @@ func (s *approveSender) sendInternal(ctx context.Context, method sendMethod) err return err } -func (am *assetManager) validateApproval(ctx context.Context, ns string, approval *fftypes.TokenApprovalInput) error { +func (am *assetManager) validateApproval(ctx context.Context, ns string, approval *fftypes.TokenApprovalInput) (err error) { if approval.Connector == "" { connector, err := am.getTokenConnectorName(ctx, ns) if err != nil { @@ -151,13 +151,6 @@ func (am *assetManager) validateApproval(ctx context.Context, ns string, approva } approval.Pool = pool } - if approval.Key == "" { - org, err := am.identity.GetLocalOrganization(ctx) - if err != nil { - return err - } - approval.Key = org.Identity - } - - return nil + approval.Key, err = am.identity.NormalizeSigningKey(ctx, approval.Key, am.keyNormalization) + return err } diff --git a/internal/assets/token_approval_test.go b/internal/assets/token_approval_test.go index a1ce113578..096363c9f9 100644 --- a/internal/assets/token_approval_test.go +++ b/internal/assets/token_approval_test.go @@ -19,6 +19,7 @@ import ( "fmt" "testing" + "github.com/hyperledger/firefly/internal/identity" "github.com/hyperledger/firefly/internal/syncasync" "github.com/hyperledger/firefly/mocks/databasemocks" "github.com/hyperledger/firefly/mocks/datamocks" @@ -66,7 +67,7 @@ func TestTokenApprovalSuccess(t *testing.T) { mti := am.tokens["magic-tokens"].(*tokenmocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "key", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", "pool1").Return(pool, nil) mti.On("TokensApproval", context.Background(), mock.Anything, "F1", &approval.TokenApproval).Return(nil) mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenApproval).Return(fftypes.NewUUID(), nil) @@ -96,7 +97,7 @@ func TestTokenApprovalSuccessUnknownIdentity(t *testing.T) { mti := am.tokens["magic-tokens"].(*tokenmocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", "pool1").Return(pool, nil) mti.On("TokensApproval", context.Background(), mock.Anything, "F1", &approval.TokenApproval).Return(nil) mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenApproval).Return(fftypes.NewUUID(), nil) @@ -122,7 +123,7 @@ func TestApprovalUnknownConnectorNoConnectors(t *testing.T) { am.tokens = make(map[string]tokens.Plugin) mim := am.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "key", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) _, err := am.TokenApproval(context.Background(), "ns1", approval, false) assert.Regexp(t, "FF10292", err) @@ -145,7 +146,7 @@ func 
TestApprovalUnknownConnectorMultipleConnectors(t *testing.T) { am.tokens["magic-tokens2"] = nil mim := am.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "key", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) _, err := am.TokenApproval(context.Background(), "ns1", approval, false) assert.Regexp(t, "FF10292", err) @@ -167,7 +168,7 @@ func TestApprovalUnknownConnectorBadNamespace(t *testing.T) { am.tokens = make(map[string]tokens.Plugin) mim := am.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "key", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) _, err := am.TokenApproval(context.Background(), "", approval, false) assert.Regexp(t, "FF10131", err) @@ -188,7 +189,7 @@ func TestApprovalBadConnector(t *testing.T) { } mim := am.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "key", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) _, err := am.TokenApproval(context.Background(), "ns1", approval, false) assert.Regexp(t, "FF10272", err) @@ -224,7 +225,7 @@ func TestApprovalUnknownPoolSuccess(t *testing.T) { filterResult := &database.FilterResult{ TotalCount: &totalCount, } - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "key", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPools", context.Background(), mock.MatchedBy((func(f database.AndFilter) bool { info, _ := f.Finalize() return info.Count && info.Limit == 1 @@ -260,7 +261,7 @@ func TestApprovalUnknownPoolNoPool(t *testing.T) { filterResult := &database.FilterResult{ TotalCount: &totalCount, } - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "key", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPools", context.Background(), mock.MatchedBy((func(f database.AndFilter) bool { info, _ := f.Finalize() return info.Count && info.Limit == 1 @@ -285,7 +286,7 @@ func TestApprovalBadPool(t *testing.T) { mdi := am.database.(*databasemocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "key", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", "pool1").Return(nil, fmt.Errorf("pop")) _, err := am.TokenApproval(context.Background(), "ns1", approval, false) @@ -310,7 +311,7 @@ func TestApprovalUnconfirmedPool(t *testing.T) { mdi := am.database.(*databasemocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", 
"pool1").Return(pool, nil) _, err := am.TokenApproval(context.Background(), "ns1", approval, false) @@ -333,7 +334,7 @@ func TestApprovalIdentityFail(t *testing.T) { } mim := am.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrganization", context.Background()).Return(nil, fmt.Errorf("pop")) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("", fmt.Errorf("pop")) _, err := am.TokenApproval(context.Background(), "ns1", approval, false) assert.EqualError(t, err, "pop") @@ -360,7 +361,7 @@ func TestApprovalFail(t *testing.T) { mti := am.tokens["magic-tokens"].(*tokenmocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "key", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", "pool1").Return(pool, nil) mti.On("TokensApproval", context.Background(), mock.Anything, "F1", &approval.TokenApproval).Return(fmt.Errorf("pop")) mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenApproval).Return(fftypes.NewUUID(), nil) @@ -390,7 +391,7 @@ func TestApprovalTransactionFail(t *testing.T) { mdi := am.database.(*databasemocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", "pool1").Return(pool, nil) mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenApproval).Return(nil, fmt.Errorf("pop")) @@ -422,7 +423,7 @@ func TestApprovalFailAndDbFail(t *testing.T) { mti := am.tokens["magic-tokens"].(*tokenmocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "key", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", "pool1").Return(pool, nil) mti.On("TokensApproval", context.Background(), mock.Anything, "F1", &approval.TokenApproval).Return(fmt.Errorf("pop")) mdi.On("InsertOperation", context.Background(), mock.Anything).Return(nil) @@ -454,7 +455,7 @@ func TestApprovalOperationsFail(t *testing.T) { mim := am.identity.(*identitymanagermocks.Manager) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "key", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", "pool1").Return(pool, nil) mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenApproval).Return(fftypes.NewUUID(), nil) mdi.On("InsertOperation", context.Background(), mock.Anything).Return(fmt.Errorf("pop")) @@ -486,7 +487,7 @@ func TestTokenApprovalConfirm(t *testing.T) { mdm := am.data.(*datamocks.Manager) msa := am.syncasync.(*syncasyncmocks.Bridge) mth := 
am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "key", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", "pool1").Return(pool, nil) mti.On("TokensApproval", context.Background(), mock.Anything, "F1", &approval.TokenApproval).Return(nil) mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenApproval).Return(fftypes.NewUUID(), nil) diff --git a/internal/assets/token_pool.go b/internal/assets/token_pool.go index fe26dd0b9f..108753e8b8 100644 --- a/internal/assets/token_pool.go +++ b/internal/assets/token_pool.go @@ -43,12 +43,10 @@ func (am *assetManager) CreateTokenPool(ctx context.Context, ns string, pool *ff pool.Connector = connector } - if pool.Key == "" { - org, err := am.identity.GetLocalOrganization(ctx) - if err != nil { - return nil, err - } - pool.Key = org.Identity + var err error + pool.Key, err = am.identity.NormalizeSigningKey(ctx, pool.Key, am.keyNormalization) + if err != nil { + return nil, err } return am.createTokenPoolInternal(ctx, pool, waitConfirm) } diff --git a/internal/assets/token_pool_test.go b/internal/assets/token_pool_test.go index 4cc7fc2615..6635edaedd 100644 --- a/internal/assets/token_pool_test.go +++ b/internal/assets/token_pool_test.go @@ -19,6 +19,7 @@ import ( "fmt" "testing" + "github.com/hyperledger/firefly/internal/identity" "github.com/hyperledger/firefly/internal/syncasync" "github.com/hyperledger/firefly/mocks/databasemocks" "github.com/hyperledger/firefly/mocks/datamocks" @@ -59,7 +60,7 @@ func TestCreateTokenPoolUnknownConnectorSuccess(t *testing.T) { mti := am.tokens["magic-tokens"].(*tokenmocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("resolved-key", nil) mdm.On("VerifyNamespaceExists", context.Background(), "ns1").Return(nil) mti.On("CreateTokenPool", context.Background(), mock.Anything, mock.Anything, mock.Anything).Return(false, nil) mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenPool).Return(fftypes.NewUUID(), nil) @@ -119,7 +120,7 @@ func TestCreateTokenPoolMissingNamespace(t *testing.T) { mti := am.tokens["magic-tokens"].(*tokenmocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdm.On("VerifyNamespaceExists", context.Background(), "ns1").Return(nil).Times(2) mti.On("CreateTokenPool", context.Background(), mock.Anything, mock.Anything).Return(false, nil).Times(1) mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenPool).Return(fftypes.NewUUID(), nil) @@ -162,7 +163,7 @@ func TestCreateTokenPoolIdentityFail(t *testing.T) { mdm := am.data.(*datamocks.Manager) mim := am.identity.(*identitymanagermocks.Manager) mdm.On("VerifyNamespaceExists", context.Background(), "ns1").Return(nil) - mim.On("GetLocalOrganization", context.Background()).Return(nil, 
fmt.Errorf("pop")) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("", fmt.Errorf("pop")) _, err := am.CreateTokenPool(context.Background(), "ns1", pool, false) assert.EqualError(t, err, "pop") @@ -180,7 +181,7 @@ func TestCreateTokenPoolWrongConnector(t *testing.T) { mdi := am.database.(*databasemocks.Plugin) mdm := am.data.(*datamocks.Manager) mim := am.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdm.On("VerifyNamespaceExists", context.Background(), "ns1").Return(nil) mdi.On("UpsertTransaction", context.Background(), mock.MatchedBy(func(tx *fftypes.Transaction) bool { return tx.Type == fftypes.TransactionTypeTokenPool @@ -205,7 +206,7 @@ func TestCreateTokenPoolFail(t *testing.T) { mti := am.tokens["magic-tokens"].(*tokenmocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdm.On("VerifyNamespaceExists", context.Background(), "ns1").Return(nil) mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenPool).Return(fftypes.NewUUID(), nil) mdi.On("InsertOperation", context.Background(), mock.Anything).Return(nil) @@ -228,7 +229,7 @@ func TestCreateTokenPoolTransactionFail(t *testing.T) { mdm := am.data.(*datamocks.Manager) mim := am.identity.(*identitymanagermocks.Manager) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdm.On("VerifyNamespaceExists", context.Background(), "ns1").Return(nil) mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenPool).Return(nil, fmt.Errorf("pop")) @@ -249,7 +250,7 @@ func TestCreateTokenPoolOpInsertFail(t *testing.T) { mdm := am.data.(*datamocks.Manager) mim := am.identity.(*identitymanagermocks.Manager) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdm.On("VerifyNamespaceExists", context.Background(), "ns1").Return(nil) mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenPool).Return(fftypes.NewUUID(), nil) mdi.On("InsertOperation", context.Background(), mock.Anything).Return(fmt.Errorf("pop")) @@ -272,7 +273,7 @@ func TestCreateTokenPoolSyncSuccess(t *testing.T) { mti := am.tokens["magic-tokens"].(*tokenmocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdm.On("VerifyNamespaceExists", context.Background(), "ns1").Return(nil) 
mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenPool).Return(fftypes.NewUUID(), nil) mdi.On("InsertOperation", context.Background(), mock.Anything).Return(nil) @@ -297,7 +298,7 @@ func TestCreateTokenPoolAsyncSuccess(t *testing.T) { mti := am.tokens["magic-tokens"].(*tokenmocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdm.On("VerifyNamespaceExists", context.Background(), "ns1").Return(nil) mti.On("CreateTokenPool", context.Background(), mock.Anything, mock.Anything, mock.Anything).Return(false, nil) mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenPool).Return(fftypes.NewUUID(), nil) @@ -322,7 +323,7 @@ func TestCreateTokenPoolConfirm(t *testing.T) { mti := am.tokens["magic-tokens"].(*tokenmocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdm.On("VerifyNamespaceExists", context.Background(), "ns1").Return(nil).Times(2) mti.On("CreateTokenPool", context.Background(), mock.Anything, mock.Anything).Return(false, nil).Times(1) mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenPool).Return(fftypes.NewUUID(), nil) diff --git a/internal/assets/token_transfer.go b/internal/assets/token_transfer.go index 75a21cca06..435d9172c6 100644 --- a/internal/assets/token_transfer.go +++ b/internal/assets/token_transfer.go @@ -86,7 +86,7 @@ func (s *transferSender) setDefaults() { s.transfer.LocalID = fftypes.NewUUID() } -func (am *assetManager) validateTransfer(ctx context.Context, ns string, transfer *fftypes.TokenTransferInput) error { +func (am *assetManager) validateTransfer(ctx context.Context, ns string, transfer *fftypes.TokenTransferInput) (err error) { if transfer.Connector == "" { connector, err := am.getTokenConnectorName(ctx, ns) if err != nil { @@ -101,12 +101,8 @@ func (am *assetManager) validateTransfer(ctx context.Context, ns string, transfe } transfer.Pool = pool } - if transfer.Key == "" { - org, err := am.identity.GetLocalOrganization(ctx) - if err != nil { - return err - } - transfer.Key = org.Identity + if transfer.Key, err = am.identity.NormalizeSigningKey(ctx, transfer.Key, am.keyNormalization); err != nil { + return err } if transfer.From == "" { transfer.From = transfer.Key diff --git a/internal/assets/token_transfer_test.go b/internal/assets/token_transfer_test.go index cb54c3ded2..b89c10749e 100644 --- a/internal/assets/token_transfer_test.go +++ b/internal/assets/token_transfer_test.go @@ -19,6 +19,7 @@ import ( "fmt" "testing" + "github.com/hyperledger/firefly/internal/identity" "github.com/hyperledger/firefly/internal/syncasync" "github.com/hyperledger/firefly/mocks/broadcastmocks" "github.com/hyperledger/firefly/mocks/databasemocks" @@ -86,7 +87,7 @@ func TestMintTokensSuccess(t *testing.T) { mti := am.tokens["magic-tokens"].(*tokenmocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", 
context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", "pool1").Return(pool, nil) mti.On("MintTokens", context.Background(), mock.Anything, "F1", &mint.TokenTransfer).Return(nil) mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenTransfer).Return(fftypes.NewUUID(), nil) @@ -115,7 +116,7 @@ func TestMintTokenUnknownConnectorSuccess(t *testing.T) { mti := am.tokens["magic-tokens"].(*tokenmocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", "pool1").Return(pool, nil) mti.On("MintTokens", context.Background(), mock.Anything, "F1", &mint.TokenTransfer).Return(nil) mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenTransfer).Return(fftypes.NewUUID(), nil) @@ -139,7 +140,7 @@ func TestMintTokenUnknownConnectorNoConnectors(t *testing.T) { am.tokens = make(map[string]tokens.Plugin) mim := am.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) _, err := am.MintTokens(context.Background(), "ns1", mint, false) assert.Regexp(t, "FF10292", err) @@ -160,7 +161,7 @@ func TestMintTokenUnknownConnectorMultipleConnectors(t *testing.T) { am.tokens["magic-tokens2"] = nil mim := am.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) _, err := am.MintTokens(context.Background(), "ns1", mint, false) assert.Regexp(t, "FF10292", err) @@ -178,7 +179,7 @@ func TestMintTokenUnknownConnectorBadNamespace(t *testing.T) { } mim := am.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) _, err := am.MintTokens(context.Background(), "", mint, false) assert.Regexp(t, "FF10131", err) @@ -197,7 +198,7 @@ func TestMintTokenBadConnector(t *testing.T) { } mim := am.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) _, err := am.MintTokens(context.Background(), "ns1", mint, false) assert.Regexp(t, "FF10272", err) @@ -231,7 +232,7 @@ func TestMintTokenUnknownPoolSuccess(t *testing.T) { filterResult := &database.FilterResult{ TotalCount: &totalCount, } - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", 
identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPools", context.Background(), mock.MatchedBy((func(f database.AndFilter) bool { info, _ := f.Finalize() return info.Count && info.Limit == 1 @@ -265,7 +266,7 @@ func TestMintTokenUnknownPoolNoPools(t *testing.T) { filterResult := &database.FilterResult{ TotalCount: &totalCount, } - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPools", context.Background(), mock.MatchedBy((func(f database.AndFilter) bool { info, _ := f.Finalize() return info.Count && info.Limit == 1 @@ -302,12 +303,12 @@ func TestMintTokenUnknownPoolMultiplePools(t *testing.T) { filterResult := &database.FilterResult{ TotalCount: &totalCount, } - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPools", context.Background(), mock.MatchedBy((func(f database.AndFilter) bool { info, _ := f.Finalize() return info.Count && info.Limit == 1 }))).Return(tokenPools, filterResult, nil) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) _, err := am.MintTokens(context.Background(), "ns1", mint, false) assert.Regexp(t, "FF10292", err) @@ -324,7 +325,7 @@ func TestMintTokenUnknownPoolBadNamespace(t *testing.T) { } mim := am.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) _, err := am.MintTokens(context.Background(), "", mint, false) assert.Regexp(t, "FF10131", err) @@ -342,7 +343,7 @@ func TestMintTokensGetPoolsError(t *testing.T) { mdi := am.database.(*databasemocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPools", context.Background(), mock.Anything).Return(nil, nil, fmt.Errorf("pop")) _, err := am.MintTokens(context.Background(), "ns1", mint, false) @@ -362,7 +363,7 @@ func TestMintTokensBadPool(t *testing.T) { mdi := am.database.(*databasemocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", "pool1").Return(nil, fmt.Errorf("pop")) _, err := am.MintTokens(context.Background(), "ns1", mint, false) @@ -381,7 +382,7 @@ func TestMintTokensIdentityFail(t *testing.T) { } mim := am.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrganization", context.Background()).Return(nil, fmt.Errorf("pop")) + mim.On("NormalizeSigningKey", context.Background(), "", 
identity.KeyNormalizationBlockchainPlugin).Return("", fmt.Errorf("pop")) _, err := am.MintTokens(context.Background(), "ns1", mint, false) assert.EqualError(t, err, "pop") @@ -406,7 +407,7 @@ func TestMintTokensFail(t *testing.T) { mti := am.tokens["magic-tokens"].(*tokenmocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", "pool1").Return(pool, nil) mti.On("MintTokens", context.Background(), mock.Anything, "F1", &mint.TokenTransfer).Return(fmt.Errorf("pop")) mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenTransfer).Return(fftypes.NewUUID(), nil) @@ -436,7 +437,7 @@ func TestMintTokensFailAndDbFail(t *testing.T) { mti := am.tokens["magic-tokens"].(*tokenmocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", "pool1").Return(pool, nil) mti.On("MintTokens", context.Background(), mock.Anything, "F1", &mint.TokenTransfer).Return(fmt.Errorf("pop")) mdi.On("InsertOperation", context.Background(), mock.Anything).Return(nil) @@ -465,7 +466,7 @@ func TestMintTokensOperationFail(t *testing.T) { mdi := am.database.(*databasemocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", "pool1").Return(pool, nil) mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenTransfer).Return(fftypes.NewUUID(), nil) mdi.On("InsertOperation", context.Background(), mock.Anything).Return(fmt.Errorf("pop")) @@ -495,7 +496,7 @@ func TestMintTokensConfirm(t *testing.T) { mti := am.tokens["magic-tokens"].(*tokenmocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", "pool1").Return(pool, nil) mti.On("MintTokens", context.Background(), mock.Anything, "F1", &mint.TokenTransfer).Return(nil) mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenTransfer).Return(fftypes.NewUUID(), nil) @@ -535,7 +536,7 @@ func TestBurnTokensSuccess(t *testing.T) { mti := am.tokens["magic-tokens"].(*tokenmocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", 
identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", "pool1").Return(pool, nil) mti.On("BurnTokens", context.Background(), mock.Anything, "F1", &burn.TokenTransfer).Return(nil) mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenTransfer).Return(fftypes.NewUUID(), nil) @@ -561,7 +562,7 @@ func TestBurnTokensIdentityFail(t *testing.T) { } mim := am.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrganization", context.Background()).Return(nil, fmt.Errorf("pop")) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("", fmt.Errorf("pop")) _, err := am.BurnTokens(context.Background(), "ns1", burn, false) assert.EqualError(t, err, "pop") @@ -588,7 +589,7 @@ func TestBurnTokensConfirm(t *testing.T) { mti := am.tokens["magic-tokens"].(*tokenmocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", "pool1").Return(pool, nil) mti.On("BurnTokens", context.Background(), mock.Anything, "F1", &burn.TokenTransfer).Return(nil) mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenTransfer).Return(fftypes.NewUUID(), nil) @@ -630,7 +631,7 @@ func TestTransferTokensSuccess(t *testing.T) { mti := am.tokens["magic-tokens"].(*tokenmocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", "pool1").Return(pool, nil) mti.On("TransferTokens", context.Background(), mock.Anything, "F1", &transfer.TokenTransfer).Return(nil) mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenTransfer).Return(fftypes.NewUUID(), nil) @@ -663,7 +664,7 @@ func TestTransferTokensUnconfirmedPool(t *testing.T) { mdi := am.database.(*databasemocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", "pool1").Return(pool, nil) _, err := am.TransferTokens(context.Background(), "ns1", transfer, false) @@ -687,7 +688,7 @@ func TestTransferTokensIdentityFail(t *testing.T) { } mim := am.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrganization", context.Background()).Return(nil, fmt.Errorf("pop")) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("", fmt.Errorf("pop")) _, err := am.TransferTokens(context.Background(), "ns1", transfer, false) assert.EqualError(t, err, "pop") @@ -702,7 +703,7 @@ func TestTransferTokensNoFromOrTo(t *testing.T) { } mim := am.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: 
"0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) _, err := am.TransferTokens(context.Background(), "ns1", transfer, false) assert.Regexp(t, "FF10280", err) @@ -764,7 +765,7 @@ func TestTransferTokensTransactionFail(t *testing.T) { mdi := am.database.(*databasemocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", "pool1").Return(pool, nil) mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenTransfer).Return(nil, fmt.Errorf("pop")) @@ -813,7 +814,7 @@ func TestTransferTokensWithBroadcastMessage(t *testing.T) { mbm := am.broadcast.(*broadcastmocks.Manager) mms := &sysmessagingmocks.MessageSender{} mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", "pool1").Return(pool, nil) mti.On("TransferTokens", context.Background(), mock.Anything, "F1", &transfer.TokenTransfer).Return(nil) mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenTransfer).Return(fftypes.NewUUID(), nil) @@ -859,7 +860,7 @@ func TestTransferTokensWithBroadcastPrepareFail(t *testing.T) { mim := am.identity.(*identitymanagermocks.Manager) mbm := am.broadcast.(*broadcastmocks.Manager) mms := &sysmessagingmocks.MessageSender{} - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mbm.On("NewBroadcast", "ns1", transfer.Message).Return(mms) mms.On("Prepare", context.Background()).Return(fmt.Errorf("pop")) @@ -910,7 +911,7 @@ func TestTransferTokensWithPrivateMessage(t *testing.T) { mpm := am.messaging.(*privatemessagingmocks.Manager) mms := &sysmessagingmocks.MessageSender{} mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", "pool1").Return(pool, nil) mti.On("TransferTokens", context.Background(), mock.Anything, "F1", &transfer.TokenTransfer).Return(nil) mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenTransfer).Return(fftypes.NewUUID(), nil) @@ -959,7 +960,7 @@ func TestTransferTokensWithInvalidMessage(t *testing.T) { } mim := am.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) _, err := am.TransferTokens(context.Background(), "ns1", transfer, false) assert.Regexp(t, "FF10287", err) @@ -990,7 +991,7 @@ func TestTransferTokensConfirm(t *testing.T) { mti 
:= am.tokens["magic-tokens"].(*tokenmocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", "pool1").Return(pool, nil) mti.On("TransferTokens", context.Background(), mock.Anything, "F1", &transfer.TokenTransfer).Return(nil) mth.On("SubmitNewTransaction", context.Background(), "ns1", fftypes.TransactionTypeTokenTransfer).Return(fftypes.NewUUID(), nil) @@ -1050,7 +1051,7 @@ func TestTransferTokensWithBroadcastConfirm(t *testing.T) { mms := &sysmessagingmocks.MessageSender{} msa := am.syncasync.(*syncasyncmocks.Bridge) mth := am.txHelper.(*txcommonmocks.Helper) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", "pool1").Return(pool, nil) mti.On("TransferTokens", context.Background(), mock.Anything, "F1", &transfer.TokenTransfer).Return(nil) mdi.On("InsertOperation", context.Background(), mock.Anything).Return(nil) @@ -1101,7 +1102,7 @@ func TestTransferTokensPoolNotFound(t *testing.T) { mdi := am.database.(*databasemocks.Plugin) mim := am.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrganization", context.Background()).Return(&fftypes.Organization{Identity: "0x12345"}, nil) + mim.On("NormalizeSigningKey", context.Background(), "", identity.KeyNormalizationBlockchainPlugin).Return("0x12345", nil) mdi.On("GetTokenPool", context.Background(), "ns1", "pool1").Return(nil, nil) _, err := am.TransferTokens(context.Background(), "ns1", transfer, false) diff --git a/internal/batch/batch_manager.go b/internal/batch/batch_manager.go index d3342119aa..239f5cc277 100644 --- a/internal/batch/batch_manager.go +++ b/internal/batch/batch_manager.go @@ -112,7 +112,7 @@ type dispatcher struct { options DispatcherOptions } -func (bm *batchManager) getProcessorKey(namespace string, identity *fftypes.Identity, groupID *fftypes.Bytes32) string { +func (bm *batchManager) getProcessorKey(namespace string, identity *fftypes.SignerRef, groupID *fftypes.Bytes32) string { return fmt.Sprintf("%s|%s|%v", namespace, identity.Author, groupID) } @@ -141,7 +141,7 @@ func (bm *batchManager) NewMessages() chan<- int64 { return bm.newMessages } -func (bm *batchManager) getProcessor(txType fftypes.TransactionType, msgType fftypes.MessageType, group *fftypes.Bytes32, namespace string, identity *fftypes.Identity) (*batchProcessor, error) { +func (bm *batchManager) getProcessor(txType fftypes.TransactionType, msgType fftypes.MessageType, group *fftypes.Bytes32, namespace string, identity *fftypes.SignerRef) (*batchProcessor, error) { bm.dispatcherMux.Lock() defer bm.dispatcherMux.Unlock() @@ -297,7 +297,7 @@ func (bm *batchManager) waitForNewMessages() (done bool) { func (bm *batchManager) dispatchMessage(msg *fftypes.Message, data ...*fftypes.Data) error { l := log.L(bm.ctx) - processor, err := bm.getProcessor(msg.Header.TxType, msg.Header.Type, msg.Header.Group, msg.Header.Namespace, &msg.Header.Identity) + processor, err := bm.getProcessor(msg.Header.TxType, msg.Header.Type, msg.Header.Group, msg.Header.Namespace, &msg.Header.SignerRef) if err != nil { 
     return err
   }
diff --git a/internal/batch/batch_manager_test.go b/internal/batch/batch_manager_test.go
index 3e8f1287ac..a37ac73801 100644
--- a/internal/batch/batch_manager_test.go
+++ b/internal/batch/batch_manager_test.go
@@ -87,7 +87,7 @@ func TestE2EDispatchBroadcast(t *testing.T) {
       ID: fftypes.NewUUID(),
       Topics: []string{"topic1", "topic2"},
       Namespace: "ns1",
-      Identity: fftypes.Identity{Author: "did:firefly:org/abcd", Key: "0x12345"},
+      SignerRef: fftypes.SignerRef{Author: "did:firefly:org/abcd", Key: "0x12345"},
     },
     Data: fftypes.DataRefs{
       {ID: dataID1, Hash: dataHash},
@@ -202,7 +202,7 @@ func TestE2EDispatchPrivateUnpinned(t *testing.T) {
       ID: fftypes.NewUUID(),
       Topics: []string{"topic1", "topic2"},
       Namespace: "ns1",
-      Identity: fftypes.Identity{Author: "did:firefly:org/abcd", Key: "0x12345"},
+      SignerRef: fftypes.SignerRef{Author: "did:firefly:org/abcd", Key: "0x12345"},
       Group: &groupID,
     },
     Data: fftypes.DataRefs{
diff --git a/internal/batch/batch_processor.go b/internal/batch/batch_processor.go
index 4da5c3f1a6..4d17900ff9 100644
--- a/internal/batch/batch_processor.go
+++ b/internal/batch/batch_processor.go
@@ -44,7 +44,7 @@ type batchProcessorConf struct {
   dispatcherName string
   txType fftypes.TransactionType
   namespace string
-  identity fftypes.Identity
+  identity fftypes.SignerRef
   group *fftypes.Bytes32
   dispatch DispatchHandler
 }
@@ -373,7 +373,7 @@ func (bp *batchProcessor) buildFlushBatch(id *fftypes.UUID, newWork []*batchWork
   batch := &fftypes.Batch{
     ID: id,
     Namespace: bp.conf.namespace,
-    Identity: bp.conf.identity,
+    SignerRef: bp.conf.identity,
     Group: bp.conf.group,
     Payload: fftypes.BatchPayload{},
     Created: fftypes.Now(),
@@ -517,6 +517,7 @@ func (bp *batchProcessor) markMessagesDispatched(batch *fftypes.Batch) error {
     for _, msg := range batch.Payload.Messages {
       // Emit a confirmation event locally immediately
       event := fftypes.NewEvent(fftypes.EventTypeMessageConfirmed, batch.Namespace, msg.Header.ID, batch.Payload.TX.ID)
+      event.Correlator = msg.Header.CID
       if err := bp.database.InsertEvent(ctx, event); err != nil {
         return err
       }
diff --git a/internal/batch/batch_processor_test.go b/internal/batch/batch_processor_test.go
index 56abea0113..63b3cd89ff 100644
--- a/internal/batch/batch_processor_test.go
+++ b/internal/batch/batch_processor_test.go
@@ -38,7 +38,7 @@ func newTestBatchProcessor(dispatch DispatchHandler) (*databasemocks.Plugin, *ba
   bp := newBatchProcessor(context.Background(), mni, mdi, &batchProcessorConf{
     namespace: "ns1",
     txType: fftypes.TransactionTypeBatchPin,
-    identity: fftypes.Identity{Author: "did:firefly:org/abcd", Key: "0x12345"},
+    identity: fftypes.SignerRef{Author: "did:firefly:org/abcd", Key: "0x12345"},
     dispatch: dispatch,
     DispatcherOptions: DispatcherOptions{
       BatchMaxSize: 10,
diff --git a/internal/batchpin/batchpin_test.go b/internal/batchpin/batchpin_test.go
index b4ee1667a7..613b65b06e 100644
--- a/internal/batchpin/batchpin_test.go
+++ b/internal/batchpin/batchpin_test.go
@@ -56,7 +56,7 @@ func TestSubmitPinnedBatchOk(t *testing.T) {
   batch := &fftypes.Batch{
     ID: fftypes.NewUUID(),
-    Identity: fftypes.Identity{
+    SignerRef: fftypes.SignerRef{
       Author: "id1",
       Key: "0x12345",
     },
@@ -90,7 +90,7 @@ func TestSubmitPinnedBatchWithMetricsOk(t *testing.T) {
   mmi := bp.metrics.(*metricsmocks.Manager)
   batch := &fftypes.Batch{
     ID: fftypes.NewUUID(),
-    Identity: fftypes.Identity{
+    SignerRef: fftypes.SignerRef{
       Author: "id1",
       Key: "0x12345",
     },
@@ -125,7 +125,7 @@ func TestSubmitPinnedBatchOpFail(t *testing.T) {
   batch := &fftypes.Batch{
     ID:
fftypes.NewUUID(), - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "id1", Key: "0x12345", }, diff --git a/internal/blockchain/ethereum/address_resolver.go b/internal/blockchain/ethereum/address_resolver.go index ba5c54ffe1..0ae8212955 100644 --- a/internal/blockchain/ethereum/address_resolver.go +++ b/internal/blockchain/ethereum/address_resolver.go @@ -80,7 +80,7 @@ func newAddressResolver(ctx context.Context, prefix config.Prefix) (ar *addressR return ar, nil } -func (ar *addressResolver) ResolveSigningKey(ctx context.Context, keyDescriptor string) (string, error) { +func (ar *addressResolver) NormalizeSigningKey(ctx context.Context, keyDescriptor string) (string, error) { if cached := ar.cache.Get(keyDescriptor); cached != nil { cached.Extend(ar.cacheTTL) diff --git a/internal/blockchain/ethereum/address_resolver_test.go b/internal/blockchain/ethereum/address_resolver_test.go index e728aae5c1..24e690b1fb 100644 --- a/internal/blockchain/ethereum/address_resolver_test.go +++ b/internal/blockchain/ethereum/address_resolver_test.go @@ -66,11 +66,11 @@ func TestAddressResolverInEthereumOKCached(t *testing.T) { addressResolver: ar, } - resolved, err := e.ResolveSigningKey(ctx, "testkeystring") + resolved, err := e.NormalizeSigningKey(ctx, "testkeystring") assert.NoError(t, err) assert.Equal(t, strings.ToLower(addr), resolved) - resolved, err = e.ResolveSigningKey(ctx, "testkeystring") // cached + resolved, err = e.NormalizeSigningKey(ctx, "testkeystring") // cached assert.NoError(t, err) assert.Equal(t, strings.ToLower(addr), resolved) } @@ -101,7 +101,7 @@ func TestAddressResolverPOSTOk(t *testing.T) { ar, err := newAddressResolver(ctx, prefix) assert.NoError(t, err) - resolved, err := ar.ResolveSigningKey(ctx, "testkeystring") + resolved, err := ar.NormalizeSigningKey(ctx, "testkeystring") assert.NoError(t, err) assert.Equal(t, strings.ToLower(addr), resolved) @@ -128,7 +128,7 @@ func TestAddressResolverPOSTBadKey(t *testing.T) { ar, err := newAddressResolver(ctx, prefix) assert.NoError(t, err) - _, err = ar.ResolveSigningKey(ctx, "testkeystring") + _, err = ar.NormalizeSigningKey(ctx, "testkeystring") assert.Regexp(t, "FF10341", err) } @@ -151,7 +151,7 @@ func TestAddressResolverPOSTResponse(t *testing.T) { ar, err := newAddressResolver(ctx, prefix) assert.NoError(t, err) - _, err = ar.ResolveSigningKey(ctx, "testkeystring") + _, err = ar.NormalizeSigningKey(ctx, "testkeystring") assert.Regexp(t, "FF10341", err) } @@ -172,7 +172,7 @@ func TestAddressResolverFailureResponse(t *testing.T) { ar, err := newAddressResolver(ctx, prefix) assert.NoError(t, err) - _, err = ar.ResolveSigningKey(ctx, "testkeystring") + _, err = ar.NormalizeSigningKey(ctx, "testkeystring") assert.Regexp(t, "FF10340", err) } @@ -193,7 +193,7 @@ func TestAddressResolverErrorResponse(t *testing.T) { ar, err := newAddressResolver(ctx, prefix) assert.NoError(t, err) - _, err = ar.ResolveSigningKey(ctx, "testkeystring") + _, err = ar.NormalizeSigningKey(ctx, "testkeystring") assert.Regexp(t, "FF10339", err) } @@ -223,7 +223,7 @@ func TestAddressResolverErrorURLTemplate(t *testing.T) { ar, err := newAddressResolver(ctx, prefix) assert.NoError(t, err) - _, err = ar.ResolveSigningKey(ctx, "testkeystring") + _, err = ar.NormalizeSigningKey(ctx, "testkeystring") assert.Regexp(t, "FF10338.*urlTemplate", err) } @@ -240,7 +240,7 @@ func TestAddressResolverErrorBodyTemplate(t *testing.T) { ar, err := newAddressResolver(ctx, prefix) assert.NoError(t, err) - _, err = ar.ResolveSigningKey(ctx, "testkeystring") + 
_, err = ar.NormalizeSigningKey(ctx, "testkeystring") assert.Regexp(t, "FF10338.*bodyTemplate", err) } diff --git a/internal/blockchain/ethereum/ethereum.go b/internal/blockchain/ethereum/ethereum.go index 85b070c4b7..9a0e6a7f90 100644 --- a/internal/blockchain/ethereum/ethereum.go +++ b/internal/blockchain/ethereum/ethereum.go @@ -141,6 +141,10 @@ func (e *Ethereum) Name() string { return "ethereum" } +func (e *Ethereum) VerifierType() fftypes.VerifierType { + return fftypes.VerifierTypeEthAddress +} + func (e *Ethereum) Init(ctx context.Context, prefix config.Prefix, callbacks blockchain.Callbacks) (err error) { ethconnectConf := prefix.SubPrefix(EthconnectConfigKey) @@ -283,7 +287,7 @@ func (e *Ethereum) handleBatchPinEvent(ctx context.Context, msgJSON fftypes.JSON return nil // move on } - authorAddress, err = e.ResolveSigningKey(ctx, authorAddress) + authorAddress, err = e.NormalizeSigningKey(ctx, authorAddress) if err != nil { log.L(ctx).Errorf("BatchPin event is not valid - bad from address (%s): %+v", err, msgJSON) return nil // move on @@ -337,7 +341,10 @@ func (e *Ethereum) handleBatchPinEvent(ctx context.Context, msgJSON fftypes.JSON } // If there's an error dispatching the event, we must return the error and shutdown - return e.callbacks.BatchPinComplete(batch, authorAddress) + return e.callbacks.BatchPinComplete(batch, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: authorAddress, + }) } func (e *Ethereum) handleContractEvent(ctx context.Context, msgJSON fftypes.JSONObject) (err error) { @@ -487,10 +494,10 @@ func validateEthAddress(ctx context.Context, key string) (string, error) { return "", i18n.NewError(ctx, i18n.MsgInvalidEthAddress) } -func (e *Ethereum) ResolveSigningKey(ctx context.Context, key string) (string, error) { +func (e *Ethereum) NormalizeSigningKey(ctx context.Context, key string) (string, error) { resolved, err := validateEthAddress(ctx, key) if err != nil && e.addressResolver != nil { - resolved, err := e.addressResolver.ResolveSigningKey(ctx, key) + resolved, err := e.addressResolver.NormalizeSigningKey(ctx, key) if err == nil { log.L(ctx).Infof("Key '%s' resolved to '%s'", key, resolved) } diff --git a/internal/blockchain/ethereum/ethereum_test.go b/internal/blockchain/ethereum/ethereum_test.go index fd192ca37e..2e85c207a0 100644 --- a/internal/blockchain/ethereum/ethereum_test.go +++ b/internal/blockchain/ethereum/ethereum_test.go @@ -173,6 +173,7 @@ func TestInitAllNewStreamsAndWSEvent(t *testing.T) { assert.NoError(t, err) assert.Equal(t, "ethereum", e.Name()) + assert.Equal(t, fftypes.VerifierTypeEthAddress, e.VerifierType()) assert.Equal(t, 4, httpmock.GetTotalCallCount()) assert.Equal(t, "es12345", e.initInfo.stream.ID) assert.Equal(t, "sub12345", e.initInfo.sub.ID) @@ -625,10 +626,10 @@ func TestVerifyEthAddress(t *testing.T) { e, cancel := newTestEthereum() defer cancel() - _, err := e.ResolveSigningKey(context.Background(), "0x12345") + _, err := e.NormalizeSigningKey(context.Background(), "0x12345") assert.Regexp(t, "FF10141", err) - key, err := e.ResolveSigningKey(context.Background(), "0x2a7c9D5248681CE6c393117E641aD037F5C079F6") + key, err := e.NormalizeSigningKey(context.Background(), "0x2a7c9D5248681CE6c393117E641aD037F5C079F6") assert.NoError(t, err) assert.Equal(t, "0x2a7c9d5248681ce6c393117e641ad037f5c079f6", key) @@ -708,7 +709,12 @@ func TestHandleMessageBatchPinOK(t *testing.T) { ID: "sb-b5b97a4e-a317-4053-6400-1474650efcb5", } - em.On("BatchPinComplete", mock.Anything, 
"0x91d2b4381a4cd5c7c0f27565a7d4b829844c8635", mock.Anything).Return(nil) + expectedSigningKeyRef := &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "0x91d2b4381a4cd5c7c0f27565a7d4b829844c8635", + } + + em.On("BatchPinComplete", mock.Anything, expectedSigningKeyRef, mock.Anything).Return(nil) var events []interface{} err := json.Unmarshal(data.Bytes(), &events) @@ -722,7 +728,7 @@ func TestHandleMessageBatchPinOK(t *testing.T) { assert.Equal(t, "847d3bfd-0742-49ef-b65d-3fed15f5b0a6", b.BatchID.String()) assert.Equal(t, "d71eb138d74c229a388eb0e1abc03f4c7cbb21d4fc4b839fbf0ec73e4263f6be", b.BatchHash.String()) assert.Equal(t, "Qmf412jQZiuVUtdgnB36FXFX7xg5V6KEbSJ4dpQuhkLyfD", b.BatchPayloadRef) - assert.Equal(t, "0x91d2b4381a4cd5c7c0f27565a7d4b829844c8635", em.Calls[0].Arguments[1]) + assert.Equal(t, expectedSigningKeyRef, em.Calls[0].Arguments[1]) assert.Len(t, b.Contexts, 2) assert.Equal(t, "68e4da79f805bca5b912bcda9c63d03e6e867108dabb9b944109aea541ef522a", b.Contexts[0].String()) assert.Equal(t, "19b82093de5ce92a01e333048e877e2374354bf846dd034864ef6ffbd6438771", b.Contexts[1].String()) @@ -790,7 +796,12 @@ func TestHandleMessageEmptyPayloadRef(t *testing.T) { ID: "sb-b5b97a4e-a317-4053-6400-1474650efcb5", } - em.On("BatchPinComplete", mock.Anything, "0x91d2b4381a4cd5c7c0f27565a7d4b829844c8635", mock.Anything).Return(nil) + expectedSigningKeyRef := &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "0x91d2b4381a4cd5c7c0f27565a7d4b829844c8635", + } + + em.On("BatchPinComplete", mock.Anything, expectedSigningKeyRef, mock.Anything).Return(nil) var events []interface{} err := json.Unmarshal(data.Bytes(), &events) @@ -804,7 +815,7 @@ func TestHandleMessageEmptyPayloadRef(t *testing.T) { assert.Equal(t, "847d3bfd-0742-49ef-b65d-3fed15f5b0a6", b.BatchID.String()) assert.Equal(t, "d71eb138d74c229a388eb0e1abc03f4c7cbb21d4fc4b839fbf0ec73e4263f6be", b.BatchHash.String()) assert.Empty(t, b.BatchPayloadRef) - assert.Equal(t, "0x91d2b4381a4cd5c7c0f27565a7d4b829844c8635", em.Calls[0].Arguments[1]) + assert.Equal(t, expectedSigningKeyRef, em.Calls[0].Arguments[1]) assert.Len(t, b.Contexts, 2) assert.Equal(t, "68e4da79f805bca5b912bcda9c63d03e6e867108dabb9b944109aea541ef522a", b.Contexts[0].String()) assert.Equal(t, "19b82093de5ce92a01e333048e877e2374354bf846dd034864ef6ffbd6438771", b.Contexts[1].String()) @@ -835,6 +846,11 @@ func TestHandleMessageBatchPinExit(t *testing.T) { } ]`) + expectedSigningKeyRef := &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "0x91d2b4381a4cd5c7c0f27565a7d4b829844c8635", + } + em := &blockchainmocks.Callbacks{} e := &Ethereum{ callbacks: em, @@ -843,7 +859,7 @@ func TestHandleMessageBatchPinExit(t *testing.T) { ID: "sb-b5b97a4e-a317-4053-6400-1474650efcb5", } - em.On("BatchPinComplete", mock.Anything, "0x91d2b4381a4cd5c7c0f27565a7d4b829844c8635", mock.Anything).Return(fmt.Errorf("pop")) + em.On("BatchPinComplete", mock.Anything, expectedSigningKeyRef, mock.Anything).Return(fmt.Errorf("pop")) var events []interface{} err := json.Unmarshal(data.Bytes(), &events) diff --git a/internal/blockchain/fabric/fabric.go b/internal/blockchain/fabric/fabric.go index 7a76b9e7d9..36a91eeab0 100644 --- a/internal/blockchain/fabric/fabric.go +++ b/internal/blockchain/fabric/fabric.go @@ -149,6 +149,10 @@ func (f *Fabric) Name() string { return "fabric" } +func (f *Fabric) VerifierType() fftypes.VerifierType { + return fftypes.VerifierTypeMSPIdentity +} + func (f *Fabric) Init(ctx context.Context, prefix config.Prefix, callbacks 
blockchain.Callbacks) (err error) { fabconnectConf := prefix.SubPrefix(FabconnectConfigKey) @@ -321,7 +325,10 @@ func (f *Fabric) handleBatchPinEvent(ctx context.Context, msgJSON fftypes.JSONOb } // If there's an error dispatching the event, we must return the error and shutdown - return f.callbacks.BatchPinComplete(batch, signer) + return f.callbacks.BatchPinComplete(batch, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeMSPIdentity, + Value: signer, + }) } func (f *Fabric) handleContractEvent(ctx context.Context, msgJSON fftypes.JSONObject) (err error) { @@ -466,7 +473,7 @@ func (f *Fabric) eventLoop() { } } -func (f *Fabric) ResolveSigningKey(ctx context.Context, signingKeyInput string) (string, error) { +func (f *Fabric) NormalizeSigningKey(ctx context.Context, signingKeyInput string) (string, error) { // we expand the short user name into the fully qualified onchain identity: // mspid::x509::{ecert DN}::{CA DN} return signingKeyInput, nil if !fullIdentityPattern.MatchString(signingKeyInput) { diff --git a/internal/blockchain/fabric/fabric_test.go b/internal/blockchain/fabric/fabric_test.go index 84b6b6096d..8848ae52d9 100644 --- a/internal/blockchain/fabric/fabric_test.go +++ b/internal/blockchain/fabric/fabric_test.go @@ -173,6 +173,7 @@ func TestInitAllNewStreamsAndWSEvent(t *testing.T) { assert.NoError(t, err) assert.Equal(t, "fabric", e.Name()) + assert.Equal(t, fftypes.VerifierTypeMSPIdentity, e.VerifierType()) assert.Equal(t, 4, httpmock.GetTotalCallCount()) assert.Equal(t, "es12345", e.initInfo.stream.ID) assert.Equal(t, "sub12345", e.initInfo.sub.ID) @@ -483,7 +484,7 @@ func TestResolveFullIDSigner(t *testing.T) { defer cancel() id := "org1MSP::x509::CN=admin,OU=client::CN=fabric-ca-server" - signKey, err := e.ResolveSigningKey(context.Background(), id) + signKey, err := e.NormalizeSigningKey(context.Background(), id) assert.NoError(t, err) assert.Equal(t, "org1MSP::x509::CN=admin,OU=client::CN=fabric-ca-server", signKey) @@ -503,7 +504,7 @@ func TestResolveSigner(t *testing.T) { responder, _ := httpmock.NewJsonResponder(200, res) httpmock.RegisterResponder("GET", `http://localhost:12345/identities/signer001`, responder) - resolved, err := e.ResolveSigningKey(context.Background(), "signer001") + resolved, err := e.NormalizeSigningKey(context.Background(), "signer001") assert.NoError(t, err) assert.Equal(t, "org1MSP::x509::CN=admin,OU=client::CN=fabric-ca-server", resolved) } @@ -518,7 +519,7 @@ func TestResolveSignerFailedFabricCARequest(t *testing.T) { responder, _ := httpmock.NewJsonResponder(503, res) httpmock.RegisterResponder("GET", `http://localhost:12345/identities/signer001`, responder) - _, err := e.ResolveSigningKey(context.Background(), "signer001") + _, err := e.NormalizeSigningKey(context.Background(), "signer001") assert.EqualError(t, err, "FF10284: Error from fabconnect: %!!(MISSING)s()") } @@ -536,7 +537,7 @@ func TestResolveSignerBadECertReturned(t *testing.T) { responder, _ := httpmock.NewJsonResponder(200, res) httpmock.RegisterResponder("GET", `http://localhost:12345/identities/signer001`, responder) - _, err := e.ResolveSigningKey(context.Background(), "signer001") + _, err := e.NormalizeSigningKey(context.Background(), "signer001") assert.Contains(t, err.Error(), "FF10286: Failed to decode certificate:") } @@ -554,7 +555,7 @@ func TestResolveSignerBadCACertReturned(t *testing.T) { responder, _ := httpmock.NewJsonResponder(200, res) httpmock.RegisterResponder("GET", `http://localhost:12345/identities/signer001`, responder) - _, err := 
e.ResolveSigningKey(context.Background(), "signer001") + _, err := e.NormalizeSigningKey(context.Background(), "signer001") assert.Contains(t, err.Error(), "FF10286: Failed to decode certificate:") } @@ -601,7 +602,12 @@ func TestHandleMessageBatchPinOK(t *testing.T) { ID: "sb-0910f6a8-7bd6-4ced-453e-2db68149ce8e", } - em.On("BatchPinComplete", mock.Anything, "u0vgwu9s00-x509::CN=user2,OU=client::CN=fabric-ca-server").Return(nil) + expectedSigningKeyRef := &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeMSPIdentity, + Value: "u0vgwu9s00-x509::CN=user2,OU=client::CN=fabric-ca-server", + } + + em.On("BatchPinComplete", mock.Anything, expectedSigningKeyRef).Return(nil) var events []interface{} err := json.Unmarshal(data, &events) @@ -615,7 +621,7 @@ func TestHandleMessageBatchPinOK(t *testing.T) { assert.Equal(t, "847d3bfd-0742-49ef-b65d-3fed15f5b0a6", b.BatchID.String()) assert.Equal(t, "d71eb138d74c229a388eb0e1abc03f4c7cbb21d4fc4b839fbf0ec73e4263f6be", b.BatchHash.String()) assert.Equal(t, "Qmf412jQZiuVUtdgnB36FXFX7xg5V6KEbSJ4dpQuhkLyfD", b.BatchPayloadRef) - assert.Equal(t, "u0vgwu9s00-x509::CN=user2,OU=client::CN=fabric-ca-server", em.Calls[1].Arguments[1]) + assert.Equal(t, expectedSigningKeyRef, em.Calls[1].Arguments[1]) assert.Len(t, b.Contexts, 2) assert.Equal(t, "68e4da79f805bca5b912bcda9c63d03e6e867108dabb9b944109aea541ef522a", b.Contexts[0].String()) assert.Equal(t, "19b82093de5ce92a01e333048e877e2374354bf846dd034864ef6ffbd6438771", b.Contexts[1].String()) @@ -645,7 +651,12 @@ func TestHandleMessageEmptyPayloadRef(t *testing.T) { ID: "sb-0910f6a8-7bd6-4ced-453e-2db68149ce8e", } - em.On("BatchPinComplete", mock.Anything, "u0vgwu9s00-x509::CN=user2,OU=client::CN=fabric-ca-server", mock.Anything, mock.Anything).Return(nil) + expectedSigningKeyRef := &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeMSPIdentity, + Value: "u0vgwu9s00-x509::CN=user2,OU=client::CN=fabric-ca-server", + } + + em.On("BatchPinComplete", mock.Anything, expectedSigningKeyRef, mock.Anything, mock.Anything).Return(nil) var events []interface{} err := json.Unmarshal(data, &events) @@ -659,7 +670,7 @@ func TestHandleMessageEmptyPayloadRef(t *testing.T) { assert.Equal(t, "847d3bfd-0742-49ef-b65d-3fed15f5b0a6", b.BatchID.String()) assert.Equal(t, "d71eb138d74c229a388eb0e1abc03f4c7cbb21d4fc4b839fbf0ec73e4263f6be", b.BatchHash.String()) assert.Empty(t, b.BatchPayloadRef) - assert.Equal(t, "u0vgwu9s00-x509::CN=user2,OU=client::CN=fabric-ca-server", em.Calls[0].Arguments[1]) + assert.Equal(t, expectedSigningKeyRef, em.Calls[0].Arguments[1]) assert.Len(t, b.Contexts, 2) assert.Equal(t, "68e4da79f805bca5b912bcda9c63d03e6e867108dabb9b944109aea541ef522a", b.Contexts[0].String()) assert.Equal(t, "19b82093de5ce92a01e333048e877e2374354bf846dd034864ef6ffbd6438771", b.Contexts[1].String()) @@ -689,7 +700,12 @@ func TestHandleMessageBatchPinExit(t *testing.T) { ID: "sb-0910f6a8-7bd6-4ced-453e-2db68149ce8e", } - em.On("BatchPinComplete", mock.Anything, "u0vgwu9s00-x509::CN=user2,OU=client::CN=fabric-ca-server", mock.Anything, mock.Anything).Return(fmt.Errorf("pop")) + expectedSigningKeyRef := &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeMSPIdentity, + Value: "u0vgwu9s00-x509::CN=user2,OU=client::CN=fabric-ca-server", + } + + em.On("BatchPinComplete", mock.Anything, expectedSigningKeyRef, mock.Anything, mock.Anything).Return(fmt.Errorf("pop")) var events []interface{} err := json.Unmarshal(data, &events) diff --git a/internal/broadcast/datatype_test.go b/internal/broadcast/datatype_test.go index 977e087415..97381682ba 100644 
--- a/internal/broadcast/datatype_test.go +++ b/internal/broadcast/datatype_test.go @@ -60,7 +60,7 @@ func TestBroadcastDatatypeBadValue(t *testing.T) { mdm.On("VerifyNamespaceExists", mock.Anything, "ns1").Return(nil) mdm.On("CheckDatatype", mock.Anything, "ns1", mock.Anything).Return(nil) mim := bm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", mock.Anything, mock.Anything).Return(nil) + mim.On("ResolveInputSigningIdentity", mock.Anything, "ns1", mock.Anything).Return(nil) _, err := bm.BroadcastDatatype(context.Background(), "ns1", &fftypes.Datatype{ Namespace: "ns1", Name: "ent1", @@ -77,7 +77,7 @@ func TestBroadcastUpsertFail(t *testing.T) { mdm := bm.data.(*datamocks.Manager) mim := bm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", mock.Anything, mock.Anything).Return(nil) + mim.On("ResolveInputSigningIdentity", mock.Anything, "ns1", mock.Anything).Return(nil) mdi.On("UpsertData", mock.Anything, mock.Anything, database.UpsertOptimizationNew).Return(fmt.Errorf("pop")) mdm.On("VerifyNamespaceExists", mock.Anything, "ns1").Return(nil) mdm.On("CheckDatatype", mock.Anything, "ns1", mock.Anything).Return(nil) @@ -119,7 +119,7 @@ func TestBroadcastBroadcastFail(t *testing.T) { mdm := bm.data.(*datamocks.Manager) mim := bm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", mock.Anything, mock.Anything).Return(nil) + mim.On("ResolveInputSigningIdentity", mock.Anything, "ns1", mock.Anything).Return(nil) mdi.On("UpsertData", mock.Anything, mock.Anything, database.UpsertOptimizationNew).Return(nil) mdm.On("VerifyNamespaceExists", mock.Anything, "ns1").Return(nil) mdm.On("CheckDatatype", mock.Anything, "ns1", mock.Anything).Return(nil) @@ -141,7 +141,7 @@ func TestBroadcastOk(t *testing.T) { mdm := bm.data.(*datamocks.Manager) mim := bm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", mock.Anything, mock.Anything).Return(nil) + mim.On("ResolveInputSigningIdentity", mock.Anything, "ns1", mock.Anything).Return(nil) mdi.On("UpsertData", mock.Anything, mock.Anything, database.UpsertOptimizationNew).Return(nil) mdm.On("VerifyNamespaceExists", mock.Anything, "ns1").Return(nil) mdm.On("CheckDatatype", mock.Anything, "ns1", mock.Anything).Return(nil) diff --git a/internal/broadcast/definition.go b/internal/broadcast/definition.go index 17f218eb36..3a2e36a4a6 100644 --- a/internal/broadcast/definition.go +++ b/internal/broadcast/definition.go @@ -21,17 +21,18 @@ import ( "encoding/json" "github.com/hyperledger/firefly/internal/i18n" + "github.com/hyperledger/firefly/internal/identity" "github.com/hyperledger/firefly/pkg/database" "github.com/hyperledger/firefly/pkg/fftypes" ) -func (bm *broadcastManager) BroadcastDefinitionAsNode(ctx context.Context, ns string, def fftypes.Definition, tag fftypes.SystemTag, waitConfirm bool) (msg *fftypes.Message, err error) { - return bm.BroadcastDefinition(ctx, ns, def, &fftypes.Identity{ /* resolve to node default */ }, tag, waitConfirm) +func (bm *broadcastManager) BroadcastDefinitionAsNode(ctx context.Context, ns string, def fftypes.Definition, tag string, waitConfirm bool) (msg *fftypes.Message, err error) { + return bm.BroadcastDefinition(ctx, ns, def, &fftypes.SignerRef{ /* resolve to node default */ }, tag, waitConfirm) } -func (bm *broadcastManager) BroadcastDefinition(ctx context.Context, ns string, def fftypes.Definition, signingIdentity *fftypes.Identity, tag fftypes.SystemTag, waitConfirm bool) (msg *fftypes.Message, err error) { +func (bm *broadcastManager) 
BroadcastDefinition(ctx context.Context, ns string, def fftypes.Definition, signingIdentity *fftypes.SignerRef, tag string, waitConfirm bool) (msg *fftypes.Message, err error) {
-  err = bm.identity.ResolveInputIdentity(ctx, signingIdentity)
+  err = bm.identity.ResolveInputSigningIdentity(ctx, ns, signingIdentity)
   if err != nil {
     return nil, err
   }
@@ -39,14 +40,19 @@ func (bm *broadcastManager) BroadcastDefinition(ctx context.Context, ns string,
   return bm.broadcastDefinitionCommon(ctx, ns, def, signingIdentity, tag, waitConfirm)
 }
-func (bm *broadcastManager) BroadcastRootOrgDefinition(ctx context.Context, def *fftypes.Organization, signingIdentity *fftypes.Identity, tag fftypes.SystemTag, waitConfirm bool) (msg *fftypes.Message, err error) {
+// BroadcastIdentityClaim is a special form of BroadcastDefinitionAsNode where the signing identity does not need to have been pre-registered
+// The blockchain "key" will be normalized, but the "author" will pass through unchecked
+func (bm *broadcastManager) BroadcastIdentityClaim(ctx context.Context, ns string, def *fftypes.IdentityClaim, signingIdentity *fftypes.SignerRef, tag string, waitConfirm bool) (msg *fftypes.Message, err error) {
-  signingIdentity.Author = bm.identity.OrgDID(def)
+  signingIdentity.Key, err = bm.identity.NormalizeSigningKey(ctx, signingIdentity.Key, identity.KeyNormalizationBlockchainPlugin)
+  if err != nil {
+    return nil, err
+  }
-  return bm.broadcastDefinitionCommon(ctx, fftypes.SystemNamespace, def, signingIdentity, tag, waitConfirm)
+  return bm.broadcastDefinitionCommon(ctx, ns, def, signingIdentity, tag, waitConfirm)
 }
-func (bm *broadcastManager) broadcastDefinitionCommon(ctx context.Context, ns string, def fftypes.Definition, signingIdentity *fftypes.Identity, tag fftypes.SystemTag, waitConfirm bool) (msg *fftypes.Message, err error) {
+func (bm *broadcastManager) broadcastDefinitionCommon(ctx context.Context, ns string, def fftypes.Definition, signingIdentity *fftypes.SignerRef, tag string, waitConfirm bool) (msg *fftypes.Message, err error) {
   // Serialize it into a data object, as a piece of data we can write to a message
   data := &fftypes.Data{
@@ -75,9 +81,9 @@ func (bm *broadcastManager) broadcastDefinitionCommon(ctx context.Context, ns st
     Header: fftypes.MessageHeader{
       Namespace: ns,
       Type: fftypes.MessageTypeDefinition,
-      Identity: *signingIdentity,
+      SignerRef: *signingIdentity,
       Topics: fftypes.FFStringArray{def.Topic()},
-      Tag: string(tag),
+      Tag: tag,
       TxType: fftypes.TransactionTypeBatchPin,
     },
     Data: fftypes.DataRefs{
diff --git a/internal/broadcast/definition_test.go b/internal/broadcast/definition_test.go
index 057ef9e211..6afeb24585 100644
--- a/internal/broadcast/definition_test.go
+++ b/internal/broadcast/definition_test.go
@@ -20,6 +20,7 @@ import (
   "fmt"
   "testing"
+  "github.com/hyperledger/firefly/internal/identity"
   "github.com/hyperledger/firefly/mocks/databasemocks"
   "github.com/hyperledger/firefly/mocks/identitymanagermocks"
   "github.com/hyperledger/firefly/mocks/syncasyncmocks"
@@ -38,7 +39,7 @@ func TestBroadcastDefinitionAsNodeConfirm(t *testing.T) {
   mim := bm.identity.(*identitymanagermocks.Manager)
   mdi.On("UpsertData", mock.Anything, mock.Anything, database.UpsertOptimizationNew).Return(nil)
-  mim.On("ResolveInputIdentity", mock.Anything, mock.Anything).Return(nil)
+  mim.On("ResolveInputSigningIdentity", mock.Anything, "ff_system", mock.Anything).Return(nil)
   msa.On("WaitForMessage", bm.ctx, "ff_system", mock.Anything, mock.Anything).Return(nil, fmt.Errorf("pop"))
   _, err :=
bm.BroadcastDefinitionAsNode(bm.ctx, fftypes.SystemNamespace, &fftypes.Namespace{}, fftypes.SystemTagDefineNamespace, true) @@ -49,6 +50,48 @@ func TestBroadcastDefinitionAsNodeConfirm(t *testing.T) { mim.AssertExpectations(t) } +func TestBroadcastIdentityClaim(t *testing.T) { + bm, cancel := newTestBroadcast(t) + defer cancel() + + mdi := bm.database.(*databasemocks.Plugin) + msa := bm.syncasync.(*syncasyncmocks.Bridge) + mim := bm.identity.(*identitymanagermocks.Manager) + + mdi.On("UpsertData", mock.Anything, mock.Anything, database.UpsertOptimizationNew).Return(nil) + mim.On("NormalizeSigningKey", mock.Anything, "0x1234", identity.KeyNormalizationBlockchainPlugin).Return("", nil) + msa.On("WaitForMessage", bm.ctx, "ff_system", mock.Anything, mock.Anything).Return(nil, fmt.Errorf("pop")) + + _, err := bm.BroadcastIdentityClaim(bm.ctx, fftypes.SystemNamespace, &fftypes.IdentityClaim{ + Identity: &fftypes.Identity{}, + }, &fftypes.SignerRef{ + Key: "0x1234", + }, fftypes.SystemTagDefineNamespace, true) + assert.EqualError(t, err, "pop") + + mdi.AssertExpectations(t) + msa.AssertExpectations(t) + mim.AssertExpectations(t) +} + +func TestBroadcastIdentityClaimFail(t *testing.T) { + bm, cancel := newTestBroadcast(t) + defer cancel() + + mim := bm.identity.(*identitymanagermocks.Manager) + + mim.On("NormalizeSigningKey", mock.Anything, "0x1234", identity.KeyNormalizationBlockchainPlugin).Return("", fmt.Errorf("pop")) + + _, err := bm.BroadcastIdentityClaim(bm.ctx, fftypes.SystemNamespace, &fftypes.IdentityClaim{ + Identity: &fftypes.Identity{}, + }, &fftypes.SignerRef{ + Key: "0x1234", + }, fftypes.SystemTagDefineNamespace, true) + assert.EqualError(t, err, "pop") + + mim.AssertExpectations(t) +} + func TestBroadcastDatatypeDefinitionAsNodeConfirm(t *testing.T) { bm, cancel := newTestBroadcast(t) defer cancel() @@ -59,7 +102,7 @@ func TestBroadcastDatatypeDefinitionAsNodeConfirm(t *testing.T) { ns := "customNamespace" mdi.On("UpsertData", mock.Anything, mock.Anything, database.UpsertOptimizationNew).Return(nil) - mim.On("ResolveInputIdentity", mock.Anything, mock.Anything).Return(nil) + mim.On("ResolveInputSigningIdentity", mock.Anything, ns, mock.Anything).Return(nil) msa.On("WaitForMessage", bm.ctx, ns, mock.Anything, mock.Anything).Return(nil, fmt.Errorf("pop")) _, err := bm.BroadcastDefinitionAsNode(bm.ctx, ns, &fftypes.Datatype{}, fftypes.SystemTagDefineNamespace, true) @@ -77,7 +120,7 @@ func TestBroadcastDefinitionAsNodeUpsertFail(t *testing.T) { mdi := bm.database.(*databasemocks.Plugin) mdi.On("UpsertData", mock.Anything, mock.Anything, database.UpsertOptimizationNew).Return(fmt.Errorf("pop")) mim := bm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", mock.Anything, mock.Anything).Return(nil) + mim.On("ResolveInputSigningIdentity", mock.Anything, fftypes.SystemNamespace, mock.Anything).Return(nil) _, err := bm.BroadcastDefinitionAsNode(bm.ctx, fftypes.SystemNamespace, &fftypes.Namespace{}, fftypes.SystemTagDefineNamespace, false) assert.Regexp(t, "pop", err) } @@ -87,31 +130,10 @@ func TestBroadcastDefinitionBadIdentity(t *testing.T) { defer cancel() mim := bm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", mock.Anything, mock.Anything).Return(fmt.Errorf("pop")) - _, err := bm.BroadcastDefinition(bm.ctx, fftypes.SystemNamespace, &fftypes.Namespace{}, &fftypes.Identity{ + mim.On("ResolveInputSigningIdentity", mock.Anything, fftypes.SystemNamespace, mock.Anything).Return(fmt.Errorf("pop")) + _, err := bm.BroadcastDefinition(bm.ctx, 
fftypes.SystemNamespace, &fftypes.Namespace{}, &fftypes.SignerRef{
     Author: "wrong",
     Key: "wrong",
   }, fftypes.SystemTagDefineNamespace, false)
   assert.Regexp(t, "pop", err)
 }
-
-func TestBroadcastRootOrgDefinitionPassedThroughAnyIdentity(t *testing.T) {
-  bm, cancel := newTestBroadcast(t)
-  defer cancel()
-
-  mim := bm.identity.(*identitymanagermocks.Manager)
-  mim.On("OrgDID", mock.Anything, mock.Anything).Return("did:firefly:org/12345", nil)
-  // Should call through to upsert data, stop test there
-  mdi := bm.database.(*databasemocks.Plugin)
-  mdi.On("UpsertData", mock.Anything, mock.Anything, database.UpsertOptimizationNew).Return(fmt.Errorf("pop"))
-
-  _, err := bm.BroadcastRootOrgDefinition(bm.ctx, &fftypes.Organization{
-    ID: fftypes.NewUUID(),
-  }, &fftypes.Identity{
-    Author: "anything - overridden",
-    Key: "0x12345",
-  }, fftypes.SystemTagDefineNamespace, false)
-  assert.Regexp(t, "pop", err)
-
-  mim.AssertExpectations(t)
-}
diff --git a/internal/broadcast/manager.go b/internal/broadcast/manager.go
index 3f580553e9..e002898f4e 100644
--- a/internal/broadcast/manager.go
+++ b/internal/broadcast/manager.go
@@ -45,9 +45,9 @@ type Manager interface {
   BroadcastDatatype(ctx context.Context, ns string, datatype *fftypes.Datatype, waitConfirm bool) (msg *fftypes.Message, err error)
   BroadcastNamespace(ctx context.Context, ns *fftypes.Namespace, waitConfirm bool) (msg *fftypes.Message, err error)
   BroadcastMessage(ctx context.Context, ns string, in *fftypes.MessageInOut, waitConfirm bool) (out *fftypes.Message, err error)
-  BroadcastDefinitionAsNode(ctx context.Context, ns string, def fftypes.Definition, tag fftypes.SystemTag, waitConfirm bool) (msg *fftypes.Message, err error)
-  BroadcastDefinition(ctx context.Context, ns string, def fftypes.Definition, signingIdentity *fftypes.Identity, tag fftypes.SystemTag, waitConfirm bool) (msg *fftypes.Message, err error)
-  BroadcastRootOrgDefinition(ctx context.Context, def *fftypes.Organization, signingIdentity *fftypes.Identity, tag fftypes.SystemTag, waitConfirm bool) (msg *fftypes.Message, err error)
+  BroadcastDefinitionAsNode(ctx context.Context, ns string, def fftypes.Definition, tag string, waitConfirm bool) (msg *fftypes.Message, err error)
+  BroadcastDefinition(ctx context.Context, ns string, def fftypes.Definition, signingIdentity *fftypes.SignerRef, tag string, waitConfirm bool) (msg *fftypes.Message, err error)
+  BroadcastIdentityClaim(ctx context.Context, ns string, def *fftypes.IdentityClaim, signingIdentity *fftypes.SignerRef, tag string, waitConfirm bool) (msg *fftypes.Message, err error)
   BroadcastTokenPool(ctx context.Context, ns string, pool *fftypes.TokenPoolAnnouncement, waitConfirm bool) (msg *fftypes.Message, err error)
   Start() error
   WaitStop()
@@ -142,6 +142,7 @@ func (bm *broadcastManager) submitTXAndUpdateDB(ctx context.Context, batch *ffty
     return err
   }
+  log.L(ctx).Infof("Pinning broadcast batch %s with author=%s key=%s", batch.ID, batch.Author, batch.Key)
   return bm.batchpin.SubmitPinnedBatch(ctx, batch, contexts)
 }
diff --git a/internal/broadcast/manager_test.go b/internal/broadcast/manager_test.go
index 48291c6e1d..c9aa843761 100644
--- a/internal/broadcast/manager_test.go
+++ b/internal/broadcast/manager_test.go
@@ -190,7 +190,7 @@ func TestDispatchBatchSubmitBroadcastFail(t *testing.T) {
   mdi.On("InsertOperation", mock.Anything, mock.Anything).Return(nil)
   mbp.On("SubmitPinnedBatch", mock.Anything, mock.Anything, mock.Anything).Return(fmt.Errorf("pop"))
-  err := bm.dispatchBatch(context.Background(),
&fftypes.Batch{Identity: fftypes.Identity{Author: "wrong", Key: "wrong"}}, []*fftypes.Bytes32{fftypes.NewRandB32()}) + err := bm.dispatchBatch(context.Background(), &fftypes.Batch{SignerRef: fftypes.SignerRef{Author: "wrong", Key: "wrong"}}, []*fftypes.Bytes32{fftypes.NewRandB32()}) assert.EqualError(t, err, "pop") } @@ -203,7 +203,7 @@ func TestSubmitTXAndUpdateDBUpdateBatchFail(t *testing.T) { mdi.On("UpdateBatch", mock.Anything, mock.Anything, mock.Anything).Return(fmt.Errorf("pop")) bm.blockchain.(*blockchainmocks.Plugin).On("SubmitBatchPin", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return("", fmt.Errorf("pop")) - err := bm.submitTXAndUpdateDB(context.Background(), &fftypes.Batch{Identity: fftypes.Identity{Author: "org1", Key: "0x12345"}}, []*fftypes.Bytes32{fftypes.NewRandB32()}) + err := bm.submitTXAndUpdateDB(context.Background(), &fftypes.Batch{SignerRef: fftypes.SignerRef{Author: "org1", Key: "0x12345"}}, []*fftypes.Bytes32{fftypes.NewRandB32()}) assert.Regexp(t, "pop", err) } @@ -220,7 +220,7 @@ func TestSubmitTXAndUpdateDBAddOp1Fail(t *testing.T) { mbi.On("Name").Return("unittest") batch := &fftypes.Batch{ - Identity: fftypes.Identity{Author: "org1", Key: "0x12345"}, + SignerRef: fftypes.SignerRef{Author: "org1", Key: "0x12345"}, Payload: fftypes.BatchPayload{ Messages: []*fftypes.Message{ {Header: fftypes.MessageHeader{ @@ -249,7 +249,7 @@ func TestSubmitTXAndUpdateDBSucceed(t *testing.T) { msgID := fftypes.NewUUID() batch := &fftypes.Batch{ - Identity: fftypes.Identity{Author: "org1", Key: "0x12345"}, + SignerRef: fftypes.SignerRef{Author: "org1", Key: "0x12345"}, Payload: fftypes.BatchPayload{ TX: fftypes.TransactionRef{ Type: fftypes.TransactionTypeBatchPin, diff --git a/internal/broadcast/message.go b/internal/broadcast/message.go index a4c3b31f6b..e26da5cb7a 100644 --- a/internal/broadcast/message.go +++ b/internal/broadcast/message.go @@ -18,7 +18,6 @@ package broadcast import ( "context" - "encoding/json" "github.com/hyperledger/firefly/internal/i18n" "github.com/hyperledger/firefly/internal/log" @@ -135,8 +134,8 @@ func (s *broadcastSender) resolveAndSend(ctx context.Context, method sendMethod) func (s *broadcastSender) resolve(ctx context.Context) ([]*fftypes.DataAndBlob, error) { // Resolve the sending identity - if !s.isRootOrgBroadcast(ctx) { - if err := s.mgr.identity.ResolveInputIdentity(ctx, &s.msg.Header.Identity); err != nil { + if s.msg.Header.Type != fftypes.MessageTypeDefinition || s.msg.Header.Tag != fftypes.SystemTagIdentityClaim { + if err := s.mgr.identity.ResolveInputSigningIdentity(ctx, s.msg.Header.Namespace, &s.msg.Header.SignerRef); err != nil { return nil, i18n.WrapError(ctx, err, i18n.MsgAuthorInvalid) } } @@ -172,28 +171,3 @@ func (s *broadcastSender) sendInternal(ctx context.Context, method sendMethod) ( return err } - -func (s *broadcastSender) isRootOrgBroadcast(ctx context.Context) bool { - // Look into message to see if it contains a data item that is a root organization definition - if s.msg.Header.Type == fftypes.MessageTypeDefinition { - messageData, ok, err := s.mgr.data.GetMessageData(ctx, &s.msg.Message, true) - if ok && err == nil { - if len(messageData) > 0 { - dataItem := messageData[0] - if dataItem.Validator == fftypes.MessageTypeDefinition { - var org *fftypes.Organization - if dataItem.Value != nil { - err := json.Unmarshal([]byte(*dataItem.Value), &org) - if err != nil { - return false - } - } - if org != nil && org.Name != "" && org.ID != nil && org.Parent == "" { - return true - } - } - } - } - } - return 
false -} diff --git a/internal/broadcast/message_test.go b/internal/broadcast/message_test.go index 680f74fb10..d234ad387e 100644 --- a/internal/broadcast/message_test.go +++ b/internal/broadcast/message_test.go @@ -19,8 +19,6 @@ package broadcast import ( "bytes" "context" - "encoding/json" - "errors" "fmt" "io" "io/ioutil" @@ -56,12 +54,12 @@ func TestBroadcastMessageOk(t *testing.T) { {ID: fftypes.NewUUID(), Hash: fftypes.NewRandB32()}, }, []*fftypes.DataAndBlob{}, nil) mdi.On("UpsertMessage", ctx, mock.Anything, database.UpsertOptimizationNew).Return(nil) - mim.On("ResolveInputIdentity", ctx, mock.Anything).Return(nil) + mim.On("ResolveInputSigningIdentity", ctx, "ns1", mock.Anything).Return(nil) msg, err := bm.BroadcastMessage(ctx, "ns1", &fftypes.MessageInOut{ Message: fftypes.Message{ Header: fftypes.MessageHeader{ - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "did:firefly:org/abcd", Key: "0x12345", }, @@ -80,109 +78,6 @@ func TestBroadcastMessageOk(t *testing.T) { mdm.AssertExpectations(t) } -func TestBroadcastRootOrg(t *testing.T) { - bm, cancel := newTestBroadcast(t) - defer cancel() - mdi := bm.database.(*databasemocks.Plugin) - mdm := bm.data.(*datamocks.Manager) - mim := bm.identity.(*identitymanagermocks.Manager) - - ctx := context.Background() - rag := mdi.On("RunAsGroup", ctx, mock.Anything) - rag.RunFn = func(a mock.Arguments) { - var fn = a[1].(func(context.Context) error) - rag.ReturnArguments = mock.Arguments{fn(a[0].(context.Context))} - } - - org := fftypes.Organization{ - ID: fftypes.NewUUID(), - Name: "org1", - Parent: "", // root - } - orgBytes, err := json.Marshal(&org) - assert.NoError(t, err) - - data := &fftypes.Data{ - ID: fftypes.NewUUID(), - Value: fftypes.JSONAnyPtrBytes(orgBytes), - Validator: fftypes.MessageTypeDefinition, - } - - mdm.On("GetMessageData", ctx, mock.Anything, mock.Anything).Return([]*fftypes.Data{data}, true, nil) - mdm.On("ResolveInlineDataBroadcast", ctx, "ns1", mock.Anything).Return(fftypes.DataRefs{ - {ID: fftypes.NewUUID(), Hash: fftypes.NewRandB32()}, - }, []*fftypes.DataAndBlob{}, nil) - mdi.On("UpsertMessage", ctx, mock.Anything, database.UpsertOptimizationNew).Return(nil) - mim.On("ResolveInputIdentity", ctx, mock.Anything).Return(nil) - - msg, err := bm.BroadcastMessage(ctx, "ns1", &fftypes.MessageInOut{ - Message: fftypes.Message{ - Header: fftypes.MessageHeader{ - ID: fftypes.NewUUID(), - Type: fftypes.MessageTypeDefinition, - Identity: fftypes.Identity{ - Author: "did:firefly:org/12345", - Key: "0x12345", - }, - }, - Data: fftypes.DataRefs{ - { - ID: data.ID, - Hash: data.Hash, - }, - }, - }, - }, false) - assert.NoError(t, err) - assert.NotNil(t, msg.Data[0].ID) - assert.NotNil(t, msg.Data[0].Hash) - assert.Equal(t, "ns1", msg.Header.Namespace) - - mdi.AssertExpectations(t) - mdm.AssertExpectations(t) -} - -func TestBroadcastRootOrgBadData(t *testing.T) { - bm, cancel := newTestBroadcast(t) - defer cancel() - mdi := bm.database.(*databasemocks.Plugin) - mdm := bm.data.(*datamocks.Manager) - mim := bm.identity.(*identitymanagermocks.Manager) - - ctx := context.Background() - data := &fftypes.Data{ - ID: fftypes.NewUUID(), - Value: fftypes.JSONAnyPtr("not an org"), - Validator: fftypes.MessageTypeDefinition, - } - - mdm.On("GetMessageData", ctx, mock.Anything, mock.Anything).Return([]*fftypes.Data{data}, true, nil) - mim.On("ResolveInputIdentity", ctx, mock.Anything).Return(errors.New("not registered")) - - _, err := bm.BroadcastMessage(ctx, "ns1", &fftypes.MessageInOut{ - Message: fftypes.Message{ - 
Header: fftypes.MessageHeader{ - ID: fftypes.NewUUID(), - Type: fftypes.MessageTypeDefinition, - Identity: fftypes.Identity{ - Author: "did:firefly:org/12345", - Key: "0x12345", - }, - }, - Data: fftypes.DataRefs{ - { - ID: data.ID, - Hash: data.Hash, - }, - }, - }, - }, false) - assert.Error(t, err, "not registered") - - mdi.AssertExpectations(t) - mdm.AssertExpectations(t) -} - func TestBroadcastMessageWaitConfirmOk(t *testing.T) { bm, cancel := newTestBroadcast(t) defer cancel() @@ -200,7 +95,7 @@ func TestBroadcastMessageWaitConfirmOk(t *testing.T) { mdm.On("ResolveInlineDataBroadcast", ctx, "ns1", mock.Anything).Return(fftypes.DataRefs{ {ID: fftypes.NewUUID(), Hash: fftypes.NewRandB32()}, }, []*fftypes.DataAndBlob{}, nil) - mim.On("ResolveInputIdentity", ctx, mock.Anything).Return(nil) + mim.On("ResolveInputSigningIdentity", ctx, "ns1", mock.Anything).Return(nil) replyMsg := &fftypes.Message{ Header: fftypes.MessageHeader{ @@ -219,7 +114,7 @@ func TestBroadcastMessageWaitConfirmOk(t *testing.T) { msg, err := bm.BroadcastMessage(ctx, "ns1", &fftypes.MessageInOut{ Message: fftypes.Message{ Header: fftypes.MessageHeader{ - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "did:firefly:org/abcd", Key: "0x12345", }, @@ -280,12 +175,12 @@ func TestBroadcastMessageWithBlobsOk(t *testing.T) { })).Return("payload-ref", nil) mdi.On("UpdateData", ctx, mock.Anything, mock.Anything).Return(nil) mdi.On("UpsertMessage", ctx, mock.Anything, database.UpsertOptimizationNew).Return(nil) - mim.On("ResolveInputIdentity", ctx, mock.Anything).Return(nil) + mim.On("ResolveInputSigningIdentity", ctx, "ns1", mock.Anything).Return(nil) msg, err := bm.BroadcastMessage(ctx, "ns1", &fftypes.MessageInOut{ Message: fftypes.Message{ Header: fftypes.MessageHeader{ - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "did:firefly:org/abcd", Key: "0x12345", }, @@ -323,12 +218,12 @@ func TestBroadcastMessageTooLarge(t *testing.T) { mdm.On("ResolveInlineDataBroadcast", ctx, "ns1", mock.Anything).Return(fftypes.DataRefs{ {ID: fftypes.NewUUID(), Hash: fftypes.NewRandB32(), ValueSize: 1000001}, }, []*fftypes.DataAndBlob{}, nil) - mim.On("ResolveInputIdentity", ctx, mock.Anything).Return(nil) + mim.On("ResolveInputSigningIdentity", ctx, "ns1", mock.Anything).Return(nil) _, err := bm.BroadcastMessage(ctx, "ns1", &fftypes.MessageInOut{ Message: fftypes.Message{ Header: fftypes.MessageHeader{ - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "did:firefly:org/abcd", Key: "0x12345", }, @@ -358,7 +253,7 @@ func TestBroadcastMessageBadInput(t *testing.T) { rag.ReturnArguments = mock.Arguments{fn(a[0].(context.Context))} } mdm.On("ResolveInlineDataBroadcast", ctx, "ns1", mock.Anything).Return(nil, nil, fmt.Errorf("pop")) - mim.On("ResolveInputIdentity", ctx, mock.Anything).Return(nil) + mim.On("ResolveInputSigningIdentity", ctx, "ns1", mock.Anything).Return(nil) _, err := bm.BroadcastMessage(ctx, "ns1", &fftypes.MessageInOut{ InlineData: fftypes.InlineData{ @@ -377,7 +272,7 @@ func TestBroadcastMessageBadIdentity(t *testing.T) { ctx := context.Background() mim := bm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", ctx, mock.Anything).Return(fmt.Errorf("pop")) + mim.On("ResolveInputSigningIdentity", ctx, "ns1", mock.Anything).Return(fmt.Errorf("pop")) _, err := bm.BroadcastMessage(ctx, "ns1", &fftypes.MessageInOut{ InlineData: fftypes.InlineData{ @@ -406,7 +301,7 @@ func TestPublishBlobsSendMessageFail(t *testing.T) { var fn = a[1].(func(context.Context) 
error) rag.ReturnArguments = mock.Arguments{fn(a[0].(context.Context))} } - mim.On("ResolveInputIdentity", ctx, mock.Anything).Return(nil) + mim.On("ResolveInputSigningIdentity", ctx, "ns1", mock.Anything).Return(nil) mdm.On("ResolveInlineDataBroadcast", ctx, "ns1", mock.Anything).Return(fftypes.DataRefs{ {ID: dataID, Hash: fftypes.NewRandB32()}, }, []*fftypes.DataAndBlob{ @@ -428,7 +323,7 @@ func TestPublishBlobsSendMessageFail(t *testing.T) { _, err := bm.BroadcastMessage(ctx, "ns1", &fftypes.MessageInOut{ Message: fftypes.Message{ Header: fftypes.MessageHeader{ - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "did:firefly:org/abcd", Key: "0x12345", }, @@ -464,12 +359,12 @@ func TestBroadcastPrepare(t *testing.T) { mdm.On("ResolveInlineDataBroadcast", ctx, "ns1", mock.Anything).Return(fftypes.DataRefs{ {ID: fftypes.NewUUID(), Hash: fftypes.NewRandB32()}, }, []*fftypes.DataAndBlob{}, nil) - mim.On("ResolveInputIdentity", ctx, mock.Anything).Return(nil) + mim.On("ResolveInputSigningIdentity", ctx, "ns1", mock.Anything).Return(nil) msg := &fftypes.MessageInOut{ Message: fftypes.Message{ Header: fftypes.MessageHeader{ - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "did:firefly:org/abcd", Key: "0x12345", }, diff --git a/internal/broadcast/namespace_test.go b/internal/broadcast/namespace_test.go index eabbe539cb..bcf5066b84 100644 --- a/internal/broadcast/namespace_test.go +++ b/internal/broadcast/namespace_test.go @@ -67,7 +67,7 @@ func TestBroadcastNamespaceBroadcastOk(t *testing.T) { mdm := bm.data.(*datamocks.Manager) mim := bm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", mock.Anything, mock.Anything).Return(nil) + mim.On("ResolveInputSigningIdentity", mock.Anything, fftypes.SystemNamespace, mock.Anything).Return(nil) mdi.On("GetNamespace", mock.Anything, mock.Anything).Return(&fftypes.Namespace{Name: "ns1"}, nil) mdi.On("UpsertData", mock.Anything, mock.Anything, database.UpsertOptimizationNew).Return(nil) mdm.On("CheckDatatype", mock.Anything, "ns1", mock.Anything).Return(nil) diff --git a/internal/broadcast/tokenpool_test.go b/internal/broadcast/tokenpool_test.go index 86b81b15b5..1b81ef3f88 100644 --- a/internal/broadcast/tokenpool_test.go +++ b/internal/broadcast/tokenpool_test.go @@ -96,7 +96,7 @@ func TestBroadcastTokenPoolBroadcastFail(t *testing.T) { }, } - mim.On("ResolveInputIdentity", mock.Anything, mock.Anything).Return(nil) + mim.On("ResolveInputSigningIdentity", mock.Anything, "ns1", mock.Anything).Return(nil) mdm.On("VerifyNamespaceExists", mock.Anything, "ns1").Return(nil) mdi.On("UpsertData", mock.Anything, mock.Anything, database.UpsertOptimizationNew).Return(nil) mdi.On("UpsertMessage", mock.Anything, mock.Anything, database.UpsertOptimizationNew).Return(fmt.Errorf("pop")) @@ -127,7 +127,7 @@ func TestBroadcastTokenPoolOk(t *testing.T) { }, } - mim.On("ResolveInputIdentity", mock.Anything, mock.Anything).Return(nil) + mim.On("ResolveInputSigningIdentity", mock.Anything, "ns1", mock.Anything).Return(nil) mdm.On("VerifyNamespaceExists", mock.Anything, "ns1").Return(nil) mdi.On("UpsertData", mock.Anything, mock.Anything, database.UpsertOptimizationNew).Return(nil) mdi.On("UpsertMessage", mock.Anything, mock.Anything, database.UpsertOptimizationNew).Return(nil) diff --git a/internal/config/config.go b/internal/config/config.go index 90a71edca1..d7425c6b0a 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -224,6 +224,8 @@ var ( AssetManagerRetryMaxDelay = 
rootKey("asset.manager.retry.maxDelay") // AssetManagerRetryFactor the backoff factor to use for retry of database operations AssetManagerRetryFactor = rootKey("asset.manager.retry.factor") + // AssetManagerKeyNormalization mechanism to normalize keys before using them. Valid options: "blockchain_plugin" - use blockchain plugin (default), "none" - do not attempt normalization + AssetManagerKeyNormalization = rootKey("asset.manager.keyNormalization") // UIEnabled set to false to disable the UI (default is true, so UI will be enabled if ui.path is valid) UIEnabled = rootKey("ui.enabled") // UIPath the path on which to serve the UI @@ -290,6 +292,7 @@ func Reset() { viper.SetDefault(string(APIMaxFilterSkip), 1000) // protects database (skip+limit pagination is not for bulk operations) viper.SetDefault(string(APIRequestTimeout), "120s") viper.SetDefault(string(APIShutdownTimeout), "10s") + viper.SetDefault(string(AssetManagerKeyNormalization), "blockchain_plugin") viper.SetDefault(string(BatchManagerReadPageSize), 100) viper.SetDefault(string(BatchManagerReadPollTimeout), "30s") viper.SetDefault(string(BatchRetryFactor), 2.0) diff --git a/internal/contracts/manager.go b/internal/contracts/manager.go index 81fc3eb771..9e7c8af9af 100644 --- a/internal/contracts/manager.go +++ b/internal/contracts/manager.go @@ -177,7 +177,7 @@ func (cm *contractManager) writeInvokeTransaction(ctx context.Context, ns string } func (cm *contractManager) InvokeContract(ctx context.Context, ns string, req *fftypes.ContractCallRequest) (res interface{}, err error) { - req.Key, err = cm.identity.ResolveSigningKey(ctx, req.Key) + req.Key, err = cm.identity.NormalizeSigningKey(ctx, req.Key, identity.KeyNormalizationBlockchainPlugin) if err != nil { return nil, err } diff --git a/internal/contracts/manager_test.go b/internal/contracts/manager_test.go index 785bd04eaa..a7c75aaf68 100644 --- a/internal/contracts/manager_test.go +++ b/internal/contracts/manager_test.go @@ -22,6 +22,7 @@ import ( "testing" "github.com/hyperledger/firefly/internal/blockchain/ethereum" + "github.com/hyperledger/firefly/internal/identity" "github.com/hyperledger/firefly/mocks/blockchainmocks" "github.com/hyperledger/firefly/mocks/broadcastmocks" "github.com/hyperledger/firefly/mocks/databasemocks" @@ -992,7 +993,7 @@ func TestInvokeContract(t *testing.T) { mth.On("SubmitNewTransaction", mock.Anything, "ns1", fftypes.TransactionTypeContractInvoke).Return(fftypes.NewUUID(), nil) - mim.On("ResolveSigningKey", mock.Anything, "").Return("key-resolved", nil) + mim.On("NormalizeSigningKey", mock.Anything, "", identity.KeyNormalizationBlockchainPlugin).Return("key-resolved", nil) mdi.On("InsertOperation", mock.Anything, mock.MatchedBy(func(op *fftypes.Operation) bool { return op.Namespace == "ns1" && op.Type == fftypes.OpTypeBlockchainInvoke && op.Plugin == "mockblockchain" })).Return(nil) @@ -1026,7 +1027,7 @@ func TestInvokeContractFail(t *testing.T) { mth.On("SubmitNewTransaction", mock.Anything, "ns1", fftypes.TransactionTypeContractInvoke).Return(fftypes.NewUUID(), nil) - mim.On("ResolveSigningKey", mock.Anything, "").Return("key-resolved", nil) + mim.On("NormalizeSigningKey", mock.Anything, "", identity.KeyNormalizationBlockchainPlugin).Return("key-resolved", nil) mdi.On("InsertOperation", mock.Anything, mock.MatchedBy(func(op *fftypes.Operation) bool { return op.Namespace == "ns1" && op.Type == fftypes.OpTypeBlockchainInvoke && op.Plugin == "mockblockchain" })).Return(nil) @@ -1039,7 +1040,7 @@ func TestInvokeContractFail(t *testing.T) { 
mth.AssertExpectations(t) } -func TestInvokeContractFailResolveSigningKey(t *testing.T) { +func TestInvokeContractFailNormalizeSigningKey(t *testing.T) { cm := newTestContractManager() mim := cm.identity.(*identitymanagermocks.Manager) @@ -1050,7 +1051,7 @@ func TestInvokeContractFailResolveSigningKey(t *testing.T) { Location: fftypes.JSONAnyPtr(""), } - mim.On("ResolveSigningKey", mock.Anything, "").Return("", fmt.Errorf("pop")) + mim.On("NormalizeSigningKey", mock.Anything, "", identity.KeyNormalizationBlockchainPlugin).Return("", fmt.Errorf("pop")) _, err := cm.InvokeContract(context.Background(), "ns1", req) @@ -1069,7 +1070,7 @@ func TestInvokeContractFailResolve(t *testing.T) { Location: fftypes.JSONAnyPtr(""), } - mim.On("ResolveSigningKey", mock.Anything, "").Return("key-resolved", nil) + mim.On("NormalizeSigningKey", mock.Anything, "", identity.KeyNormalizationBlockchainPlugin).Return("key-resolved", nil) mbi.On("InvokeContract", mock.Anything, mock.AnythingOfType("*fftypes.UUID"), "key-resolved", req.Location, req.Method, req.Input).Return(nil) _, err := cm.InvokeContract(context.Background(), "ns1", req) @@ -1095,7 +1096,7 @@ func TestInvokeContractTXFail(t *testing.T) { }, } - mim.On("ResolveSigningKey", mock.Anything, "").Return("key-resolved", nil) + mim.On("NormalizeSigningKey", mock.Anything, "", identity.KeyNormalizationBlockchainPlugin).Return("key-resolved", nil) mth.On("SubmitNewTransaction", mock.Anything, "ns1", fftypes.TransactionTypeContractInvoke).Return(nil, fmt.Errorf("pop")) _, err := cm.InvokeContract(context.Background(), "ns1", req) @@ -1116,7 +1117,7 @@ func TestInvokeContractNoMethodSignature(t *testing.T) { }, } - mim.On("ResolveSigningKey", mock.Anything, "").Return("key-resolved", nil) + mim.On("NormalizeSigningKey", mock.Anything, "", identity.KeyNormalizationBlockchainPlugin).Return("key-resolved", nil) _, err := cm.InvokeContract(context.Background(), "ns1", req) @@ -1138,7 +1139,7 @@ func TestInvokeContractMethodNotFound(t *testing.T) { }, } - mim.On("ResolveSigningKey", mock.Anything, "").Return("key-resolved", nil) + mim.On("NormalizeSigningKey", mock.Anything, "", identity.KeyNormalizationBlockchainPlugin).Return("key-resolved", nil) mdb.On("GetFFIMethod", mock.Anything, "ns1", req.Interface, req.Method.Name).Return(nil, fmt.Errorf("pop")) _, err := cm.InvokeContract(context.Background(), "ns1", req) @@ -1176,7 +1177,7 @@ func TestInvokeContractMethodBadInput(t *testing.T) { }, }, } - mim.On("ResolveSigningKey", mock.Anything, "").Return("key-resolved", nil) + mim.On("NormalizeSigningKey", mock.Anything, "", identity.KeyNormalizationBlockchainPlugin).Return("key-resolved", nil) _, err := cm.InvokeContract(context.Background(), "ns1", req) assert.Regexp(t, "FF10304", err) @@ -1202,7 +1203,7 @@ func TestQueryContract(t *testing.T) { }, } - mim.On("ResolveSigningKey", mock.Anything, "").Return("key-resolved", nil) + mim.On("NormalizeSigningKey", mock.Anything, "", identity.KeyNormalizationBlockchainPlugin).Return("key-resolved", nil) mth.On("SubmitNewTransaction", mock.Anything, "ns1", fftypes.TransactionTypeContractInvoke).Return(fftypes.NewUUID(), nil) mdi.On("InsertOperation", mock.Anything, mock.MatchedBy(func(op *fftypes.Operation) bool { return op.Namespace == "ns1" && op.Type == fftypes.OpTypeBlockchainInvoke && op.Plugin == "mockblockchain" @@ -1232,7 +1233,7 @@ func TestCallContractInvalidType(t *testing.T) { }, } - mim.On("ResolveSigningKey", mock.Anything, "").Return("key-resolved", nil) + mim.On("NormalizeSigningKey", mock.Anything, "", 
identity.KeyNormalizationBlockchainPlugin).Return("key-resolved", nil) mth.On("SubmitNewTransaction", mock.Anything, "ns1", fftypes.TransactionTypeContractInvoke).Return(fftypes.NewUUID(), nil) mdi.On("InsertOperation", mock.Anything, mock.MatchedBy(func(op *fftypes.Operation) bool { return op.Namespace == "ns1" && op.Type == fftypes.OpTypeBlockchainInvoke && op.Plugin == "mockblockchain" @@ -1382,7 +1383,7 @@ func TestInvokeContractAPI(t *testing.T) { Location: fftypes.JSONAnyPtr(""), } - mim.On("ResolveSigningKey", mock.Anything, "").Return("key-resolved", nil) + mim.On("NormalizeSigningKey", mock.Anything, "", identity.KeyNormalizationBlockchainPlugin).Return("key-resolved", nil) mdb.On("GetContractAPIByName", mock.Anything, "ns1", "banana").Return(api, nil) mdb.On("GetFFIMethod", mock.Anything, "ns1", mock.Anything, mock.Anything).Return(&fftypes.FFIMethod{Name: "peel"}, nil) mth.On("SubmitNewTransaction", mock.Anything, "ns1", fftypes.TransactionTypeContractInvoke).Return(fftypes.NewUUID(), nil) @@ -1410,7 +1411,7 @@ func TestInvokeContractAPIFailContractLookup(t *testing.T) { }, } - mim.On("ResolveSigningKey", mock.Anything, "").Return("key-resolved", nil) + mim.On("NormalizeSigningKey", mock.Anything, "", identity.KeyNormalizationBlockchainPlugin).Return("key-resolved", nil) mdb.On("GetContractAPIByName", mock.Anything, "ns1", "banana").Return(nil, fmt.Errorf("pop")) _, err := cm.InvokeContractAPI(context.Background(), "ns1", "banana", "peel", req) @@ -1432,7 +1433,7 @@ func TestInvokeContractAPIContractNotFound(t *testing.T) { }, } - mim.On("ResolveSigningKey", mock.Anything, "").Return("key-resolved", nil) + mim.On("NormalizeSigningKey", mock.Anything, "", identity.KeyNormalizationBlockchainPlugin).Return("key-resolved", nil) mdb.On("GetContractAPIByName", mock.Anything, "ns1", "banana").Return(nil, nil) _, err := cm.InvokeContractAPI(context.Background(), "ns1", "banana", "peel", req) diff --git a/internal/database/sqlcommon/batch_sql_test.go b/internal/database/sqlcommon/batch_sql_test.go index b82fd6f04b..c814f5d758 100644 --- a/internal/database/sqlcommon/batch_sql_test.go +++ b/internal/database/sqlcommon/batch_sql_test.go @@ -41,7 +41,7 @@ func TestBatch2EWithDB(t *testing.T) { batch := &fftypes.Batch{ ID: batchID, Type: fftypes.MessageTypeBroadcast, - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Key: "0x12345", Author: "did:firefly:org/abcd", }, @@ -81,7 +81,7 @@ func TestBatch2EWithDB(t *testing.T) { batchUpdated := &fftypes.Batch{ ID: batchID, Type: fftypes.MessageTypeBroadcast, - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Key: "0x12345", Author: "did:firefly:org/abcd", }, diff --git a/internal/database/sqlcommon/event_sql.go b/internal/database/sqlcommon/event_sql.go index 134efb8cb8..95b8257c80 100644 --- a/internal/database/sqlcommon/event_sql.go +++ b/internal/database/sqlcommon/event_sql.go @@ -33,13 +33,15 @@ var ( "etype", "namespace", "ref", + "cid", "tx_id", "created", } eventFilterFieldMap = map[string]string{ - "type": "etype", - "reference": "ref", - "tx": "tx_id", + "type": "etype", + "reference": "ref", + "correlator": "cid", + "tx": "tx_id", } ) @@ -83,6 +85,7 @@ func (s *SQLCommon) insertEventPreCommit(ctx context.Context, tx *txWrapper, eve string(event.Type), event.Namespace, event.Reference, + event.Correlator, event.Transaction, event.Created, ), @@ -100,6 +103,7 @@ func (s *SQLCommon) eventResult(ctx context.Context, row *sql.Rows) (*fftypes.Ev &event.Type, &event.Namespace, &event.Reference, + &event.Correlator, 
&event.Transaction, &event.Created, // Must be added to the list of columns in all selects diff --git a/internal/database/sqlcommon/event_sql_test.go b/internal/database/sqlcommon/event_sql_test.go index 9f428b6f67..3fce7b8e1d 100644 --- a/internal/database/sqlcommon/event_sql_test.go +++ b/internal/database/sqlcommon/event_sql_test.go @@ -39,11 +39,12 @@ func TestEventE2EWithDB(t *testing.T) { // Create a new event entry eventID := fftypes.NewUUID() event := &fftypes.Event{ - ID: eventID, - Namespace: "ns1", - Type: fftypes.EventTypeMessageConfirmed, - Reference: fftypes.NewUUID(), - Created: fftypes.Now(), + ID: eventID, + Namespace: "ns1", + Type: fftypes.EventTypeMessageConfirmed, + Reference: fftypes.NewUUID(), + Correlator: fftypes.NewUUID(), + Created: fftypes.Now(), } s.callbacks.On("OrderedUUIDCollectionNSEvent", database.CollectionEvents, fftypes.ChangeEventTypeCreated, "ns1", eventID, mock.Anything).Return() diff --git a/internal/database/sqlcommon/identity_sql.go b/internal/database/sqlcommon/identity_sql.go new file mode 100644 index 0000000000..e9397e9e51 --- /dev/null +++ b/internal/database/sqlcommon/identity_sql.go @@ -0,0 +1,250 @@ +// Copyright © 2022 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package sqlcommon + +import ( + "context" + "database/sql" + + sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly/internal/i18n" + "github.com/hyperledger/firefly/internal/log" + "github.com/hyperledger/firefly/pkg/database" + "github.com/hyperledger/firefly/pkg/fftypes" +) + +var ( + identityColumns = []string{ + "id", + "did", + "parent", + "itype", + "namespace", + "name", + "description", + "profile", + "messages_claim", + "messages_verification", + "messages_update", + "created", + "updated", + } + identityFilterFieldMap = map[string]string{ + "identity": "identity_id", + "type": "itype", + "messages.claim": "messages_claim", + "messages.verification": "messages_verification", + "messages.update": "messages_update", + } +) + +func (s *SQLCommon) attemptIdentityUpdate(ctx context.Context, tx *txWrapper, identity *fftypes.Identity) (int64, error) { + identity.Updated = fftypes.Now() + return s.updateTx(ctx, tx, + sq.Update("identities"). + Set("did", identity.DID). + Set("parent", identity.Parent). + Set("itype", identity.Type). + Set("namespace", identity.Namespace). + Set("name", identity.Name). + Set("description", identity.Description). + Set("profile", identity.Profile). + Set("messages_claim", identity.Messages.Claim). + Set("messages_verification", identity.Messages.Verification). + Set("messages_update", identity.Messages.Update). + Set("updated", identity.Updated). 
+		Where(sq.Eq{
+			"id": identity.ID,
+		}),
+		func() {
+			s.callbacks.UUIDCollectionNSEvent(database.CollectionIdentities, fftypes.ChangeEventTypeUpdated, identity.Namespace, identity.ID)
+		})
+}
+
+func (s *SQLCommon) attemptIdentityInsert(ctx context.Context, tx *txWrapper, identity *fftypes.Identity, requestConflictEmptyResult bool) (err error) {
+	identity.Created = fftypes.Now()
+	identity.Updated = identity.Created
+	_, err = s.insertTxExt(ctx, tx,
+		sq.Insert("identities").
+			Columns(identityColumns...).
+			Values(
+				identity.ID,
+				identity.DID,
+				identity.Parent,
+				identity.Type,
+				identity.Namespace,
+				identity.Name,
+				identity.Description,
+				identity.Profile,
+				identity.Messages.Claim,
+				identity.Messages.Verification,
+				identity.Messages.Update,
+				identity.Created,
+				identity.Updated,
+			),
+		func() {
+			s.callbacks.UUIDCollectionNSEvent(database.CollectionIdentities, fftypes.ChangeEventTypeCreated, identity.Namespace, identity.ID)
+		}, requestConflictEmptyResult)
+	return err
+}
+
+func (s *SQLCommon) UpsertIdentity(ctx context.Context, identity *fftypes.Identity, optimization database.UpsertOptimization) (err error) {
+	ctx, tx, autoCommit, err := s.beginOrUseTx(ctx)
+	if err != nil {
+		return err
+	}
+	defer s.rollbackTx(ctx, tx, autoCommit)
+
+	optimized := false
+	if optimization == database.UpsertOptimizationNew {
+		opErr := s.attemptIdentityInsert(ctx, tx, identity, true /* we want a failure here that we can progress past */)
+		optimized = opErr == nil
+	} else if optimization == database.UpsertOptimizationExisting {
+		rowsAffected, opErr := s.attemptIdentityUpdate(ctx, tx, identity)
+		optimized = opErr == nil && rowsAffected == 1
+	}
+
+	if !optimized {
+		// Do a select within the transaction to determine if the UUID already exists
+		msgRows, _, err := s.queryTx(ctx, tx,
+			sq.Select("id").
+				From("identities").
+				Where(sq.Eq{"id": identity.ID}),
+		)
+		if err != nil {
+			return err
+		}
+		existing := msgRows.Next()
+		msgRows.Close()
+
+		if existing {
+			// Update the identity
+			if _, err = s.attemptIdentityUpdate(ctx, tx, identity); err != nil {
+				return err
+			}
+		} else {
+			if err = s.attemptIdentityInsert(ctx, tx, identity, false); err != nil {
+				return err
+			}
+		}
+	}
+
+	return s.commitTx(ctx, tx, autoCommit)
+}
+
+func (s *SQLCommon) identityResult(ctx context.Context, row *sql.Rows) (*fftypes.Identity, error) {
+	identity := fftypes.Identity{}
+	err := row.Scan(
+		&identity.ID,
+		&identity.DID,
+		&identity.Parent,
+		&identity.Type,
+		&identity.Namespace,
+		&identity.Name,
+		&identity.Description,
+		&identity.Profile,
+		&identity.Messages.Claim,
+		&identity.Messages.Verification,
+		&identity.Messages.Update,
+		&identity.Created,
+		&identity.Updated,
+	)
+	if err != nil {
+		return nil, i18n.WrapError(ctx, err, i18n.MsgDBReadErr, "identities")
+	}
+	return &identity, nil
+}
+
+func (s *SQLCommon) getIdentityPred(ctx context.Context, desc string, pred interface{}) (identity *fftypes.Identity, err error) {
+
+	rows, _, err := s.query(ctx,
+		sq.Select(identityColumns...).
+			From("identities").
+			Where(pred),
+	)
+	if err != nil {
+		return nil, err
+	}
+	defer rows.Close()
+
+	if !rows.Next() {
+		log.L(ctx).Debugf("Identity '%s' not found", desc)
+		return nil, nil
+	}
+
+	return s.identityResult(ctx, rows)
+}
+
+func (s *SQLCommon) GetIdentityByName(ctx context.Context, iType fftypes.IdentityType, namespace, name string) (identity *fftypes.Identity, err error) {
+	return s.getIdentityPred(ctx, name, sq.Eq{"itype": iType, "namespace": namespace, "name": name})
+}
+
+func (s *SQLCommon) GetIdentityByDID(ctx context.Context, did string) (identity *fftypes.Identity, err error) {
+	return s.getIdentityPred(ctx, did, sq.Eq{"did": did})
+}
+
+func (s *SQLCommon) GetIdentityByID(ctx context.Context, id *fftypes.UUID) (identity *fftypes.Identity, err error) {
+	return s.getIdentityPred(ctx, id.String(), sq.Eq{"id": id})
+}
+
+func (s *SQLCommon) GetIdentities(ctx context.Context, filter database.Filter) (identities []*fftypes.Identity, fr *database.FilterResult, err error) {
+
+	query, fop, fi, err := s.filterSelect(ctx, "", sq.Select(identityColumns...).From("identities"), filter, identityFilterFieldMap, []interface{}{"sequence"})
+	if err != nil {
+		return nil, nil, err
+	}
+
+	rows, tx, err := s.query(ctx, query)
+	if err != nil {
+		return nil, nil, err
+	}
+	defer rows.Close()
+
+	identities = []*fftypes.Identity{}
+	for rows.Next() {
+		d, err := s.identityResult(ctx, rows)
+		if err != nil {
+			return nil, nil, err
+		}
+		identities = append(identities, d)
+	}
+
+	return identities, s.queryRes(ctx, tx, "identities", fop, fi), err
+
+}
+
+func (s *SQLCommon) UpdateIdentity(ctx context.Context, id *fftypes.UUID, update database.Update) (err error) {
+
+	ctx, tx, autoCommit, err := s.beginOrUseTx(ctx)
+	if err != nil {
+		return err
+	}
+	defer s.rollbackTx(ctx, tx, autoCommit)
+
+	query, err := s.buildUpdate(sq.Update("identities"), update, identityFilterFieldMap)
+	if err != nil {
+		return err
+	}
+	query = query.Where(sq.Eq{"id": id})
+
+	_, err = s.updateTx(ctx, tx, query, nil /* no change events for filter based updates */)
+	if err != nil {
+		return err
+	}
+
+	return s.commitTx(ctx, tx, autoCommit)
+}
diff --git a/internal/database/sqlcommon/identity_sql_test.go b/internal/database/sqlcommon/identity_sql_test.go
new file mode 100644
index 0000000000..b22a42ef9a
--- /dev/null
+++ b/internal/database/sqlcommon/identity_sql_test.go
@@ -0,0 +1,296 @@
+// Copyright © 2021 Kaleido, Inc.
+//
+// SPDX-License-Identifier: Apache-2.0
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+ +package sqlcommon + +import ( + "context" + "encoding/json" + "fmt" + "testing" + + "github.com/DATA-DOG/go-sqlmock" + "github.com/hyperledger/firefly/internal/log" + "github.com/hyperledger/firefly/pkg/database" + "github.com/hyperledger/firefly/pkg/fftypes" + "github.com/stretchr/testify/assert" +) + +func TestIdentitiesE2EWithDB(t *testing.T) { + log.SetLevel("debug") + + s, cleanup := newSQLiteTestProvider(t) + defer cleanup() + ctx := context.Background() + + // Create a new identity entry + identityID := fftypes.NewUUID() + identity := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: identityID, + DID: "did:firefly:/ns/ns1/1", + Parent: fftypes.NewUUID(), + Type: fftypes.IdentityTypeCustom, + Namespace: "ns1", + Name: "identity1", + }, + IdentityProfile: fftypes.IdentityProfile{ + Description: "Identity One", + }, + Messages: fftypes.IdentityMessages{ + Claim: fftypes.NewUUID(), + }, + } + + s.callbacks.On("UUIDCollectionNSEvent", database.CollectionIdentities, fftypes.ChangeEventTypeCreated, "ns1", identityID).Return() + s.callbacks.On("UUIDCollectionNSEvent", database.CollectionIdentities, fftypes.ChangeEventTypeUpdated, "ns2", identityID).Return() + + err := s.UpsertIdentity(ctx, identity, database.UpsertOptimizationNew) + assert.NoError(t, err) + + // Check we get the exact same identity back + identityRead, err := s.GetIdentityByID(ctx, identity.ID) + assert.NoError(t, err) + assert.NotNil(t, identityRead) + identityJson, _ := json.Marshal(&identity) + identityReadJson, _ := json.Marshal(&identityRead) + assert.Equal(t, string(identityJson), string(identityReadJson)) + + // Update the identity (this is testing what's possible at the database layer, + // and does not account for the verification that happens at the higher level) + identityUpdated := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: identityID, + DID: "did:firefly:/nodes/2", + Parent: fftypes.NewUUID(), + Type: fftypes.IdentityTypeNode, + Namespace: "ns2", + Name: "identity2", + }, + IdentityProfile: fftypes.IdentityProfile{ + Description: "Identity Two", + Profile: fftypes.JSONObject{"some": "value"}, + }, + Messages: fftypes.IdentityMessages{ + Claim: fftypes.NewUUID(), + Verification: fftypes.NewUUID(), + Update: fftypes.NewUUID(), + }, + Created: identity.Created, + } + err = s.UpsertIdentity(context.Background(), identityUpdated, database.UpsertOptimizationExisting) + assert.NoError(t, err) + + // Check we get the exact same data back - note the removal of one of the identity elements + identityRead, err = s.GetIdentityByName(ctx, identityUpdated.Type, identityUpdated.Namespace, identityUpdated.Name) + assert.NoError(t, err) + identityJson, _ = json.Marshal(&identityUpdated) + identityReadJson, _ = json.Marshal(&identityRead) + assert.Equal(t, string(identityJson), string(identityReadJson)) + + // Query back the identity + fb := database.IdentityQueryFactory.NewFilter(ctx) + filter := fb.And( + fb.Eq("description", string(identityUpdated.Description)), + fb.Eq("did", identityUpdated.DID), + ) + identityRes, res, err := s.GetIdentities(ctx, filter.Count(true)) + assert.NoError(t, err) + assert.Equal(t, 1, len(identityRes)) + assert.Equal(t, int64(1), *res.TotalCount) + identityReadJson, _ = json.Marshal(identityRes[0]) + assert.Equal(t, string(identityJson), string(identityReadJson)) + + // Update + updateTime := fftypes.Now() + up := database.IdentityQueryFactory.NewUpdate(ctx).Set("created", updateTime) + err = s.UpdateIdentity(ctx, identityUpdated.ID, up) + assert.NoError(t, 
err) + + // Test find updated value + filter = fb.And( + fb.Eq("did", identityUpdated.DID), + fb.Eq("messages.claim", identityUpdated.Messages.Claim), + fb.Eq("messages.verification", identityUpdated.Messages.Verification), + fb.Eq("messages.update", identityUpdated.Messages.Update), + fb.Eq("created", updateTime.String()), + ) + identities, _, err := s.GetIdentities(ctx, filter) + assert.NoError(t, err) + assert.Equal(t, 1, len(identities)) + + s.callbacks.AssertExpectations(t) +} + +func TestUpsertIdentityFailBegin(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) + err := s.UpsertIdentity(context.Background(), &fftypes.Identity{}, database.UpsertOptimizationSkip) + assert.Regexp(t, "FF10114", err) + assert.NoError(t, mock.ExpectationsWereMet()) +} + +func TestUpsertIdentityFailSelect(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectBegin() + mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) + mock.ExpectRollback() + err := s.UpsertIdentity(context.Background(), &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + }, + }, database.UpsertOptimizationSkip) + assert.Regexp(t, "FF10115", err) + assert.NoError(t, mock.ExpectationsWereMet()) +} + +func TestUpsertIdentityFailInsert(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectBegin() + mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{})) + mock.ExpectExec("INSERT .*").WillReturnError(fmt.Errorf("pop")) + mock.ExpectRollback() + err := s.UpsertIdentity(context.Background(), &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + }, + }, database.UpsertOptimizationSkip) + assert.Regexp(t, "FF10116", err) + assert.NoError(t, mock.ExpectationsWereMet()) +} + +func TestUpsertIdentityFailUpdate(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectBegin() + mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{"identity"}). 
+ AddRow("id1")) + mock.ExpectExec("UPDATE .*").WillReturnError(fmt.Errorf("pop")) + mock.ExpectRollback() + err := s.UpsertIdentity(context.Background(), &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + }, + }, database.UpsertOptimizationSkip) + assert.Regexp(t, "FF10117", err) + assert.NoError(t, mock.ExpectationsWereMet()) +} + +func TestUpsertIdentityFailCommit(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectBegin() + mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{"identity"})) + mock.ExpectExec("INSERT .*").WillReturnResult(sqlmock.NewResult(1, 1)) + mock.ExpectCommit().WillReturnError(fmt.Errorf("pop")) + err := s.UpsertIdentity(context.Background(), &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + }, + }, database.UpsertOptimizationSkip) + assert.Regexp(t, "FF10119", err) + assert.NoError(t, mock.ExpectationsWereMet()) +} + +func TestGetIdentityByIDSelectFail(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) + _, err := s.GetIdentityByID(context.Background(), fftypes.NewUUID()) + assert.Regexp(t, "FF10115", err) + assert.NoError(t, mock.ExpectationsWereMet()) +} + +func TestGetIdentityByNameSelectFail(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) + _, err := s.GetIdentityByName(context.Background(), fftypes.IdentityTypeOrg, "ff_system", "org1") + assert.Regexp(t, "FF10115", err) + assert.NoError(t, mock.ExpectationsWereMet()) +} + +func TestGetIdentityByIdentitySelectFail(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) + _, err := s.GetIdentityByDID(context.Background(), "did:firefly:org/org1") + assert.Regexp(t, "FF10115", err) + assert.NoError(t, mock.ExpectationsWereMet()) +} + +func TestGetIdentityByIDNotFound(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{"identity", "identity", "identity"})) + msg, err := s.GetIdentityByID(context.Background(), fftypes.NewUUID()) + assert.NoError(t, err) + assert.Nil(t, msg) + assert.NoError(t, mock.ExpectationsWereMet()) +} + +func TestGetIdentityByIDScanFail(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{"identity"}).AddRow("only one")) + _, err := s.GetIdentityByID(context.Background(), fftypes.NewUUID()) + assert.Regexp(t, "FF10121", err) + assert.NoError(t, mock.ExpectationsWereMet()) +} + +func TestGetIdentityQueryFail(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) + f := database.IdentityQueryFactory.NewFilter(context.Background()).Eq("did", "") + _, _, err := s.GetIdentities(context.Background(), f) + assert.Regexp(t, "FF10115", err) + assert.NoError(t, mock.ExpectationsWereMet()) +} + +func TestGetIdentityBuildQueryFail(t *testing.T) { + s, _ := newMockProvider().init() + f := database.IdentityQueryFactory.NewFilter(context.Background()).Eq("did", map[bool]bool{true: false}) + _, _, err := s.GetIdentities(context.Background(), f) + assert.Regexp(t, "FF10149.*type", err) +} + +func TestGetIdentityReadMessageFail(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{"did"}).AddRow("only one")) + f := 
database.IdentityQueryFactory.NewFilter(context.Background()).Eq("did", "") + _, _, err := s.GetIdentities(context.Background(), f) + assert.Regexp(t, "FF10121", err) + assert.NoError(t, mock.ExpectationsWereMet()) +} + +func TestIdentityUpdateBeginFail(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) + u := database.IdentityQueryFactory.NewUpdate(context.Background()).Set("did", "anything") + err := s.UpdateIdentity(context.Background(), fftypes.NewUUID(), u) + assert.Regexp(t, "FF10114", err) +} + +func TestIdentityUpdateBuildQueryFail(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectBegin() + u := database.IdentityQueryFactory.NewUpdate(context.Background()).Set("did", map[bool]bool{true: false}) + err := s.UpdateIdentity(context.Background(), fftypes.NewUUID(), u) + assert.Regexp(t, "FF10149.*did", err) +} + +func TestIdentityUpdateFail(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectBegin() + mock.ExpectExec("UPDATE .*").WillReturnError(fmt.Errorf("pop")) + mock.ExpectRollback() + u := database.IdentityQueryFactory.NewUpdate(context.Background()).Set("did", fftypes.NewUUID()) + err := s.UpdateIdentity(context.Background(), fftypes.NewUUID(), u) + assert.Regexp(t, "FF10117", err) +} diff --git a/internal/database/sqlcommon/message_sql_test.go b/internal/database/sqlcommon/message_sql_test.go index 72622e8a9c..1bb8bf5dbd 100644 --- a/internal/database/sqlcommon/message_sql_test.go +++ b/internal/database/sqlcommon/message_sql_test.go @@ -49,7 +49,7 @@ func TestUpsertE2EWithDB(t *testing.T) { ID: msgID, CID: nil, Type: fftypes.MessageTypeBroadcast, - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Key: "0x12345", Author: "did:firefly:org/abcd", }, @@ -95,7 +95,7 @@ func TestUpsertE2EWithDB(t *testing.T) { ID: msgID, CID: cid, Type: fftypes.MessageTypeBroadcast, - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Key: "0x12345", Author: "did:firefly:org/abcd", }, diff --git a/internal/database/sqlcommon/nextpin_sql.go b/internal/database/sqlcommon/nextpin_sql.go index eccc8ff91b..65524f075a 100644 --- a/internal/database/sqlcommon/nextpin_sql.go +++ b/internal/database/sqlcommon/nextpin_sql.go @@ -1,4 +1,4 @@ -// Copyright © 2021 Kaleido, Inc. +// Copyright © 2022 Kaleido, Inc. // // SPDX-License-Identifier: Apache-2.0 // @@ -155,7 +155,7 @@ func (s *SQLCommon) UpdateNextPin(ctx context.Context, sequence int64, update da } defer s.rollbackTx(ctx, tx, autoCommit) - query, err := s.buildUpdate(sq.Update("nextpins"), update, nodeFilterFieldMap) + query, err := s.buildUpdate(sq.Update("nextpins"), update, pinFilterFieldMap) if err != nil { return err } diff --git a/internal/database/sqlcommon/node_sql.go b/internal/database/sqlcommon/node_sql.go deleted file mode 100644 index 8c6f218ac4..0000000000 --- a/internal/database/sqlcommon/node_sql.go +++ /dev/null @@ -1,227 +0,0 @@ -// Copyright © 2021 Kaleido, Inc. -// -// SPDX-License-Identifier: Apache-2.0 -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -package sqlcommon - -import ( - "context" - "database/sql" - "fmt" - - sq "github.com/Masterminds/squirrel" - "github.com/hyperledger/firefly/internal/i18n" - "github.com/hyperledger/firefly/internal/log" - "github.com/hyperledger/firefly/pkg/database" - "github.com/hyperledger/firefly/pkg/fftypes" -) - -var ( - nodeColumns = []string{ - "id", - "message_id", - "owner", - "name", - "description", - "dx_peer", - "dx_endpoint", - "created", - } - nodeFilterFieldMap = map[string]string{ - "message": "message_id", - "dx.peer": "dx_peer", - "dx.endpoint": "dx_endpoint", - } -) - -func (s *SQLCommon) UpsertNode(ctx context.Context, node *fftypes.Node, allowExisting bool) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) - if err != nil { - return err - } - defer s.rollbackTx(ctx, tx, autoCommit) - - existing := false - if allowExisting { - // Do a select within the transaction to detemine if the UUID already exists - nodeRows, _, err := s.queryTx(ctx, tx, - sq.Select("id"). - From("nodes"). - Where(sq.Eq{ - "owner": node.Owner, - "name": node.Name, - }), - ) - if err != nil { - return err - } - existing = nodeRows.Next() - - if existing { - var id fftypes.UUID - _ = nodeRows.Scan(&id) - if node.ID != nil { - if *node.ID != id { - nodeRows.Close() - return database.IDMismatch - } - } - node.ID = &id // Update on returned object - } - nodeRows.Close() - } - - if existing { - // Update the node - if _, err = s.updateTx(ctx, tx, - sq.Update("nodes"). - // Note we do not update ID - Set("message_id", node.Message). - Set("owner", node.Owner). - Set("name", node.Name). - Set("description", node.Description). - Set("dx_peer", node.DX.Peer). - Set("dx_endpoint", node.DX.Endpoint). - Set("created", node.Created). - Where(sq.Eq{"id": node.ID}), - func() { - s.callbacks.UUIDCollectionEvent(database.CollectionNodes, fftypes.ChangeEventTypeUpdated, node.ID) - }, - ); err != nil { - return err - } - } else { - if _, err = s.insertTx(ctx, tx, - sq.Insert("nodes"). - Columns(nodeColumns...). - Values( - node.ID, - node.Message, - node.Owner, - node.Name, - node.Description, - node.DX.Peer, - node.DX.Endpoint, - node.Created, - ), - func() { - s.callbacks.UUIDCollectionEvent(database.CollectionNodes, fftypes.ChangeEventTypeCreated, node.ID) - }, - ); err != nil { - return err - } - } - - return s.commitTx(ctx, tx, autoCommit) -} - -func (s *SQLCommon) nodeResult(ctx context.Context, row *sql.Rows) (*fftypes.Node, error) { - node := fftypes.Node{} - err := row.Scan( - &node.ID, - &node.Message, - &node.Owner, - &node.Name, - &node.Description, - &node.DX.Peer, - &node.DX.Endpoint, - &node.Created, - ) - if err != nil { - return nil, i18n.WrapError(ctx, err, i18n.MsgDBReadErr, "nodes") - } - return &node, nil -} - -func (s *SQLCommon) getNodePred(ctx context.Context, desc string, pred interface{}) (message *fftypes.Node, err error) { - - rows, _, err := s.query(ctx, - sq.Select(nodeColumns...). - From("nodes"). 
- Where(pred), - ) - if err != nil { - return nil, err - } - defer rows.Close() - - if !rows.Next() { - log.L(ctx).Debugf("Node '%s' not found", desc) - return nil, nil - } - - node, err := s.nodeResult(ctx, rows) - if err != nil { - return nil, err - } - - return node, nil -} - -func (s *SQLCommon) GetNode(ctx context.Context, owner, name string) (message *fftypes.Node, err error) { - return s.getNodePred(ctx, fmt.Sprintf("%s/%s", owner, name), sq.Eq{"owner": owner, "name": name}) -} - -func (s *SQLCommon) GetNodeByID(ctx context.Context, id *fftypes.UUID) (message *fftypes.Node, err error) { - return s.getNodePred(ctx, id.String(), sq.Eq{"id": id}) -} - -func (s *SQLCommon) GetNodes(ctx context.Context, filter database.Filter) (message []*fftypes.Node, fr *database.FilterResult, err error) { - - query, fop, fi, err := s.filterSelect(ctx, "", sq.Select(nodeColumns...).From("nodes"), filter, nodeFilterFieldMap, []interface{}{"sequence"}) - if err != nil { - return nil, nil, err - } - - rows, tx, err := s.query(ctx, query) - if err != nil { - return nil, nil, err - } - defer rows.Close() - - node := []*fftypes.Node{} - for rows.Next() { - d, err := s.nodeResult(ctx, rows) - if err != nil { - return nil, nil, err - } - node = append(node, d) - } - - return node, s.queryRes(ctx, tx, "nodes", fop, fi), err - -} - -func (s *SQLCommon) UpdateNode(ctx context.Context, id *fftypes.UUID, update database.Update) (err error) { - - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) - if err != nil { - return err - } - defer s.rollbackTx(ctx, tx, autoCommit) - - query, err := s.buildUpdate(sq.Update("nodes"), update, nodeFilterFieldMap) - if err != nil { - return err - } - query = query.Where(sq.Eq{"id": id}) - - _, err = s.updateTx(ctx, tx, query, nil /* no change events for filter based updates */) - if err != nil { - return err - } - - return s.commitTx(ctx, tx, autoCommit) -} diff --git a/internal/database/sqlcommon/node_sql_test.go b/internal/database/sqlcommon/node_sql_test.go deleted file mode 100644 index 15ab487962..0000000000 --- a/internal/database/sqlcommon/node_sql_test.go +++ /dev/null @@ -1,253 +0,0 @@ -// Copyright © 2021 Kaleido, Inc. -// -// SPDX-License-Identifier: Apache-2.0 -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package sqlcommon - -import ( - "context" - "encoding/json" - "fmt" - "testing" - - "github.com/DATA-DOG/go-sqlmock" - "github.com/hyperledger/firefly/internal/log" - "github.com/hyperledger/firefly/pkg/database" - "github.com/hyperledger/firefly/pkg/fftypes" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/mock" -) - -func TestNodesE2EWithDB(t *testing.T) { - log.SetLevel("debug") - - s, cleanup := newSQLiteTestProvider(t) - defer cleanup() - ctx := context.Background() - - // Create a new node entry - nodeID := fftypes.NewUUID() - node := &fftypes.Node{ - ID: nodeID, - Message: fftypes.NewUUID(), - Owner: "0x23456", - Name: "node1", - Created: fftypes.Now(), - } - - s.callbacks.On("UUIDCollectionEvent", database.CollectionNodes, fftypes.ChangeEventTypeCreated, nodeID, mock.Anything).Return() - s.callbacks.On("UUIDCollectionEvent", database.CollectionNodes, fftypes.ChangeEventTypeUpdated, nodeID, mock.Anything).Return() - - err := s.UpsertNode(ctx, node, true) - assert.NoError(t, err) - - // Check we get the exact same node back - nodeRead, err := s.GetNode(ctx, node.Owner, node.Name) - assert.NoError(t, err) - assert.NotNil(t, nodeRead) - nodeJson, _ := json.Marshal(&node) - nodeReadJson, _ := json.Marshal(&nodeRead) - assert.Equal(t, string(nodeJson), string(nodeReadJson)) - - // Rejects attempt to update ID - err = s.UpsertNode(context.Background(), &fftypes.Node{ - ID: fftypes.NewUUID(), - Owner: "0x23456", - Name: "node1", - }, true) - assert.Equal(t, database.IDMismatch, err) - - // Update the node (this is testing what's possible at the database layer, - // and does not account for the verification that happens at the higher level) - nodeUpdated := &fftypes.Node{ - ID: nil, // as long as we don't specify one we're fine - Message: fftypes.NewUUID(), - Owner: "0x23456", - Name: "node1", - Description: "node1", - DX: fftypes.DXInfo{ - Peer: "peer1", - Endpoint: fftypes.JSONObject{"some": "info"}, - }, - Created: fftypes.Now(), - } - err = s.UpsertNode(context.Background(), nodeUpdated, true) - assert.NoError(t, err) - - // Check we get the exact same data back - note the removal of one of the node elements - nodeRead, err = s.GetNode(ctx, node.Owner, node.Name) - assert.NoError(t, err) - nodeJson, _ = json.Marshal(&nodeUpdated) - nodeReadJson, _ = json.Marshal(&nodeRead) - assert.Equal(t, string(nodeJson), string(nodeReadJson)) - - // Query back the node - fb := database.NodeQueryFactory.NewFilter(ctx) - filter := fb.And( - fb.Eq("description", string(nodeUpdated.Description)), - fb.Eq("name", nodeUpdated.Name), - ) - nodeRes, res, err := s.GetNodes(ctx, filter.Count(true)) - assert.NoError(t, err) - assert.Equal(t, 1, len(nodeRes)) - assert.Equal(t, int64(1), *res.TotalCount) - nodeReadJson, _ = json.Marshal(nodeRes[0]) - assert.Equal(t, string(nodeJson), string(nodeReadJson)) - - // Update - updateTime := fftypes.Now() - up := database.NodeQueryFactory.NewUpdate(ctx).Set("created", updateTime) - err = s.UpdateNode(ctx, nodeUpdated.ID, up) - assert.NoError(t, err) - - // Test find updated value - filter = fb.And( - fb.Eq("name", nodeUpdated.Name), - fb.Eq("created", updateTime.String()), - ) - nodes, _, err := s.GetNodes(ctx, filter) - assert.NoError(t, err) - assert.Equal(t, 1, len(nodes)) - - s.callbacks.AssertExpectations(t) -} - -func TestUpsertNodeFailBegin(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) - err := s.UpsertNode(context.Background(), &fftypes.Node{}, true) - assert.Regexp(t, 
"FF10114", err) - assert.NoError(t, mock.ExpectationsWereMet()) -} - -func TestUpsertNodeFailSelect(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectBegin() - mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) - mock.ExpectRollback() - err := s.UpsertNode(context.Background(), &fftypes.Node{Name: "node1"}, true) - assert.Regexp(t, "FF10115", err) - assert.NoError(t, mock.ExpectationsWereMet()) -} - -func TestUpsertNodeFailInsert(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectBegin() - mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{})) - mock.ExpectExec("INSERT .*").WillReturnError(fmt.Errorf("pop")) - mock.ExpectRollback() - err := s.UpsertNode(context.Background(), &fftypes.Node{Name: "node1"}, true) - assert.Regexp(t, "FF10116", err) - assert.NoError(t, mock.ExpectationsWereMet()) -} - -func TestUpsertNodeFailUpdate(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectBegin() - mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{"name"}). - AddRow("id1")) - mock.ExpectExec("UPDATE .*").WillReturnError(fmt.Errorf("pop")) - mock.ExpectRollback() - err := s.UpsertNode(context.Background(), &fftypes.Node{Name: "node1"}, true) - assert.Regexp(t, "FF10117", err) - assert.NoError(t, mock.ExpectationsWereMet()) -} - -func TestUpsertNodeFailCommit(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectBegin() - mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{"name"})) - mock.ExpectExec("INSERT .*").WillReturnResult(sqlmock.NewResult(1, 1)) - mock.ExpectCommit().WillReturnError(fmt.Errorf("pop")) - err := s.UpsertNode(context.Background(), &fftypes.Node{Name: "node1"}, true) - assert.Regexp(t, "FF10119", err) - assert.NoError(t, mock.ExpectationsWereMet()) -} - -func TestGetNodeByIDSelectFail(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) - _, err := s.GetNode(context.Background(), "owner1", "node1") - assert.Regexp(t, "FF10115", err) - assert.NoError(t, mock.ExpectationsWereMet()) -} - -func TestGetNodeByIDNotFound(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{"name", "node", "name"})) - msg, err := s.GetNode(context.Background(), "owner1", "node1") - assert.NoError(t, err) - assert.Nil(t, msg) - assert.NoError(t, mock.ExpectationsWereMet()) -} - -func TestGetNodeByIDScanFail(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{"name"}).AddRow("only one")) - _, err := s.GetNodeByID(context.Background(), fftypes.NewUUID()) - assert.Regexp(t, "FF10121", err) - assert.NoError(t, mock.ExpectationsWereMet()) -} - -func TestGetNodeQueryFail(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) - f := database.NodeQueryFactory.NewFilter(context.Background()).Eq("name", "") - _, _, err := s.GetNodes(context.Background(), f) - assert.Regexp(t, "FF10115", err) - assert.NoError(t, mock.ExpectationsWereMet()) -} - -func TestGetNodeBuildQueryFail(t *testing.T) { - s, _ := newMockProvider().init() - f := database.NodeQueryFactory.NewFilter(context.Background()).Eq("name", map[bool]bool{true: false}) - _, _, err := s.GetNodes(context.Background(), f) - assert.Regexp(t, "FF10149.*type", err) -} - -func TestGetNodeReadMessageFail(t *testing.T) { - s, mock := newMockProvider().init() - 
mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{"name"}).AddRow("only one")) - f := database.NodeQueryFactory.NewFilter(context.Background()).Eq("name", "") - _, _, err := s.GetNodes(context.Background(), f) - assert.Regexp(t, "FF10121", err) - assert.NoError(t, mock.ExpectationsWereMet()) -} - -func TestNodeUpdateBeginFail(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) - u := database.NodeQueryFactory.NewUpdate(context.Background()).Set("name", "anything") - err := s.UpdateNode(context.Background(), fftypes.NewUUID(), u) - assert.Regexp(t, "FF10114", err) -} - -func TestNodeUpdateBuildQueryFail(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectBegin() - u := database.NodeQueryFactory.NewUpdate(context.Background()).Set("name", map[bool]bool{true: false}) - err := s.UpdateNode(context.Background(), fftypes.NewUUID(), u) - assert.Regexp(t, "FF10149.*name", err) -} - -func TestNodeUpdateFail(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectBegin() - mock.ExpectExec("UPDATE .*").WillReturnError(fmt.Errorf("pop")) - mock.ExpectRollback() - u := database.NodeQueryFactory.NewUpdate(context.Background()).Set("name", fftypes.NewUUID()) - err := s.UpdateNode(context.Background(), fftypes.NewUUID(), u) - assert.Regexp(t, "FF10117", err) -} diff --git a/internal/database/sqlcommon/organization_sql.go b/internal/database/sqlcommon/organization_sql.go deleted file mode 100644 index 230d822bf0..0000000000 --- a/internal/database/sqlcommon/organization_sql.go +++ /dev/null @@ -1,224 +0,0 @@ -// Copyright © 2021 Kaleido, Inc. -// -// SPDX-License-Identifier: Apache-2.0 -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package sqlcommon - -import ( - "context" - "database/sql" - - sq "github.com/Masterminds/squirrel" - "github.com/hyperledger/firefly/internal/i18n" - "github.com/hyperledger/firefly/internal/log" - "github.com/hyperledger/firefly/pkg/database" - "github.com/hyperledger/firefly/pkg/fftypes" -) - -var ( - organizationColumns = []string{ - "id", - "message_id", - "name", - "parent", - "identity", - "description", - "profile", - "created", - } - organizationFilterFieldMap = map[string]string{ - "message": "message_id", - } -) - -func (s *SQLCommon) UpsertOrganization(ctx context.Context, organization *fftypes.Organization, allowExisting bool) (err error) { - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) - if err != nil { - return err - } - defer s.rollbackTx(ctx, tx, autoCommit) - - existing := false - if allowExisting { - // Do a select within the transaction to detemine if the UUID already exists - organizationRows, _, err := s.queryTx(ctx, tx, - sq.Select("id"). - From("orgs"). 
- Where(sq.Eq{"identity": organization.Identity}), - ) - if err != nil { - return err - } - existing = organizationRows.Next() - - if existing { - var id fftypes.UUID - _ = organizationRows.Scan(&id) - if organization.ID != nil { - if *organization.ID != id { - organizationRows.Close() - return database.IDMismatch - } - } - organization.ID = &id // Update on returned object - } - organizationRows.Close() - } - - if existing { - // Update the organization - if _, err = s.updateTx(ctx, tx, - sq.Update("orgs"). - // Note we do not update ID - Set("message_id", organization.Message). - Set("parent", organization.Parent). - Set("identity", organization.Identity). - Set("description", organization.Description). - Set("profile", organization.Profile). - Set("created", organization.Created). - Where(sq.Eq{"identity": organization.Identity}), - func() { - s.callbacks.UUIDCollectionEvent(database.CollectionOrganizations, fftypes.ChangeEventTypeUpdated, organization.ID) - }, - ); err != nil { - return err - } - } else { - if _, err = s.insertTx(ctx, tx, - sq.Insert("orgs"). - Columns(organizationColumns...). - Values( - organization.ID, - organization.Message, - organization.Name, - organization.Parent, - organization.Identity, - organization.Description, - organization.Profile, - organization.Created, - ), - func() { - s.callbacks.UUIDCollectionEvent(database.CollectionOrganizations, fftypes.ChangeEventTypeCreated, organization.ID) - }, - ); err != nil { - return err - } - } - - return s.commitTx(ctx, tx, autoCommit) -} - -func (s *SQLCommon) organizationResult(ctx context.Context, row *sql.Rows) (*fftypes.Organization, error) { - organization := fftypes.Organization{} - err := row.Scan( - &organization.ID, - &organization.Message, - &organization.Name, - &organization.Parent, - &organization.Identity, - &organization.Description, - &organization.Profile, - &organization.Created, - ) - if err != nil { - return nil, i18n.WrapError(ctx, err, i18n.MsgDBReadErr, "orgs") - } - return &organization, nil -} - -func (s *SQLCommon) getOrganizationPred(ctx context.Context, desc string, pred interface{}) (message *fftypes.Organization, err error) { - - rows, _, err := s.query(ctx, - sq.Select(organizationColumns...). - From("orgs"). 
- Where(pred), - ) - if err != nil { - return nil, err - } - defer rows.Close() - - if !rows.Next() { - log.L(ctx).Debugf("Organization '%s' not found", desc) - return nil, nil - } - - organization, err := s.organizationResult(ctx, rows) - if err != nil { - return nil, err - } - - return organization, nil -} - -func (s *SQLCommon) GetOrganizationByName(ctx context.Context, name string) (message *fftypes.Organization, err error) { - return s.getOrganizationPred(ctx, name, sq.Eq{"name": name}) -} - -func (s *SQLCommon) GetOrganizationByIdentity(ctx context.Context, identity string) (message *fftypes.Organization, err error) { - return s.getOrganizationPred(ctx, identity, sq.Eq{"identity": identity}) -} - -func (s *SQLCommon) GetOrganizationByID(ctx context.Context, id *fftypes.UUID) (message *fftypes.Organization, err error) { - return s.getOrganizationPred(ctx, id.String(), sq.Eq{"id": id}) -} - -func (s *SQLCommon) GetOrganizations(ctx context.Context, filter database.Filter) (message []*fftypes.Organization, fr *database.FilterResult, err error) { - - query, fop, fi, err := s.filterSelect(ctx, "", sq.Select(organizationColumns...).From("orgs"), filter, organizationFilterFieldMap, []interface{}{"sequence"}) - if err != nil { - return nil, nil, err - } - - rows, tx, err := s.query(ctx, query) - if err != nil { - return nil, nil, err - } - defer rows.Close() - - organization := []*fftypes.Organization{} - for rows.Next() { - d, err := s.organizationResult(ctx, rows) - if err != nil { - return nil, nil, err - } - organization = append(organization, d) - } - - return organization, s.queryRes(ctx, tx, "orgs", fop, fi), err - -} - -func (s *SQLCommon) UpdateOrganization(ctx context.Context, id *fftypes.UUID, update database.Update) (err error) { - - ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) - if err != nil { - return err - } - defer s.rollbackTx(ctx, tx, autoCommit) - - query, err := s.buildUpdate(sq.Update("orgs"), update, organizationFilterFieldMap) - if err != nil { - return err - } - query = query.Where(sq.Eq{"id": id}) - - _, err = s.updateTx(ctx, tx, query, nil /* no change events for filter based updates */) - if err != nil { - return err - } - - return s.commitTx(ctx, tx, autoCommit) -} diff --git a/internal/database/sqlcommon/organization_sql_test.go b/internal/database/sqlcommon/organization_sql_test.go deleted file mode 100644 index e1ff563071..0000000000 --- a/internal/database/sqlcommon/organization_sql_test.go +++ /dev/null @@ -1,265 +0,0 @@ -// Copyright © 2021 Kaleido, Inc. -// -// SPDX-License-Identifier: Apache-2.0 -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package sqlcommon - -import ( - "context" - "encoding/json" - "fmt" - "testing" - - "github.com/DATA-DOG/go-sqlmock" - "github.com/hyperledger/firefly/internal/log" - "github.com/hyperledger/firefly/pkg/database" - "github.com/hyperledger/firefly/pkg/fftypes" - "github.com/stretchr/testify/assert" -) - -func TestOrganizationsE2EWithDB(t *testing.T) { - log.SetLevel("debug") - - s, cleanup := newSQLiteTestProvider(t) - defer cleanup() - ctx := context.Background() - - // Create a new organization entry - orgID := fftypes.NewUUID() - organization := &fftypes.Organization{ - ID: orgID, - Message: fftypes.NewUUID(), - Name: "org1", - Identity: "0x12345", - Created: fftypes.Now(), - } - - s.callbacks.On("UUIDCollectionEvent", database.CollectionOrganizations, fftypes.ChangeEventTypeCreated, orgID).Return() - s.callbacks.On("UUIDCollectionEvent", database.CollectionOrganizations, fftypes.ChangeEventTypeUpdated, orgID).Return() - - err := s.UpsertOrganization(ctx, organization, true) - assert.NoError(t, err) - - // Check we get the exact same organization back - organizationRead, err := s.GetOrganizationByIdentity(ctx, organization.Identity) - assert.NoError(t, err) - assert.NotNil(t, organizationRead) - organizationJson, _ := json.Marshal(&organization) - organizationReadJson, _ := json.Marshal(&organizationRead) - assert.Equal(t, string(organizationJson), string(organizationReadJson)) - - // Rejects attempt to update ID - err = s.UpsertOrganization(context.Background(), &fftypes.Organization{ - ID: fftypes.NewUUID(), - Identity: "0x12345", - }, true) - assert.Equal(t, database.IDMismatch, err) - - // Update the organization (this is testing what's possible at the database layer, - // and does not account for the verification that happens at the higher level) - organizationUpdated := &fftypes.Organization{ - ID: nil, // as long as we don't specify one we're fine - Message: fftypes.NewUUID(), - Name: "org1", - Parent: "0x23456", - Identity: "0x12345", - Description: "organization1", - Profile: fftypes.JSONObject{"some": "info"}, - Created: fftypes.Now(), - } - err = s.UpsertOrganization(context.Background(), organizationUpdated, true) - assert.NoError(t, err) - - // Check we get the exact same data back - note the removal of one of the organization elements - organizationRead, err = s.GetOrganizationByName(ctx, organization.Name) - assert.NoError(t, err) - organizationJson, _ = json.Marshal(&organizationUpdated) - organizationReadJson, _ = json.Marshal(&organizationRead) - assert.Equal(t, string(organizationJson), string(organizationReadJson)) - - // Query back the organization - fb := database.OrganizationQueryFactory.NewFilter(ctx) - filter := fb.And( - fb.Eq("description", string(organizationUpdated.Description)), - fb.Eq("identity", organizationUpdated.Identity), - ) - organizationRes, res, err := s.GetOrganizations(ctx, filter.Count(true)) - assert.NoError(t, err) - assert.Equal(t, 1, len(organizationRes)) - assert.Equal(t, int64(1), *res.TotalCount) - organizationReadJson, _ = json.Marshal(organizationRes[0]) - assert.Equal(t, string(organizationJson), string(organizationReadJson)) - - // Update - updateTime := fftypes.Now() - up := database.OrganizationQueryFactory.NewUpdate(ctx).Set("created", updateTime) - err = s.UpdateOrganization(ctx, organizationUpdated.ID, up) - assert.NoError(t, err) - - // Test find updated value - filter = fb.And( - fb.Eq("identity", organizationUpdated.Identity), - fb.Eq("created", updateTime.String()), - ) - organizations, _, err := s.GetOrganizations(ctx, 
filter) - assert.NoError(t, err) - assert.Equal(t, 1, len(organizations)) - - s.callbacks.AssertExpectations(t) -} - -func TestUpsertOrganizationFailBegin(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) - err := s.UpsertOrganization(context.Background(), &fftypes.Organization{}, true) - assert.Regexp(t, "FF10114", err) - assert.NoError(t, mock.ExpectationsWereMet()) -} - -func TestUpsertOrganizationFailSelect(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectBegin() - mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) - mock.ExpectRollback() - err := s.UpsertOrganization(context.Background(), &fftypes.Organization{Identity: "id1"}, true) - assert.Regexp(t, "FF10115", err) - assert.NoError(t, mock.ExpectationsWereMet()) -} - -func TestUpsertOrganizationFailInsert(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectBegin() - mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{})) - mock.ExpectExec("INSERT .*").WillReturnError(fmt.Errorf("pop")) - mock.ExpectRollback() - err := s.UpsertOrganization(context.Background(), &fftypes.Organization{Identity: "id1"}, true) - assert.Regexp(t, "FF10116", err) - assert.NoError(t, mock.ExpectationsWereMet()) -} - -func TestUpsertOrganizationFailUpdate(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectBegin() - mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{"identity"}). - AddRow("id1")) - mock.ExpectExec("UPDATE .*").WillReturnError(fmt.Errorf("pop")) - mock.ExpectRollback() - err := s.UpsertOrganization(context.Background(), &fftypes.Organization{Identity: "id1"}, true) - assert.Regexp(t, "FF10117", err) - assert.NoError(t, mock.ExpectationsWereMet()) -} - -func TestUpsertOrganizationFailCommit(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectBegin() - mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{"identity"})) - mock.ExpectExec("INSERT .*").WillReturnResult(sqlmock.NewResult(1, 1)) - mock.ExpectCommit().WillReturnError(fmt.Errorf("pop")) - err := s.UpsertOrganization(context.Background(), &fftypes.Organization{Identity: "id1"}, true) - assert.Regexp(t, "FF10119", err) - assert.NoError(t, mock.ExpectationsWereMet()) -} - -func TestGetOrganizationByIDSelectFail(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) - _, err := s.GetOrganizationByID(context.Background(), fftypes.NewUUID()) - assert.Regexp(t, "FF10115", err) - assert.NoError(t, mock.ExpectationsWereMet()) -} - -func TestGetOrganizationByNameSelectFail(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) - _, err := s.GetOrganizationByName(context.Background(), "org1") - assert.Regexp(t, "FF10115", err) - assert.NoError(t, mock.ExpectationsWereMet()) -} - -func TestGetOrganizationByIdentitySelectFail(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) - _, err := s.GetOrganizationByIdentity(context.Background(), "id1") - assert.Regexp(t, "FF10115", err) - assert.NoError(t, mock.ExpectationsWereMet()) -} - -func TestGetOrganizationByIDNotFound(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{"identity", "organization", "identity"})) - msg, err := s.GetOrganizationByID(context.Background(), fftypes.NewUUID()) - 
assert.NoError(t, err) - assert.Nil(t, msg) - assert.NoError(t, mock.ExpectationsWereMet()) -} - -func TestGetOrganizationByIDScanFail(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{"identity"}).AddRow("only one")) - _, err := s.GetOrganizationByID(context.Background(), fftypes.NewUUID()) - assert.Regexp(t, "FF10121", err) - assert.NoError(t, mock.ExpectationsWereMet()) -} - -func TestGetOrganizationQueryFail(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) - f := database.OrganizationQueryFactory.NewFilter(context.Background()).Eq("identity", "") - _, _, err := s.GetOrganizations(context.Background(), f) - assert.Regexp(t, "FF10115", err) - assert.NoError(t, mock.ExpectationsWereMet()) -} - -func TestGetOrganizationBuildQueryFail(t *testing.T) { - s, _ := newMockProvider().init() - f := database.OrganizationQueryFactory.NewFilter(context.Background()).Eq("identity", map[bool]bool{true: false}) - _, _, err := s.GetOrganizations(context.Background(), f) - assert.Regexp(t, "FF10149.*type", err) -} - -func TestGetOrganizationReadMessageFail(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{"identity"}).AddRow("only one")) - f := database.OrganizationQueryFactory.NewFilter(context.Background()).Eq("identity", "") - _, _, err := s.GetOrganizations(context.Background(), f) - assert.Regexp(t, "FF10121", err) - assert.NoError(t, mock.ExpectationsWereMet()) -} - -func TestOrganizationUpdateBeginFail(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) - u := database.OrganizationQueryFactory.NewUpdate(context.Background()).Set("identity", "anything") - err := s.UpdateOrganization(context.Background(), fftypes.NewUUID(), u) - assert.Regexp(t, "FF10114", err) -} - -func TestOrganizationUpdateBuildQueryFail(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectBegin() - u := database.OrganizationQueryFactory.NewUpdate(context.Background()).Set("identity", map[bool]bool{true: false}) - err := s.UpdateOrganization(context.Background(), fftypes.NewUUID(), u) - assert.Regexp(t, "FF10149.*identity", err) -} - -func TestOrganizationUpdateFail(t *testing.T) { - s, mock := newMockProvider().init() - mock.ExpectBegin() - mock.ExpectExec("UPDATE .*").WillReturnError(fmt.Errorf("pop")) - mock.ExpectRollback() - u := database.OrganizationQueryFactory.NewUpdate(context.Background()).Set("identity", fftypes.NewUUID()) - err := s.UpdateOrganization(context.Background(), fftypes.NewUUID(), u) - assert.Regexp(t, "FF10117", err) -} diff --git a/internal/database/sqlcommon/pin_sql.go b/internal/database/sqlcommon/pin_sql.go index f16857dbf6..8cd2008b4d 100644 --- a/internal/database/sqlcommon/pin_sql.go +++ b/internal/database/sqlcommon/pin_sql.go @@ -33,6 +33,7 @@ var ( "hash", "batch_id", "idx", + "signer", "dispatched", "created", } @@ -81,6 +82,7 @@ func (s *SQLCommon) UpsertPin(ctx context.Context, pin *fftypes.Pin) (err error) pin.Hash, pin.Batch, pin.Index, + pin.Signer, pin.Dispatched, pin.Created, ), @@ -103,6 +105,7 @@ func (s *SQLCommon) pinResult(ctx context.Context, row *sql.Rows) (*fftypes.Pin, &pin.Hash, &pin.Batch, &pin.Index, + &pin.Signer, &pin.Dispatched, &pin.Created, &pin.Sequence, diff --git a/internal/database/sqlcommon/pin_sql_test.go b/internal/database/sqlcommon/pin_sql_test.go index 
5930682e82..b2510247fa 100644 --- a/internal/database/sqlcommon/pin_sql_test.go +++ b/internal/database/sqlcommon/pin_sql_test.go @@ -43,6 +43,7 @@ func TestPinsE2EWithDB(t *testing.T) { Batch: fftypes.NewUUID(), Index: 10, Created: fftypes.Now(), + Signer: "0x12345", Dispatched: false, } diff --git a/internal/database/sqlcommon/verifier_sql.go b/internal/database/sqlcommon/verifier_sql.go new file mode 100644 index 0000000000..1e219afd1f --- /dev/null +++ b/internal/database/sqlcommon/verifier_sql.go @@ -0,0 +1,212 @@ +// Copyright © 2022 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package sqlcommon + +import ( + "context" + "database/sql" + + sq "github.com/Masterminds/squirrel" + "github.com/hyperledger/firefly/internal/i18n" + "github.com/hyperledger/firefly/internal/log" + "github.com/hyperledger/firefly/pkg/database" + "github.com/hyperledger/firefly/pkg/fftypes" +) + +var ( + verifierColumns = []string{ + "hash", + "identity", + "vtype", + "namespace", + "value", + "created", + } + verifierFilterFieldMap = map[string]string{ + "type": "vtype", + } +) + +func (s *SQLCommon) attemptVerifierUpdate(ctx context.Context, tx *txWrapper, verifier *fftypes.Verifier) (int64, error) { + return s.updateTx(ctx, tx, + sq.Update("verifiers"). + Set("identity", verifier.Identity). + Set("vtype", verifier.Type). + Set("namespace", verifier.Namespace). + Set("value", verifier.Value). + Where(sq.Eq{ + "hash": verifier.Hash, + }), + func() { + s.callbacks.HashCollectionNSEvent(database.CollectionVerifiers, fftypes.ChangeEventTypeUpdated, verifier.Namespace, verifier.Hash) + }) +} + +func (s *SQLCommon) attemptVerifierInsert(ctx context.Context, tx *txWrapper, verifier *fftypes.Verifier, requestConflictEmptyResult bool) (err error) { + verifier.Created = fftypes.Now() + _, err = s.insertTxExt(ctx, tx, + sq.Insert("verifiers"). + Columns(verifierColumns...). 
+ Values( + verifier.Hash, + verifier.Identity, + verifier.Type, + verifier.Namespace, + verifier.Value, + verifier.Created, + ), + func() { + s.callbacks.HashCollectionNSEvent(database.CollectionVerifiers, fftypes.ChangeEventTypeCreated, verifier.Namespace, verifier.Hash) + }, requestConflictEmptyResult) + return err +} + +func (s *SQLCommon) UpsertVerifier(ctx context.Context, verifier *fftypes.Verifier, optimization database.UpsertOptimization) (err error) { + ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + if err != nil { + return err + } + defer s.rollbackTx(ctx, tx, autoCommit) + + optimized := false + if optimization == database.UpsertOptimizationNew { + opErr := s.attemptVerifierInsert(ctx, tx, verifier, true /* we want a failure here we can progress past */) + optimized = opErr == nil + } else if optimization == database.UpsertOptimizationExisting { + rowsAffected, opErr := s.attemptVerifierUpdate(ctx, tx, verifier) + optimized = opErr == nil && rowsAffected == 1 + } + + if !optimized { + // Do a select within the transaction to detemine if the UUID already exists + msgRows, _, err := s.queryTx(ctx, tx, + sq.Select("hash"). + From("verifiers"). + Where(sq.Eq{"hash": verifier.Hash}), + ) + if err != nil { + return err + } + existing := msgRows.Next() + msgRows.Close() + + if existing { + // Update the verifier + if _, err = s.attemptVerifierUpdate(ctx, tx, verifier); err != nil { + return err + } + } else { + if err = s.attemptVerifierInsert(ctx, tx, verifier, false); err != nil { + return err + } + } + } + + return s.commitTx(ctx, tx, autoCommit) +} + +func (s *SQLCommon) verifierResult(ctx context.Context, row *sql.Rows) (*fftypes.Verifier, error) { + verifier := fftypes.Verifier{} + err := row.Scan( + &verifier.Hash, + &verifier.Identity, + &verifier.Type, + &verifier.Namespace, + &verifier.Value, + &verifier.Created, + ) + if err != nil { + return nil, i18n.WrapError(ctx, err, i18n.MsgDBReadErr, "verifiers") + } + return &verifier, nil +} + +func (s *SQLCommon) getVerifierPred(ctx context.Context, desc string, pred interface{}) (verifier *fftypes.Verifier, err error) { + + rows, _, err := s.query(ctx, + sq.Select(verifierColumns...). + From("verifiers"). 
+ Where(pred), + ) + if err != nil { + return nil, err + } + defer rows.Close() + + if !rows.Next() { + log.L(ctx).Debugf("Verifier '%s' not found", desc) + return nil, nil + } + + return s.verifierResult(ctx, rows) +} + +func (s *SQLCommon) GetVerifierByValue(ctx context.Context, vType fftypes.VerifierType, namespace, value string) (verifier *fftypes.Verifier, err error) { + return s.getVerifierPred(ctx, value, sq.Eq{"vtype": vType, "namespace": namespace, "value": value}) +} + +func (s *SQLCommon) GetVerifierByHash(ctx context.Context, hash *fftypes.Bytes32) (verifier *fftypes.Verifier, err error) { + return s.getVerifierPred(ctx, hash.String(), sq.Eq{"hash": hash}) +} + +func (s *SQLCommon) GetVerifiers(ctx context.Context, filter database.Filter) (verifiers []*fftypes.Verifier, fr *database.FilterResult, err error) { + + query, fop, fi, err := s.filterSelect(ctx, "", sq.Select(verifierColumns...).From("verifiers"), filter, verifierFilterFieldMap, []interface{}{"sequence"}) + if err != nil { + return nil, nil, err + } + + rows, tx, err := s.query(ctx, query) + if err != nil { + return nil, nil, err + } + defer rows.Close() + + verifiers = []*fftypes.Verifier{} + for rows.Next() { + d, err := s.verifierResult(ctx, rows) + if err != nil { + return nil, nil, err + } + verifiers = append(verifiers, d) + } + + return verifiers, s.queryRes(ctx, tx, "verifiers", fop, fi), err + +} + +func (s *SQLCommon) UpdateVerifier(ctx context.Context, hash *fftypes.Bytes32, update database.Update) (err error) { + + ctx, tx, autoCommit, err := s.beginOrUseTx(ctx) + if err != nil { + return err + } + defer s.rollbackTx(ctx, tx, autoCommit) + + query, err := s.buildUpdate(sq.Update("verifiers"), update, verifierFilterFieldMap) + if err != nil { + return err + } + query = query.Where(sq.Eq{"hash": hash}) + + _, err = s.updateTx(ctx, tx, query, nil /* no change events for filter based updates */) + if err != nil { + return err + } + + return s.commitTx(ctx, tx, autoCommit) +} diff --git a/internal/database/sqlcommon/verifier_sql_test.go b/internal/database/sqlcommon/verifier_sql_test.go new file mode 100644 index 0000000000..3d53b76f2c --- /dev/null +++ b/internal/database/sqlcommon/verifier_sql_test.go @@ -0,0 +1,259 @@ +// Copyright © 2021 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.verifier/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
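For context on the UpsertVerifier tests that follow: the new UpsertVerifier takes a database.UpsertOptimization hint so the caller can skip the extra SELECT when it already knows whether the row exists, falling back to select-then-insert-or-update inside the transaction otherwise. A minimal usage sketch using only the constants exercised in this change (the s and ctx variables are assumed from the surrounding SQLCommon code):

    // Caller believes the verifier is new: attempt the INSERT first, tolerating a conflict
    err := s.UpsertVerifier(ctx, verifier, database.UpsertOptimizationNew)

    // Caller expects the row to exist already: attempt the UPDATE first
    err = s.UpsertVerifier(ctx, verifier, database.UpsertOptimizationExisting)

    // No hint: SELECT by hash within the transaction, then INSERT or UPDATE as appropriate
    err = s.UpsertVerifier(ctx, verifier, database.UpsertOptimizationSkip)
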
+ +package sqlcommon + +import ( + "context" + "encoding/json" + "fmt" + "testing" + + "github.com/DATA-DOG/go-sqlmock" + "github.com/hyperledger/firefly/internal/log" + "github.com/hyperledger/firefly/pkg/database" + "github.com/hyperledger/firefly/pkg/fftypes" + "github.com/stretchr/testify/assert" +) + +func TestVerifiersE2EWithDB(t *testing.T) { + log.SetLevel("debug") + + s, cleanup := newSQLiteTestProvider(t) + defer cleanup() + ctx := context.Background() + + // Create a new verifier entry + verifier := &fftypes.Verifier{ + Identity: fftypes.NewUUID(), + Namespace: "ns1", + VerifierRef: fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "0x12345", + }, + } + verifier.Seal() + + s.callbacks.On("HashCollectionNSEvent", database.CollectionVerifiers, fftypes.ChangeEventTypeCreated, "ns1", verifier.Hash).Return() + s.callbacks.On("HashCollectionNSEvent", database.CollectionVerifiers, fftypes.ChangeEventTypeUpdated, "ns1", verifier.Hash).Return() + + err := s.UpsertVerifier(ctx, verifier, database.UpsertOptimizationNew) + assert.NoError(t, err) + + // Check we get the exact same verifier back + verifierRead, err := s.GetVerifierByHash(ctx, verifier.Hash) + assert.NoError(t, err) + assert.NotNil(t, verifierRead) + verifierJson, _ := json.Marshal(&verifier) + verifierReadJson, _ := json.Marshal(&verifierRead) + assert.Equal(t, string(verifierJson), string(verifierReadJson)) + + // Update the verifier (this is testing what's possible at the database layer, + // and does not account for the verification that happens at the higher level) + verifierUpdated := &fftypes.Verifier{ + Identity: fftypes.NewUUID(), + Created: verifier.Created, + Namespace: "ns1", + VerifierRef: fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "0x12345", + }, + } + verifierUpdated.Seal() + err = s.UpsertVerifier(context.Background(), verifierUpdated, database.UpsertOptimizationExisting) + assert.NoError(t, err) + + // Check we get the exact same data back - note the removal of one of the verifier elements + verifierRead, err = s.GetVerifierByValue(ctx, verifierUpdated.Type, verifierUpdated.Namespace, verifierUpdated.Value) + assert.NoError(t, err) + verifierJson, _ = json.Marshal(&verifierUpdated) + verifierReadJson, _ = json.Marshal(&verifierRead) + assert.Equal(t, string(verifierJson), string(verifierReadJson)) + + // Query back the verifier + fb := database.VerifierQueryFactory.NewFilter(ctx) + filter := fb.And( + fb.Eq("value", string(verifierUpdated.Value)), + fb.Eq("namespace", verifierUpdated.Namespace), + ) + verifierRes, res, err := s.GetVerifiers(ctx, filter.Count(true)) + assert.NoError(t, err) + assert.Equal(t, 1, len(verifierRes)) + assert.Equal(t, int64(1), *res.TotalCount) + verifierReadJson, _ = json.Marshal(verifierRes[0]) + assert.Equal(t, string(verifierJson), string(verifierReadJson)) + + // Update + updateTime := fftypes.Now() + up := database.VerifierQueryFactory.NewUpdate(ctx).Set("created", updateTime) + err = s.UpdateVerifier(ctx, verifierUpdated.Hash, up) + assert.NoError(t, err) + + // Test find updated value + filter = fb.And( + fb.Eq("value", verifierUpdated.Value), + fb.Eq("created", updateTime.String()), + ) + verifiers, _, err := s.GetVerifiers(ctx, filter) + assert.NoError(t, err) + assert.Equal(t, 1, len(verifiers)) + + s.callbacks.AssertExpectations(t) +} + +func TestUpsertVerifierFailBegin(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) + err := s.UpsertVerifier(context.Background(), 
&fftypes.Verifier{}, database.UpsertOptimizationSkip) + assert.Regexp(t, "FF10114", err) + assert.NoError(t, mock.ExpectationsWereMet()) +} + +func TestUpsertVerifierFailSelect(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectBegin() + mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) + mock.ExpectRollback() + err := s.UpsertVerifier(context.Background(), &fftypes.Verifier{Hash: fftypes.NewRandB32()}, database.UpsertOptimizationSkip) + assert.Regexp(t, "FF10115", err) + assert.NoError(t, mock.ExpectationsWereMet()) +} + +func TestUpsertVerifierFailInsert(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectBegin() + mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{})) + mock.ExpectExec("INSERT .*").WillReturnError(fmt.Errorf("pop")) + mock.ExpectRollback() + err := s.UpsertVerifier(context.Background(), &fftypes.Verifier{Hash: fftypes.NewRandB32()}, database.UpsertOptimizationSkip) + assert.Regexp(t, "FF10116", err) + assert.NoError(t, mock.ExpectationsWereMet()) +} + +func TestUpsertVerifierFailUpdate(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectBegin() + mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{"verifier"}). + AddRow("id1")) + mock.ExpectExec("UPDATE .*").WillReturnError(fmt.Errorf("pop")) + mock.ExpectRollback() + err := s.UpsertVerifier(context.Background(), &fftypes.Verifier{Hash: fftypes.NewRandB32()}, database.UpsertOptimizationSkip) + assert.Regexp(t, "FF10117", err) + assert.NoError(t, mock.ExpectationsWereMet()) +} + +func TestUpsertVerifierFailCommit(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectBegin() + mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{"verifier"})) + mock.ExpectExec("INSERT .*").WillReturnResult(sqlmock.NewResult(1, 1)) + mock.ExpectCommit().WillReturnError(fmt.Errorf("pop")) + err := s.UpsertVerifier(context.Background(), &fftypes.Verifier{Hash: fftypes.NewRandB32()}, database.UpsertOptimizationSkip) + assert.Regexp(t, "FF10119", err) + assert.NoError(t, mock.ExpectationsWereMet()) +} + +func TestGetVerifierByHashSelectFail(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) + _, err := s.GetVerifierByHash(context.Background(), fftypes.NewRandB32()) + assert.Regexp(t, "FF10115", err) + assert.NoError(t, mock.ExpectationsWereMet()) +} + +func TestGetVerifierByNameSelectFail(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) + _, err := s.GetVerifierByValue(context.Background(), fftypes.VerifierTypeEthAddress, "ff_system", "0x12345") + assert.Regexp(t, "FF10115", err) + assert.NoError(t, mock.ExpectationsWereMet()) +} + +func TestGetVerifierByVerifierSelectFail(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) + _, err := s.GetVerifierByValue(context.Background(), fftypes.VerifierTypeEthAddress, "ff_system", "0x12345") + assert.Regexp(t, "FF10115", err) + assert.NoError(t, mock.ExpectationsWereMet()) +} + +func TestGetVerifierByHashNotFound(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{"verifier", "verifier", "verifier"})) + msg, err := s.GetVerifierByHash(context.Background(), fftypes.NewRandB32()) + assert.NoError(t, err) + assert.Nil(t, msg) + assert.NoError(t, mock.ExpectationsWereMet()) +} + +func 
TestGetVerifierByHashScanFail(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{"verifier"}).AddRow("only one")) + _, err := s.GetVerifierByHash(context.Background(), fftypes.NewRandB32()) + assert.Regexp(t, "FF10121", err) + assert.NoError(t, mock.ExpectationsWereMet()) +} + +func TestGetVerifierQueryFail(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectQuery("SELECT .*").WillReturnError(fmt.Errorf("pop")) + f := database.VerifierQueryFactory.NewFilter(context.Background()).Eq("value", "") + _, _, err := s.GetVerifiers(context.Background(), f) + assert.Regexp(t, "FF10115", err) + assert.NoError(t, mock.ExpectationsWereMet()) +} + +func TestGetVerifierBuildQueryFail(t *testing.T) { + s, _ := newMockProvider().init() + f := database.VerifierQueryFactory.NewFilter(context.Background()).Eq("value", map[bool]bool{true: false}) + _, _, err := s.GetVerifiers(context.Background(), f) + assert.Regexp(t, "FF10149.*type", err) +} + +func TestGetVerifierReadMessageFail(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectQuery("SELECT .*").WillReturnRows(sqlmock.NewRows([]string{"value"}).AddRow("only one")) + f := database.VerifierQueryFactory.NewFilter(context.Background()).Eq("value", "") + _, _, err := s.GetVerifiers(context.Background(), f) + assert.Regexp(t, "FF10121", err) + assert.NoError(t, mock.ExpectationsWereMet()) +} + +func TestVerifierUpdateBeginFail(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectBegin().WillReturnError(fmt.Errorf("pop")) + u := database.VerifierQueryFactory.NewUpdate(context.Background()).Set("value", "anything") + err := s.UpdateVerifier(context.Background(), fftypes.NewRandB32(), u) + assert.Regexp(t, "FF10114", err) +} + +func TestVerifierUpdateBuildQueryFail(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectBegin() + u := database.VerifierQueryFactory.NewUpdate(context.Background()).Set("value", map[bool]bool{true: false}) + err := s.UpdateVerifier(context.Background(), fftypes.NewRandB32(), u) + assert.Regexp(t, "FF10149.*value", err) +} + +func TestVerifierUpdateFail(t *testing.T) { + s, mock := newMockProvider().init() + mock.ExpectBegin() + mock.ExpectExec("UPDATE .*").WillReturnError(fmt.Errorf("pop")) + mock.ExpectRollback() + u := database.VerifierQueryFactory.NewUpdate(context.Background()).Set("value", fftypes.NewUUID()) + err := s.UpdateVerifier(context.Background(), fftypes.NewRandB32(), u) + assert.Regexp(t, "FF10117", err) +} diff --git a/internal/dataexchange/ffdx/ffdx.go b/internal/dataexchange/ffdx/ffdx.go index a88768ba7d..f5b13b6af6 100644 --- a/internal/dataexchange/ffdx/ffdx.go +++ b/internal/dataexchange/ffdx/ffdx.go @@ -45,7 +45,7 @@ type FFDX struct { needsInit bool initialized bool initMutex sync.Mutex - nodes []fftypes.DXInfo + nodes []fftypes.JSONObject } type wsEvent struct { @@ -115,7 +115,7 @@ func (h *FFDX) Name() string { return "ffdx" } -func (h *FFDX) Init(ctx context.Context, prefix config.Prefix, nodes []fftypes.DXInfo, callbacks dataexchange.Callbacks) (err error) { +func (h *FFDX) Init(ctx context.Context, prefix config.Prefix, nodes []fftypes.JSONObject, callbacks dataexchange.Callbacks) (err error) { h.ctx = log.WithLogField(ctx, "dx", "https") h.callbacks = callbacks @@ -150,21 +150,6 @@ func (h *FFDX) Capabilities() *dataexchange.Capabilities { return h.capabilities } -func (h *FFDX) dxEndpointArray(nodes []fftypes.DXInfo) []fftypes.JSONObject { - // The remote DataExchange 
connector HTTP API expects a flat array - // where "id" is in embedded in each entry - because that's how it - // originally passed the data to us. - // In the DXInfo contract on the Go plugin in FireFly we raise the "id" up - // to be a first class "peer" field, so it can be indexed outside of the opaque - // endpoint payload. - // This function just converts back to a flat array. - dxEndpointArray := make([]fftypes.JSONObject, len(nodes)) - for i, node := range nodes { - dxEndpointArray[i] = node.Endpoint - } - return dxEndpointArray -} - func (h *FFDX) beforeConnect(ctx context.Context) error { h.initMutex.Lock() defer h.initMutex.Unlock() @@ -173,7 +158,7 @@ func (h *FFDX) beforeConnect(ctx context.Context) error { h.initialized = false var status dxStatus res, err := h.client.R().SetContext(ctx). - SetBody(h.dxEndpointArray(h.nodes)). + SetBody(h.nodes). SetResult(&status). Post("/api/v1/init") if err != nil || !res.IsSuccess() { @@ -197,30 +182,34 @@ func (h *FFDX) checkInitialized(ctx context.Context) error { return nil } -func (h *FFDX) GetEndpointInfo(ctx context.Context) (peer fftypes.DXInfo, err error) { +func (h *FFDX) GetEndpointInfo(ctx context.Context) (peer fftypes.JSONObject, err error) { if err := h.checkInitialized(ctx); err != nil { return peer, err } res, err := h.client.R().SetContext(ctx). - SetResult(&peer.Endpoint). + SetResult(&peer). Get("/api/v1/id") if err != nil || !res.IsSuccess() { return peer, restclient.WrapRestErr(ctx, res, err, i18n.MsgDXRESTErr) } - peer.Peer = peer.Endpoint.GetString("id") + id := peer.GetString("id") + if id == "" { + log.L(ctx).Errorf("Invalid DX info: %s", peer.String()) + return nil, i18n.NewError(ctx, i18n.MsgDXInfoMissingID) + } h.nodes = append(h.nodes, peer) return peer, nil } -func (h *FFDX) AddPeer(ctx context.Context, peer fftypes.DXInfo) (err error) { +func (h *FFDX) AddPeer(ctx context.Context, peer fftypes.JSONObject) (err error) { if err := h.checkInitialized(ctx); err != nil { return err } res, err := h.client.R().SetContext(ctx). - SetBody(peer.Endpoint). - Put(fmt.Sprintf("/api/v1/peers/%s", peer.Peer)) + SetBody(peer). 
+ Put(fmt.Sprintf("/api/v1/peers/%s", peer.GetString("id"))) if err != nil || !res.IsSuccess() { return restclient.WrapRestErr(ctx, res, err, i18n.MsgDXRESTErr) } diff --git a/internal/dataexchange/ffdx/ffdx_test.go b/internal/dataexchange/ffdx/ffdx_test.go index 1a659cf09c..5d29d6bdd2 100644 --- a/internal/dataexchange/ffdx/ffdx_test.go +++ b/internal/dataexchange/ffdx/ffdx_test.go @@ -56,7 +56,7 @@ func newTestFFDX(t *testing.T, manifestEnabled bool) (h *FFDX, toServer, fromSer utConfPrefix.Set(DataExchangeManifestEnabled, manifestEnabled) h = &FFDX{initialized: true} - nodes := make([]fftypes.DXInfo, 0) + nodes := make([]fftypes.JSONObject, 0) h.InitPrefix(utConfPrefix) err := h.Init(context.Background(), utConfPrefix, nodes, &dataexchangemocks.Callbacks{}) @@ -72,7 +72,7 @@ func newTestFFDX(t *testing.T, manifestEnabled bool) (h *FFDX, toServer, fromSer func TestInitBadURL(t *testing.T) { config.Reset() h := &FFDX{} - nodes := make([]fftypes.DXInfo, 0) + nodes := make([]fftypes.JSONObject, 0) h.InitPrefix(utConfPrefix) utConfPrefix.Set(restclient.HTTPConfigURL, "::::////") err := h.Init(context.Background(), utConfPrefix, nodes, &dataexchangemocks.Callbacks{}) @@ -82,7 +82,7 @@ func TestInitBadURL(t *testing.T) { func TestInitMissingURL(t *testing.T) { config.Reset() h := &FFDX{} - nodes := make([]fftypes.DXInfo, 0) + nodes := make([]fftypes.JSONObject, 0) h.InitPrefix(utConfPrefix) err := h.Init(context.Background(), utConfPrefix, nodes, &dataexchangemocks.Callbacks{}) assert.Regexp(t, "FF10138", err) @@ -101,12 +101,26 @@ func TestGetEndpointInfo(t *testing.T) { peer, err := h.GetEndpointInfo(context.Background()) assert.NoError(t, err) - assert.Equal(t, "peer1", peer.Peer) + assert.Equal(t, "peer1", peer.GetString("id")) assert.Equal(t, fftypes.JSONObject{ "id": "peer1", "endpoint": "https://peer1.example.com", "cert": "cert data...", - }, peer.Endpoint) + }, peer) +} + +func TestGetEndpointMissingID(t *testing.T) { + h, _, _, httpURL, done := newTestFFDX(t, false) + defer done() + + httpmock.RegisterResponder("GET", fmt.Sprintf("%s/api/v1/id", httpURL), + httpmock.NewJsonResponderOrPanic(200, fftypes.JSONObject{ + "endpoint": "https://peer1.example.com", + "cert": "cert data...", + })) + + _, err := h.GetEndpointInfo(context.Background()) + assert.Regexp(t, "FF10367", err) } func TestGetEndpointInfoError(t *testing.T) { @@ -127,13 +141,10 @@ func TestAddPeer(t *testing.T) { httpmock.RegisterResponder("PUT", fmt.Sprintf("%s/api/v1/peers/peer1", httpURL), httpmock.NewJsonResponderOrPanic(200, fftypes.JSONObject{})) - err := h.AddPeer(context.Background(), fftypes.DXInfo{ - Peer: "peer1", - Endpoint: fftypes.JSONObject{ - "id": "peer1", - "endpoint": "https://peer1.example.com", - "cert": "cert...", - }, + err := h.AddPeer(context.Background(), fftypes.JSONObject{ + "id": "peer1", + "endpoint": "https://peer1.example.com", + "cert": "cert...", }) assert.NoError(t, err) } @@ -145,9 +156,8 @@ func TestAddPeerError(t *testing.T) { httpmock.RegisterResponder("PUT", fmt.Sprintf("%s/api/v1/peers/peer1", httpURL), httpmock.NewJsonResponderOrPanic(500, fftypes.JSONObject{})) - err := h.AddPeer(context.Background(), fftypes.DXInfo{ - Peer: "peer1", - Endpoint: fftypes.JSONObject{}, + err := h.AddPeer(context.Background(), fftypes.JSONObject{ + "id": "peer1", }) assert.Regexp(t, "FF10229", err) } @@ -562,7 +572,7 @@ func TestWebsocketWithReinit(t *testing.T) { u.Scheme = "http" httpURL := u.String() h := &FFDX{} - nodes := []fftypes.DXInfo{{}} + nodes := []fftypes.JSONObject{{}} config.Reset() 
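For context on the DXInfo removal above: the FFDX plugin now passes peer endpoint details around as an opaque fftypes.JSONObject whose "id" field is treated as the peer name. A minimal sketch of the shape these tests exercise (the h and ctx variables are assumed from the test setup; this is illustrative, not part of the change itself):

    peer := fftypes.JSONObject{
        "id":       "peer1",
        "endpoint": "https://peer1.example.com",
        "cert":     "cert data...",
    }
    if peer.GetString("id") == "" {
        // GetEndpointInfo rejects a connector response without an "id" with FF10367 (MsgDXInfoMissingID)
    }
    // AddPeer PUTs the whole object to /api/v1/peers/<id>, e.g. /api/v1/peers/peer1
    err := h.AddPeer(ctx, peer)
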
h.InitPrefix(utConfPrefix) @@ -573,7 +583,7 @@ func TestWebsocketWithReinit(t *testing.T) { count := 0 httpmock.RegisterResponder("POST", fmt.Sprintf("%s/api/v1/init", httpURL), func(req *http.Request) (*http.Response, error) { - var reqNodes []fftypes.DXInfo + var reqNodes []fftypes.JSONObject err := json.NewDecoder(req.Body).Decode(&reqNodes) assert.NoError(t, err) assert.Equal(t, 1, len(reqNodes)) @@ -614,7 +624,7 @@ func TestDXUninitialized(t *testing.T) { _, err := h.GetEndpointInfo(context.Background()) assert.Regexp(t, "FF10342", err) - err = h.AddPeer(context.Background(), fftypes.DXInfo{}) + err = h.AddPeer(context.Background(), fftypes.JSONObject{}) assert.Regexp(t, "FF10342", err) err = h.TransferBLOB(context.Background(), fftypes.NewUUID(), "peer1", "ns1/id1") diff --git a/internal/definitions/definition_handler.go b/internal/definitions/definition_handler.go index 2b950df630..bd457d0a0f 100644 --- a/internal/definitions/definition_handler.go +++ b/internal/definitions/definition_handler.go @@ -24,8 +24,10 @@ import ( "github.com/hyperledger/firefly/internal/broadcast" "github.com/hyperledger/firefly/internal/contracts" "github.com/hyperledger/firefly/internal/data" + "github.com/hyperledger/firefly/internal/identity" "github.com/hyperledger/firefly/internal/log" "github.com/hyperledger/firefly/internal/privatemessaging" + "github.com/hyperledger/firefly/pkg/blockchain" "github.com/hyperledger/firefly/pkg/database" "github.com/hyperledger/firefly/pkg/dataexchange" "github.com/hyperledger/firefly/pkg/fftypes" @@ -35,10 +37,15 @@ import ( type DefinitionHandlers interface { privatemessaging.GroupManager - HandleDefinitionBroadcast(ctx context.Context, msg *fftypes.Message, data []*fftypes.Data, tx *fftypes.UUID) (DefinitionMessageAction, *DefinitionBatchActions, error) + HandleDefinitionBroadcast(ctx context.Context, state DefinitionBatchState, msg *fftypes.Message, data []*fftypes.Data, tx *fftypes.UUID) (HandlerResult, error) SendReply(ctx context.Context, event *fftypes.Event, reply *fftypes.MessageInOut) } +type HandlerResult struct { + Action DefinitionMessageAction + CustomCorrelator *fftypes.UUID +} + // DefinitionMessageAction is the action to be taken on an individual definition message type DefinitionMessageAction int @@ -56,35 +63,44 @@ const ( ActionWait ) -// DefinitionBatchActions are actions to be taken at the end of a definition batch -// See further notes on "batchActions" in the event aggregator -type DefinitionBatchActions struct { +// DefinitionBatchState tracks the state between definition handlers that run in-line on the pin processing route in the +// aggregator as part of a batch of pins. They might have complex API calls, and interdependencies, that need to be managed via this state. +// The actions to be taken at the end of a definition batch. 
+// See further notes on "batchState" in the event aggregator +type DefinitionBatchState interface { // PreFinalize may perform a blocking action (possibly to an external connector) that should execute outside database RunAsGroup - PreFinalize func(ctx context.Context) error + AddPreFinalize(func(ctx context.Context) error) // Finalize may perform final, non-idempotent database operations (such as inserting Events) - Finalize func(ctx context.Context) error + AddFinalize(func(ctx context.Context) error) + + // GetPendingConfirm returns a map of messages are that pending confirmation after already being processed in this batch + GetPendingConfirm() map[fftypes.UUID]*fftypes.Message } type definitionHandlers struct { - database database.Plugin - exchange dataexchange.Plugin - data data.Manager - broadcast broadcast.Manager - messaging privatemessaging.Manager - assets assets.Manager - contracts contracts.Manager + database database.Plugin + blockchain blockchain.Plugin + exchange dataexchange.Plugin + data data.Manager + identity identity.Manager + broadcast broadcast.Manager + messaging privatemessaging.Manager + assets assets.Manager + contracts contracts.Manager } -func NewDefinitionHandlers(di database.Plugin, dx dataexchange.Plugin, dm data.Manager, bm broadcast.Manager, pm privatemessaging.Manager, am assets.Manager, cm contracts.Manager) DefinitionHandlers { +func NewDefinitionHandlers(di database.Plugin, bi blockchain.Plugin, dx dataexchange.Plugin, dm data.Manager, im identity.Manager, bm broadcast.Manager, pm privatemessaging.Manager, am assets.Manager, cm contracts.Manager) DefinitionHandlers { return &definitionHandlers{ - database: di, - exchange: dx, - data: dm, - broadcast: bm, - messaging: pm, - assets: am, - contracts: cm, + database: di, + blockchain: bi, + exchange: dx, + data: dm, + identity: im, + broadcast: bm, + messaging: pm, + assets: am, + contracts: cm, } } @@ -104,27 +120,33 @@ func (dh *definitionHandlers) EnsureLocalGroup(ctx context.Context, group *fftyp return dh.messaging.EnsureLocalGroup(ctx, group) } -func (dh *definitionHandlers) HandleDefinitionBroadcast(ctx context.Context, msg *fftypes.Message, data []*fftypes.Data, tx *fftypes.UUID) (msgAction DefinitionMessageAction, batchActions *DefinitionBatchActions, err error) { +func (dh *definitionHandlers) HandleDefinitionBroadcast(ctx context.Context, state DefinitionBatchState, msg *fftypes.Message, data []*fftypes.Data, tx *fftypes.UUID) (msgAction HandlerResult, err error) { l := log.L(ctx) l.Infof("Confirming system definition broadcast '%s' [%s]", msg.Header.Tag, msg.Header.ID) - switch fftypes.SystemTag(msg.Header.Tag) { + switch msg.Header.Tag { case fftypes.SystemTagDefineDatatype: - return dh.handleDatatypeBroadcast(ctx, msg, data, tx) + return dh.handleDatatypeBroadcast(ctx, state, msg, data, tx) case fftypes.SystemTagDefineNamespace: - return dh.handleNamespaceBroadcast(ctx, msg, data, tx) - case fftypes.SystemTagDefineOrganization: - return dh.handleOrganizationBroadcast(ctx, msg, data) - case fftypes.SystemTagDefineNode: - return dh.handleNodeBroadcast(ctx, msg, data) + return dh.handleNamespaceBroadcast(ctx, state, msg, data, tx) + case fftypes.DeprecatedSystemTagDefineOrganization: + return dh.handleDeprecatedOrganizationBroadcast(ctx, state, msg, data) + case fftypes.DeprecatedSystemTagDefineNode: + return dh.handleDeprecatedNodeBroadcast(ctx, state, msg, data) + case fftypes.SystemTagIdentityClaim: + return dh.handleIdentityClaimBroadcast(ctx, state, msg, data, nil) + case 
fftypes.SystemTagIdentityVerification: + return dh.handleIdentityVerificationBroadcast(ctx, state, msg, data) + case fftypes.SystemTagIdentityUpdate: + return dh.handleIdentityUpdateBroadcast(ctx, state, msg, data) case fftypes.SystemTagDefinePool: - return dh.handleTokenPoolBroadcast(ctx, msg, data) + return dh.handleTokenPoolBroadcast(ctx, state, msg, data) case fftypes.SystemTagDefineFFI: - return dh.handleFFIBroadcast(ctx, msg, data, tx) + return dh.handleFFIBroadcast(ctx, state, msg, data, tx) case fftypes.SystemTagDefineContractAPI: - return dh.handleContractAPIBroadcast(ctx, msg, data, tx) + return dh.handleContractAPIBroadcast(ctx, state, msg, data, tx) default: l.Warnf("Unknown SystemTag '%s' for definition ID '%s'", msg.Header.Tag, msg.Header.ID) - return ActionReject, nil, nil + return HandlerResult{Action: ActionReject}, nil } } diff --git a/internal/definitions/definition_handler_contracts.go b/internal/definitions/definition_handler_contracts.go index ffec3b10b3..b718542d25 100644 --- a/internal/definitions/definition_handler_contracts.go +++ b/internal/definitions/definition_handler_contracts.go @@ -65,7 +65,7 @@ func (dh *definitionHandlers) persistContractAPI(ctx context.Context, api *fftyp return err == nil, err } -func (dh *definitionHandlers) handleFFIBroadcast(ctx context.Context, msg *fftypes.Message, data []*fftypes.Data, tx *fftypes.UUID) (DefinitionMessageAction, *DefinitionBatchActions, error) { +func (dh *definitionHandlers) handleFFIBroadcast(ctx context.Context, state DefinitionBatchState, msg *fftypes.Message, data []*fftypes.Data, tx *fftypes.UUID) (HandlerResult, error) { l := log.L(ctx) var broadcast fftypes.FFI valid := dh.getSystemBroadcastPayload(ctx, msg, data, &broadcast) @@ -78,26 +78,25 @@ func (dh *definitionHandlers) handleFFIBroadcast(ctx context.Context, msg *fftyp broadcast.Message = msg.Header.ID valid, err = dh.persistFFI(ctx, &broadcast) if err != nil { - return ActionRetry, nil, err + return HandlerResult{Action: ActionRetry}, err } } } if !valid { l.Warnf("Contract interface rejected id=%s author=%s", broadcast.ID, msg.Header.Author) - return ActionReject, nil, nil + return HandlerResult{Action: ActionReject}, nil } l.Infof("Contract interface created id=%s author=%s", broadcast.ID, msg.Header.Author) - return ActionConfirm, &DefinitionBatchActions{ - Finalize: func(ctx context.Context) error { - event := fftypes.NewEvent(fftypes.EventTypeContractInterfaceConfirmed, broadcast.Namespace, broadcast.ID, tx) - return dh.database.InsertEvent(ctx, event) - }, - }, nil + state.AddFinalize(func(ctx context.Context) error { + event := fftypes.NewEvent(fftypes.EventTypeContractInterfaceConfirmed, broadcast.Namespace, broadcast.ID, tx) + return dh.database.InsertEvent(ctx, event) + }) + return HandlerResult{Action: ActionConfirm}, nil } -func (dh *definitionHandlers) handleContractAPIBroadcast(ctx context.Context, msg *fftypes.Message, data []*fftypes.Data, tx *fftypes.UUID) (DefinitionMessageAction, *DefinitionBatchActions, error) { +func (dh *definitionHandlers) handleContractAPIBroadcast(ctx context.Context, state DefinitionBatchState, msg *fftypes.Message, data []*fftypes.Data, tx *fftypes.UUID) (HandlerResult, error) { l := log.L(ctx) var broadcast fftypes.ContractAPI valid := dh.getSystemBroadcastPayload(ctx, msg, data, &broadcast) @@ -110,21 +109,20 @@ func (dh *definitionHandlers) handleContractAPIBroadcast(ctx context.Context, ms broadcast.Message = msg.Header.ID valid, err = dh.persistContractAPI(ctx, &broadcast) if err != nil { - return 
ActionRetry, nil, err + return HandlerResult{Action: ActionRetry}, err } } } if !valid { l.Warnf("Contract API rejected id=%s author=%s", broadcast.ID, msg.Header.Author) - return ActionReject, nil, nil + return HandlerResult{Action: ActionReject}, nil } l.Infof("Contract API created id=%s author=%s", broadcast.ID, msg.Header.Author) - return ActionConfirm, &DefinitionBatchActions{ - Finalize: func(ctx context.Context) error { - event := fftypes.NewEvent(fftypes.EventTypeContractAPIConfirmed, broadcast.Namespace, broadcast.ID, tx) - return dh.database.InsertEvent(ctx, event) - }, - }, nil + state.AddFinalize(func(ctx context.Context) error { + event := fftypes.NewEvent(fftypes.EventTypeContractAPIConfirmed, broadcast.Namespace, broadcast.ID, tx) + return dh.database.InsertEvent(ctx, event) + }) + return HandlerResult{Action: ActionConfirm}, nil } diff --git a/internal/definitions/definition_handler_contracts_test.go b/internal/definitions/definition_handler_contracts_test.go index 9f66445cfe..2adddf091e 100644 --- a/internal/definitions/definition_handler_contracts_test.go +++ b/internal/definitions/definition_handler_contracts_test.go @@ -87,7 +87,7 @@ func testContractAPI() *fftypes.ContractAPI { } func TestHandleFFIBroadcastOk(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, bs := newTestDefinitionHandlers(t) b, err := json.Marshal(testFFI()) assert.NoError(t, err) @@ -102,20 +102,21 @@ func TestHandleFFIBroadcastOk(t *testing.T) { mbi.On("InsertEvent", mock.Anything, mock.Anything).Return(nil) mcm := dh.contracts.(*contractmocks.Manager) mcm.On("ValidateFFIAndSetPathnames", mock.Anything, mock.Anything).Return(nil) - action, ba, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ + + action, err := dh.HandleDefinitionBroadcast(context.Background(), bs, &fftypes.Message{ Header: fftypes.MessageHeader{ - Tag: string(fftypes.SystemTagDefineFFI), + Tag: fftypes.SystemTagDefineFFI, }, }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionConfirm, action) + assert.Equal(t, HandlerResult{Action: ActionConfirm}, action) assert.NoError(t, err) - err = ba.Finalize(context.Background()) + err = bs.finalizers[0](context.Background()) assert.NoError(t, err) mbi.AssertExpectations(t) } func TestPersistFFIValidateFFIFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, _ := newTestDefinitionHandlers(t) mcm := dh.contracts.(*contractmocks.Manager) mcm.On("ValidateFFIAndSetPathnames", mock.Anything, mock.Anything).Return(fmt.Errorf("pop")) valid, err := dh.persistFFI(context.Background(), testFFI()) @@ -125,22 +126,23 @@ func TestPersistFFIValidateFFIFail(t *testing.T) { } func TestHandleFFIBroadcastReject(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, bs := newTestDefinitionHandlers(t) mbi := dh.database.(*databasemocks.Plugin) mcm := dh.contracts.(*contractmocks.Manager) mbi.On("InsertEvent", mock.Anything, mock.Anything).Return(nil) mcm.On("ValidateFFIAndSetPathnames", mock.Anything, mock.Anything).Return(fmt.Errorf("pop")) - action, _, err := dh.handleFFIBroadcast(context.Background(), &fftypes.Message{ + action, err := dh.handleFFIBroadcast(context.Background(), bs, &fftypes.Message{ Header: fftypes.MessageHeader{ - Tag: string(fftypes.SystemTagDefineFFI), + Tag: fftypes.SystemTagDefineFFI, }, }, []*fftypes.Data{}, fftypes.NewUUID()) - assert.Equal(t, ActionReject, action) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) assert.NoError(t, err) + bs.assertNoFinalizers() } func TestPersistFFIUpsertFFIFail(t 
*testing.T) { - dh := newTestDefinitionHandlers(t) + dh, _ := newTestDefinitionHandlers(t) mbi := dh.database.(*databasemocks.Plugin) mbi.On("UpsertFFI", mock.Anything, mock.Anything).Return(fmt.Errorf("pop")) mcm := dh.contracts.(*contractmocks.Manager) @@ -152,7 +154,7 @@ func TestPersistFFIUpsertFFIFail(t *testing.T) { } func TestPersistFFIUpsertFFIMethodFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, _ := newTestDefinitionHandlers(t) mbi := dh.database.(*databasemocks.Plugin) mbi.On("UpsertFFI", mock.Anything, mock.Anything).Return(nil) mbi.On("UpsertFFIMethod", mock.Anything, mock.Anything).Return(fmt.Errorf("pop")) @@ -165,7 +167,7 @@ func TestPersistFFIUpsertFFIMethodFail(t *testing.T) { } func TestPersistFFIUpsertFFIEventFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, _ := newTestDefinitionHandlers(t) mbi := dh.database.(*databasemocks.Plugin) mbi.On("UpsertFFI", mock.Anything, mock.Anything).Return(nil) mbi.On("UpsertFFIMethod", mock.Anything, mock.Anything).Return(nil) @@ -179,7 +181,7 @@ func TestPersistFFIUpsertFFIEventFail(t *testing.T) { } func TestHandleFFIBroadcastValidateFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, bs := newTestDefinitionHandlers(t) ffi := testFFI() ffi.Name = "*%^!$%^&*" b, err := json.Marshal(ffi) @@ -189,17 +191,18 @@ func TestHandleFFIBroadcastValidateFail(t *testing.T) { } mbi := dh.database.(*databasemocks.Plugin) mbi.On("InsertEvent", mock.Anything, mock.Anything).Return(nil) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ + action, err := dh.HandleDefinitionBroadcast(context.Background(), bs, &fftypes.Message{ Header: fftypes.MessageHeader{ - Tag: string(fftypes.SystemTagDefineFFI), + Tag: fftypes.SystemTagDefineFFI, }, }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionReject, action) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) assert.NoError(t, err) + bs.assertNoFinalizers() } func TestHandleFFIBroadcastPersistFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, bs := newTestDefinitionHandlers(t) ffi := testFFI() b, err := json.Marshal(ffi) assert.NoError(t, err) @@ -211,17 +214,18 @@ func TestHandleFFIBroadcastPersistFail(t *testing.T) { mdi.On("InsertEvent", mock.Anything, mock.Anything).Return(nil) mcm := dh.contracts.(*contractmocks.Manager) mcm.On("ValidateFFIAndSetPathnames", mock.Anything, mock.Anything).Return(nil) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ + action, err := dh.HandleDefinitionBroadcast(context.Background(), bs, &fftypes.Message{ Header: fftypes.MessageHeader{ - Tag: string(fftypes.SystemTagDefineFFI), + Tag: fftypes.SystemTagDefineFFI, }, }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionRetry, action) + assert.Equal(t, HandlerResult{Action: ActionRetry}, action) assert.Regexp(t, "pop", err) + bs.assertNoFinalizers() } func TestHandleContractAPIBroadcastOk(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, bs := newTestDefinitionHandlers(t) b, err := json.Marshal(testFFI()) assert.NoError(t, err) @@ -233,20 +237,20 @@ func TestHandleContractAPIBroadcastOk(t *testing.T) { mbi.On("UpsertContractAPI", mock.Anything, mock.Anything, mock.Anything).Return(nil) mbi.On("GetContractAPIByName", mock.Anything, mock.Anything, mock.Anything).Return(nil, nil) mbi.On("InsertEvent", mock.Anything, mock.Anything).Return(nil) - action, ba, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ + action, err := 
dh.HandleDefinitionBroadcast(context.Background(), bs, &fftypes.Message{ Header: fftypes.MessageHeader{ - Tag: string(fftypes.SystemTagDefineContractAPI), + Tag: fftypes.SystemTagDefineContractAPI, }, }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionConfirm, action) + assert.Equal(t, HandlerResult{Action: ActionConfirm}, action) assert.NoError(t, err) - err = ba.Finalize(context.Background()) + err = bs.finalizers[0](context.Background()) assert.NoError(t, err) mbi.AssertExpectations(t) } func TestPersistContractAPIGetFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, _ := newTestDefinitionHandlers(t) mbi := dh.database.(*databasemocks.Plugin) mbi.On("GetContractAPIByName", mock.Anything, mock.Anything, mock.Anything).Return(nil, fmt.Errorf("pop")) _, err := dh.persistContractAPI(context.Background(), testContractAPI()) @@ -257,7 +261,7 @@ func TestPersistContractAPIGetFail(t *testing.T) { func TestPersistContractAPIDifferentLocation(t *testing.T) { existing := testContractAPI() existing.Location = fftypes.JSONAnyPtr(`{"existing": true}`) - dh := newTestDefinitionHandlers(t) + dh, _ := newTestDefinitionHandlers(t) mbi := dh.database.(*databasemocks.Plugin) mbi.On("GetContractAPIByName", mock.Anything, mock.Anything, mock.Anything).Return(existing, nil) valid, err := dh.persistContractAPI(context.Background(), testContractAPI()) @@ -267,7 +271,7 @@ func TestPersistContractAPIDifferentLocation(t *testing.T) { } func TestPersistContractAPIUpsertFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, _ := newTestDefinitionHandlers(t) mbi := dh.database.(*databasemocks.Plugin) mbi.On("GetContractAPIByName", mock.Anything, mock.Anything, mock.Anything).Return(nil, nil) mbi.On("UpsertContractAPI", mock.Anything, mock.Anything, mock.Anything).Return(fmt.Errorf("pop")) @@ -277,7 +281,7 @@ func TestPersistContractAPIUpsertFail(t *testing.T) { } func TestHandleContractAPIBroadcastValidateFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, bs := newTestDefinitionHandlers(t) api := testContractAPI() api.Name = "*%^!$%^&*" b, err := json.Marshal(api) @@ -287,17 +291,18 @@ func TestHandleContractAPIBroadcastValidateFail(t *testing.T) { } mbi := dh.database.(*databasemocks.Plugin) mbi.On("InsertEvent", mock.Anything, mock.Anything).Return(nil) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ + action, err := dh.HandleDefinitionBroadcast(context.Background(), bs, &fftypes.Message{ Header: fftypes.MessageHeader{ - Tag: string(fftypes.SystemTagDefineContractAPI), + Tag: fftypes.SystemTagDefineContractAPI, }, }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionReject, action) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) assert.NoError(t, err) + bs.assertNoFinalizers() } func TestHandleContractAPIBroadcastPersistFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, bs := newTestDefinitionHandlers(t) ffi := testFFI() b, err := json.Marshal(ffi) assert.NoError(t, err) @@ -308,11 +313,12 @@ func TestHandleContractAPIBroadcastPersistFail(t *testing.T) { mbi.On("GetContractAPIByName", mock.Anything, mock.Anything, mock.Anything).Return(nil, nil) mbi.On("UpsertContractAPI", mock.Anything, mock.Anything, mock.Anything).Return(fmt.Errorf("pop")) mbi.On("InsertEvent", mock.Anything, mock.Anything).Return(nil) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ + action, err := dh.HandleDefinitionBroadcast(context.Background(), bs, 
&fftypes.Message{ Header: fftypes.MessageHeader{ - Tag: string(fftypes.SystemTagDefineContractAPI), + Tag: fftypes.SystemTagDefineContractAPI, }, }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionRetry, action) + assert.Equal(t, HandlerResult{Action: ActionRetry}, action) assert.Regexp(t, "pop", err) + bs.assertNoFinalizers() } diff --git a/internal/definitions/definition_handler_datatype.go b/internal/definitions/definition_handler_datatype.go index 1bbd6ea523..ab1eb58630 100644 --- a/internal/definitions/definition_handler_datatype.go +++ b/internal/definitions/definition_handler_datatype.go @@ -23,42 +23,41 @@ import ( "github.com/hyperledger/firefly/pkg/fftypes" ) -func (dh *definitionHandlers) handleDatatypeBroadcast(ctx context.Context, msg *fftypes.Message, data []*fftypes.Data, tx *fftypes.UUID) (DefinitionMessageAction, *DefinitionBatchActions, error) { +func (dh *definitionHandlers) handleDatatypeBroadcast(ctx context.Context, state DefinitionBatchState, msg *fftypes.Message, data []*fftypes.Data, tx *fftypes.UUID) (HandlerResult, error) { l := log.L(ctx) var dt fftypes.Datatype valid := dh.getSystemBroadcastPayload(ctx, msg, data, &dt) if !valid { - return ActionReject, nil, nil + return HandlerResult{Action: ActionReject}, nil } if err := dt.Validate(ctx, true); err != nil { l.Warnf("Unable to process datatype broadcast %s - validate failed: %s", msg.Header.ID, err) - return ActionReject, nil, nil + return HandlerResult{Action: ActionReject}, nil } if err := dh.data.CheckDatatype(ctx, dt.Namespace, &dt); err != nil { l.Warnf("Unable to process datatype broadcast %s - schema check: %s", msg.Header.ID, err) - return ActionReject, nil, nil + return HandlerResult{Action: ActionReject}, nil } existing, err := dh.database.GetDatatypeByName(ctx, dt.Namespace, dt.Name, dt.Version) if err != nil { - return ActionRetry, nil, err // We only return database errors + return HandlerResult{Action: ActionRetry}, err // We only return database errors } if existing != nil { l.Warnf("Unable to process datatype broadcast %s (%s:%s) - duplicate of %v", msg.Header.ID, dt.Namespace, dt, existing.ID) - return ActionReject, nil, nil + return HandlerResult{Action: ActionReject}, nil } if err = dh.database.UpsertDatatype(ctx, &dt, false); err != nil { - return ActionRetry, nil, err + return HandlerResult{Action: ActionRetry}, err } - return ActionConfirm, &DefinitionBatchActions{ - Finalize: func(ctx context.Context) error { - event := fftypes.NewEvent(fftypes.EventTypeDatatypeConfirmed, dt.Namespace, dt.ID, tx) - return dh.database.InsertEvent(ctx, event) - }, - }, nil + state.AddFinalize(func(ctx context.Context) error { + event := fftypes.NewEvent(fftypes.EventTypeDatatypeConfirmed, dt.Namespace, dt.ID, tx) + return dh.database.InsertEvent(ctx, event) + }) + return HandlerResult{Action: ActionConfirm}, nil } diff --git a/internal/definitions/definition_handler_datatype_test.go b/internal/definitions/definition_handler_datatype_test.go index 02a0d3eee7..87c4fde596 100644 --- a/internal/definitions/definition_handler_datatype_test.go +++ b/internal/definitions/definition_handler_datatype_test.go @@ -30,7 +30,7 @@ import ( ) func TestHandleDefinitionBroadcastDatatypeOk(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, bs := newTestDefinitionHandlers(t) dt := &fftypes.Datatype{ ID: fftypes.NewUUID(), @@ -53,14 +53,14 @@ func TestHandleDefinitionBroadcastDatatypeOk(t *testing.T) { mbi.On("GetDatatypeByName", mock.Anything, "ns1", "name1", "ver1").Return(nil, nil) 
mbi.On("UpsertDatatype", mock.Anything, mock.Anything, false).Return(nil) mbi.On("InsertEvent", mock.Anything, mock.Anything).Return(nil) - action, ba, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ + action, err := dh.HandleDefinitionBroadcast(context.Background(), bs, &fftypes.Message{ Header: fftypes.MessageHeader{ - Tag: string(fftypes.SystemTagDefineDatatype), + Tag: fftypes.SystemTagDefineDatatype, }, }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionConfirm, action) + assert.Equal(t, HandlerResult{Action: ActionConfirm}, action) assert.NoError(t, err) - err = ba.Finalize(context.Background()) + err = bs.finalizers[0](context.Background()) assert.NoError(t, err) mdm.AssertExpectations(t) @@ -68,7 +68,7 @@ func TestHandleDefinitionBroadcastDatatypeOk(t *testing.T) { } func TestHandleDefinitionBroadcastDatatypeEventFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, bs := newTestDefinitionHandlers(t) dt := &fftypes.Datatype{ ID: fftypes.NewUUID(), @@ -91,14 +91,14 @@ func TestHandleDefinitionBroadcastDatatypeEventFail(t *testing.T) { mbi.On("GetDatatypeByName", mock.Anything, "ns1", "name1", "ver1").Return(nil, nil) mbi.On("UpsertDatatype", mock.Anything, mock.Anything, false).Return(nil) mbi.On("InsertEvent", mock.Anything, mock.Anything).Return(fmt.Errorf("pop")) - action, ba, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ + action, err := dh.HandleDefinitionBroadcast(context.Background(), bs, &fftypes.Message{ Header: fftypes.MessageHeader{ - Tag: string(fftypes.SystemTagDefineDatatype), + Tag: fftypes.SystemTagDefineDatatype, }, }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionConfirm, action) + assert.Equal(t, HandlerResult{Action: ActionConfirm}, action) assert.NoError(t, err) - err = ba.Finalize(context.Background()) + err = bs.finalizers[0](context.Background()) assert.EqualError(t, err, "pop") mdm.AssertExpectations(t) @@ -106,7 +106,7 @@ func TestHandleDefinitionBroadcastDatatypeEventFail(t *testing.T) { } func TestHandleDefinitionBroadcastDatatypeMissingID(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, bs := newTestDefinitionHandlers(t) dt := &fftypes.Datatype{ Validator: fftypes.ValidatorTypeJSON, @@ -122,17 +122,18 @@ func TestHandleDefinitionBroadcastDatatypeMissingID(t *testing.T) { Value: fftypes.JSONAnyPtrBytes(b), } - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ + action, err := dh.HandleDefinitionBroadcast(context.Background(), bs, &fftypes.Message{ Header: fftypes.MessageHeader{ - Tag: string(fftypes.SystemTagDefineDatatype), + Tag: fftypes.SystemTagDefineDatatype, }, }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionReject, action) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) assert.NoError(t, err) + bs.assertNoFinalizers() } func TestHandleDefinitionBroadcastBadSchema(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, bs := newTestDefinitionHandlers(t) dt := &fftypes.Datatype{ ID: fftypes.NewUUID(), @@ -151,19 +152,20 @@ func TestHandleDefinitionBroadcastBadSchema(t *testing.T) { mdm := dh.data.(*datamocks.Manager) mdm.On("CheckDatatype", mock.Anything, "ns1", mock.Anything).Return(fmt.Errorf("pop")) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ + action, err := dh.HandleDefinitionBroadcast(context.Background(), bs, &fftypes.Message{ Header: fftypes.MessageHeader{ - Tag: string(fftypes.SystemTagDefineDatatype), + Tag: 
fftypes.SystemTagDefineDatatype, }, }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionReject, action) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) assert.NoError(t, err) mdm.AssertExpectations(t) + bs.assertNoFinalizers() } func TestHandleDefinitionBroadcastMissingData(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, bs := newTestDefinitionHandlers(t) dt := &fftypes.Datatype{ ID: fftypes.NewUUID(), @@ -175,17 +177,18 @@ func TestHandleDefinitionBroadcastMissingData(t *testing.T) { } dt.Hash = dt.Value.Hash() - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ + action, err := dh.HandleDefinitionBroadcast(context.Background(), bs, &fftypes.Message{ Header: fftypes.MessageHeader{ - Tag: string(fftypes.SystemTagDefineDatatype), + Tag: fftypes.SystemTagDefineDatatype, }, }, []*fftypes.Data{}, fftypes.NewUUID()) - assert.Equal(t, ActionReject, action) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) assert.NoError(t, err) + bs.assertNoFinalizers() } func TestHandleDefinitionBroadcastDatatypeLookupFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, bs := newTestDefinitionHandlers(t) dt := &fftypes.Datatype{ ID: fftypes.NewUUID(), @@ -206,21 +209,22 @@ func TestHandleDefinitionBroadcastDatatypeLookupFail(t *testing.T) { mdm.On("CheckDatatype", mock.Anything, "ns1", mock.Anything).Return(nil) mbi := dh.database.(*databasemocks.Plugin) mbi.On("GetDatatypeByName", mock.Anything, "ns1", "name1", "ver1").Return(nil, fmt.Errorf("pop")) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ + action, err := dh.HandleDefinitionBroadcast(context.Background(), bs, &fftypes.Message{ Header: fftypes.MessageHeader{ Namespace: fftypes.SystemNamespace, - Tag: string(fftypes.SystemTagDefineDatatype), + Tag: fftypes.SystemTagDefineDatatype, }, }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionRetry, action) + assert.Equal(t, HandlerResult{Action: ActionRetry}, action) assert.EqualError(t, err, "pop") mdm.AssertExpectations(t) mbi.AssertExpectations(t) + bs.assertNoFinalizers() } func TestHandleDefinitionBroadcastUpsertFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, bs := newTestDefinitionHandlers(t) dt := &fftypes.Datatype{ ID: fftypes.NewUUID(), @@ -242,20 +246,21 @@ func TestHandleDefinitionBroadcastUpsertFail(t *testing.T) { mbi := dh.database.(*databasemocks.Plugin) mbi.On("GetDatatypeByName", mock.Anything, "ns1", "name1", "ver1").Return(nil, nil) mbi.On("UpsertDatatype", mock.Anything, mock.Anything, false).Return(fmt.Errorf("pop")) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ + action, err := dh.HandleDefinitionBroadcast(context.Background(), bs, &fftypes.Message{ Header: fftypes.MessageHeader{ - Tag: string(fftypes.SystemTagDefineDatatype), + Tag: fftypes.SystemTagDefineDatatype, }, }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionRetry, action) + assert.Equal(t, HandlerResult{Action: ActionRetry}, action) assert.EqualError(t, err, "pop") mdm.AssertExpectations(t) mbi.AssertExpectations(t) + bs.assertNoFinalizers() } func TestHandleDefinitionBroadcastDatatypeDuplicate(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, bs := newTestDefinitionHandlers(t) dt := &fftypes.Datatype{ ID: fftypes.NewUUID(), @@ -276,14 +281,15 @@ func TestHandleDefinitionBroadcastDatatypeDuplicate(t *testing.T) { mdm.On("CheckDatatype", mock.Anything, "ns1", mock.Anything).Return(nil) 
mbi := dh.database.(*databasemocks.Plugin) mbi.On("GetDatatypeByName", mock.Anything, "ns1", "name1", "ver1").Return(dt, nil) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ + action, err := dh.HandleDefinitionBroadcast(context.Background(), bs, &fftypes.Message{ Header: fftypes.MessageHeader{ - Tag: string(fftypes.SystemTagDefineDatatype), + Tag: fftypes.SystemTagDefineDatatype, }, }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionReject, action) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) assert.NoError(t, err) mdm.AssertExpectations(t) mbi.AssertExpectations(t) + bs.assertNoFinalizers() } diff --git a/internal/definitions/definition_handler_identity_claim.go b/internal/definitions/definition_handler_identity_claim.go new file mode 100644 index 0000000000..ec715357f1 --- /dev/null +++ b/internal/definitions/definition_handler_identity_claim.go @@ -0,0 +1,218 @@ +// Copyright © 2022 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package definitions + +import ( + "context" + "fmt" + + "github.com/hyperledger/firefly/internal/log" + "github.com/hyperledger/firefly/pkg/database" + "github.com/hyperledger/firefly/pkg/fftypes" +) + +func (dh *definitionHandlers) handleIdentityClaimBroadcast(ctx context.Context, state DefinitionBatchState, msg *fftypes.Message, data []*fftypes.Data, verificationID *fftypes.UUID) (HandlerResult, error) { + var claim fftypes.IdentityClaim + valid := dh.getSystemBroadcastPayload(ctx, msg, data, &claim) + if !valid { + return HandlerResult{Action: ActionReject}, nil + } + + return dh.handleIdentityClaim(ctx, state, msg, &claim, verificationID) + +} + +func (dh *definitionHandlers) verifyClaimSignature(ctx context.Context, msg *fftypes.Message, identity *fftypes.Identity, parent *fftypes.Identity) (valid bool) { + + author := msg.Header.Author + if author == "" { + return false + } + + var expectedSigner *fftypes.Identity + switch { + case identity.Type == fftypes.IdentityTypeNode: + // In the special case of a node, the parent signs it directly + expectedSigner = parent + default: + expectedSigner = identity + } + + valid = author == expectedSigner.DID || + (expectedSigner.Type == fftypes.IdentityTypeOrg && author == fmt.Sprintf("%s%s", fftypes.FireFlyOrgDIDPrefix, expectedSigner.ID)) + if !valid { + log.L(ctx).Warnf("Unable to process identity claim %s - signature mismatch type=%s author=%s expected=%s", msg.Header.ID, identity.Type, author, expectedSigner.DID) + } + return valid +} + +func (dh *definitionHandlers) getClaimVerifier(msg *fftypes.Message, identity *fftypes.Identity) *fftypes.Verifier { + verifier := &fftypes.Verifier{ + Identity: identity.ID, + Namespace: identity.Namespace, + } + switch identity.Type { + case fftypes.IdentityTypeNode: + verifier.VerifierRef.Type = fftypes.VerifierTypeFFDXPeerID + verifier.VerifierRef.Value = identity.Profile.GetString("id") + default: + verifier.VerifierRef.Type = 
dh.blockchain.VerifierType() + verifier.VerifierRef.Value = msg.Header.Key + } + verifier.Seal() + return verifier +} + +func (dh *definitionHandlers) confirmVerificationForClaim(ctx context.Context, state DefinitionBatchState, msg *fftypes.Message, identity, parent *fftypes.Identity) (*fftypes.UUID, error) { + // Query for messages on the topic for this DID, signed by the right identity + idTopic := identity.Topic() + fb := database.MessageQueryFactory.NewFilter(ctx) + filter := fb.And( + fb.Eq("topics", idTopic), + fb.Eq("author", parent.DID), + fb.Eq("type", fftypes.MessageTypeDefinition), + fb.Eq("state", fftypes.MessageStateConfirmed), + fb.Eq("tag", fftypes.SystemTagIdentityVerification), + ) + candidates, _, err := dh.database.GetMessages(ctx, filter) + if err != nil { + return nil, err + } + // We also need to check pending messages in the current pin batch + for _, pending := range state.GetPendingConfirm() { + if pending.Header.Topics.String() == idTopic && + pending.Header.Author == parent.DID && + pending.Header.Type == fftypes.MessageTypeDefinition && + pending.Header.Tag == fftypes.SystemTagIdentityVerification { + candidates = append(candidates, pending) + } + } + for _, candidate := range candidates { + data, foundAll, err := dh.data.GetMessageData(ctx, candidate, true) + if err != nil { + return nil, err + } + identityMatches := false + var verificationID *fftypes.UUID + var verificationHash *fftypes.Bytes32 + if foundAll { + var verification fftypes.IdentityVerification + if !dh.getSystemBroadcastPayload(ctx, msg, data, &verification) { + return nil, nil + } + identityMatches = verification.Identity.Equals(ctx, &identity.IdentityBase) + verificationID = verification.Claim.ID + verificationHash = verification.Claim.Hash + if identityMatches && msg.Header.ID.Equals(verificationID) && msg.Hash.Equals(verificationHash) { + return candidate.Header.ID, nil + } + } + log.L(ctx).Warnf("Skipping invalid potential verification '%s' for identity claimID='%s' claimHash=%s: foundData=%t identityMatch=%t id=%s hash=%s", candidate.Header.ID, msg.Header.ID, msg.Hash, foundAll, identityMatches, verificationID, verificationHash) + } + return nil, nil +} + +func (dh *definitionHandlers) handleIdentityClaim(ctx context.Context, state DefinitionBatchState, msg *fftypes.Message, identityClaim *fftypes.IdentityClaim, verificationID *fftypes.UUID) (HandlerResult, error) { + l := log.L(ctx) + + identity := identityClaim.Identity + parent, retryable, err := dh.identity.VerifyIdentityChain(ctx, identity) + if err != nil && retryable { + return HandlerResult{Action: ActionRetry}, err + } else if err != nil { + // This cannot be processed as the parent does not exist (or similar). + // We treat this as a bad request, as nodes should not be broadcast until the parent identity + // is already confirmed. (Note different processing for org/custom children, where there's a parent + // verification to coordinate).
+ l.Warnf("Unable to process identity claim %s: %s", msg.Header.ID, err) + return HandlerResult{Action: ActionReject}, nil + } + + // Check signature verification + if !dh.verifyClaimSignature(ctx, msg, identity, parent) { + return HandlerResult{Action: ActionReject}, nil + } + + existingIdentity, err := dh.database.GetIdentityByName(ctx, identity.Type, identity.Namespace, identity.Name) + if err == nil && existingIdentity == nil { + existingIdentity, err = dh.database.GetIdentityByID(ctx, identity.ID) + } + if err != nil { + return HandlerResult{Action: ActionRetry}, err // retry database errors + } + if existingIdentity != nil && !existingIdentity.IdentityBase.Equals(ctx, &identity.IdentityBase) { + // If the existing one matches - this is just idempotent replay. No action needed, just confirm + l.Warnf("Unable to process identity claim %s - conflict with existing: %v", msg.Header.ID, existingIdentity.ID) + return HandlerResult{Action: ActionReject}, nil + } + + // Check uniquness of verifier + verifier := dh.getClaimVerifier(msg, identity) + existingVerifier, err := dh.database.GetVerifierByValue(ctx, verifier.Type, identity.Namespace, verifier.Value) + if err != nil { + return HandlerResult{Action: ActionRetry}, err // retry database errors + } + if existingVerifier != nil && !existingVerifier.Identity.Equals(identity.ID) { + log.L(ctx).Warnf("Unable to process identity claim %s - verifier type=%s value=%s already registered: %v", msg.Header.ID, verifier.Type, verifier.Value, existingVerifier.Hash) + return HandlerResult{Action: ActionReject}, nil + } + + if parent != nil && identity.Type != fftypes.IdentityTypeNode { + // The verification might be passed into this function, if we confirm the verification second, + // or we might have to hunt for it, if we confirm the verification first. + if verificationID == nil { + // Search for a corresponding verification message on the same topic + verificationID, err = dh.confirmVerificationForClaim(ctx, state, msg, identity, parent) + if err != nil { + return HandlerResult{Action: ActionRetry}, err // retry database errors + } + } + if verificationID == nil { + // Ok, we still confirm the message as it's valid, and we do not want to block the context. + // But we do NOT go on to create the identity - we will be called back + return HandlerResult{Action: ActionConfirm}, nil + } + log.L(ctx).Infof("Identity '%s' verified claim='%s' verification='%s'", identity.ID, msg.Header.ID, verificationID) + identity.Messages.Verification = verificationID + } + + if existingVerifier == nil { + if err = dh.database.UpsertVerifier(ctx, verifier, database.UpsertOptimizationNew); err != nil { + return HandlerResult{Action: ActionRetry}, err + } + } + if existingIdentity == nil { + if err = dh.database.UpsertIdentity(ctx, identity, database.UpsertOptimizationNew); err != nil { + return HandlerResult{Action: ActionRetry}, err + } + } + + // If this is a node, we need to add that peer + if identity.Type == fftypes.IdentityTypeNode { + state.AddPreFinalize( + func(ctx context.Context) error { + // Tell the data exchange about this node. 
Treat these errors like database errors - and return for retry processing + return dh.exchange.AddPeer(ctx, identity.Profile) + }) + } + + state.AddFinalize(func(ctx context.Context) error { + event := fftypes.NewEvent(fftypes.EventTypeIdentityConfirmed, identity.Namespace, identity.ID, nil) + return dh.database.InsertEvent(ctx, event) + }) + return HandlerResult{Action: ActionConfirm}, nil +} diff --git a/internal/definitions/definition_handler_identity_claim_test.go b/internal/definitions/definition_handler_identity_claim_test.go new file mode 100644 index 0000000000..7ba5e66d2c --- /dev/null +++ b/internal/definitions/definition_handler_identity_claim_test.go @@ -0,0 +1,585 @@ +// Copyright © 2021 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package definitions + +import ( + "context" + "encoding/json" + "fmt" + "testing" + + "github.com/hyperledger/firefly/mocks/databasemocks" + "github.com/hyperledger/firefly/mocks/datamocks" + "github.com/hyperledger/firefly/mocks/identitymanagermocks" + "github.com/hyperledger/firefly/pkg/database" + "github.com/hyperledger/firefly/pkg/fftypes" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +func testOrgIdentity(t *testing.T, name string) *fftypes.Identity { + i := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + Type: fftypes.IdentityTypeOrg, + Namespace: fftypes.SystemNamespace, + Name: name, + }, + IdentityProfile: fftypes.IdentityProfile{ + Description: "desc", + Profile: fftypes.JSONObject{ + "some": "profiledata", + }, + }, + Messages: fftypes.IdentityMessages{ + Claim: fftypes.NewUUID(), + }, + } + var err error + i.DID, err = i.GenerateDID(context.Background()) + assert.NoError(t, err) + return i +} + +func testCustomIdentity(t *testing.T, name string, parent *fftypes.Identity) *fftypes.Identity { + i := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + Type: fftypes.IdentityTypeCustom, + Namespace: "ns1", + Name: name, + Parent: parent.ID, + }, + IdentityProfile: fftypes.IdentityProfile{ + Description: "custom 1", + Profile: fftypes.JSONObject{ + "some": "profiledata", + }, + }, + Messages: fftypes.IdentityMessages{ + Claim: fftypes.NewUUID(), + }, + } + var err error + i.DID, err = i.GenerateDID(context.Background()) + assert.NoError(t, err) + return i +} + +func testCustomClaimAndVerification(t *testing.T) (*fftypes.Identity, *fftypes.Identity, *fftypes.Message, *fftypes.Data, *fftypes.Message, *fftypes.Data) { + org1 := testOrgIdentity(t, "org1") + custom1 := testCustomIdentity(t, "custom1", org1) + + ic := &fftypes.IdentityClaim{ + Identity: custom1, + } + b, err := json.Marshal(&ic) + assert.NoError(t, err) + claimData := &fftypes.Data{ + ID: fftypes.NewUUID(), + Value: fftypes.JSONAnyPtrBytes(b), + } + + claimMsg := &fftypes.Message{ + Header: fftypes.MessageHeader{ + ID: custom1.Messages.Claim, + Type: fftypes.MessageTypeDefinition, + Tag: 
fftypes.SystemTagIdentityClaim, + Topics: fftypes.FFStringArray{custom1.Topic()}, + SignerRef: fftypes.SignerRef{ + Author: custom1.DID, + Key: "0x12345", + }, + }, + } + claimMsg.Hash = fftypes.NewRandB32() + + iv := &fftypes.IdentityVerification{ + Identity: custom1.IdentityBase, + Claim: fftypes.MessageRef{ + ID: claimMsg.Header.ID, + Hash: claimMsg.Hash, + }, + } + b, err = json.Marshal(&iv) + assert.NoError(t, err) + verifyData := &fftypes.Data{ + Value: fftypes.JSONAnyPtrBytes(b), + } + + verifyMsg := &fftypes.Message{ + Header: fftypes.MessageHeader{ + ID: fftypes.NewUUID(), + Type: fftypes.MessageTypeDefinition, + Tag: fftypes.SystemTagIdentityVerification, + Topics: fftypes.FFStringArray{custom1.Topic()}, + SignerRef: fftypes.SignerRef{ + Author: org1.DID, + Key: "0x2456", + }, + }, + } + + return custom1, org1, claimMsg, claimData, verifyMsg, verifyData +} + +func TestHandleDefinitionIdentityClaimCustomWithExistingParentVerificationOk(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + custom1, org1, claimMsg, claimData, verifyMsg, verifyData := testCustomClaimAndVerification(t) + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("VerifyIdentityChain", ctx, custom1).Return(org1, false, nil) + + mdi := dh.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByName", ctx, custom1.Type, custom1.Namespace, custom1.Name).Return(nil, nil) + mdi.On("GetIdentityByID", ctx, custom1.ID).Return(nil, nil) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, "ns1", "0x12345").Return(nil, nil) + mdi.On("GetMessages", ctx, mock.Anything).Return([]*fftypes.Message{ + {Header: fftypes.MessageHeader{ID: fftypes.NewUUID(), Tag: "skipped missing data"}}, + }, nil, nil) + mdi.On("UpsertIdentity", ctx, mock.MatchedBy(func(identity *fftypes.Identity) bool { + assert.Equal(t, *claimMsg.Header.ID, *identity.Messages.Claim) + assert.Equal(t, *verifyMsg.Header.ID, *identity.Messages.Verification) + return true + }), database.UpsertOptimizationNew).Return(nil) + mdi.On("UpsertVerifier", ctx, mock.MatchedBy(func(verifier *fftypes.Verifier) bool { + assert.Equal(t, fftypes.VerifierTypeEthAddress, verifier.Type) + assert.Equal(t, "0x12345", verifier.Value) + assert.Equal(t, *custom1.ID, *verifier.Identity) + return true + }), database.UpsertOptimizationNew).Return(nil) + mdi.On("InsertEvent", mock.Anything, mock.MatchedBy(func(event *fftypes.Event) bool { + return event.Type == fftypes.EventTypeIdentityConfirmed + })).Return(nil) + + mdm := dh.data.(*datamocks.Manager) + mdm.On("GetMessageData", ctx, mock.Anything, true).Return([]*fftypes.Data{verifyData}, false, nil).Once() + mdm.On("GetMessageData", ctx, mock.Anything, true).Return([]*fftypes.Data{verifyData}, true, nil) + + bs.pendingConfirms[*verifyMsg.Header.ID] = verifyMsg + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, claimMsg, []*fftypes.Data{claimData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionConfirm}, action) + assert.NoError(t, err) + + err = bs.finalizers[0](ctx) + assert.NoError(t, err) + + mim.AssertExpectations(t) + mdi.AssertExpectations(t) + mdm.AssertExpectations(t) + +} + +func TestHandleDefinitionIdentityClaimIdempotentReplay(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + custom1, org1, claimMsg, claimData, verifyMsg, verifyData := testCustomClaimAndVerification(t) + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("VerifyIdentityChain", ctx, custom1).Return(org1, false, nil) + + mdi 
:= dh.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByName", ctx, custom1.Type, custom1.Namespace, custom1.Name).Return(nil, nil) + mdi.On("GetIdentityByID", ctx, custom1.ID).Return(custom1, nil) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, "ns1", "0x12345").Return(&fftypes.Verifier{ + Identity: custom1.ID, + Namespace: "ns1", + VerifierRef: fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "0x12345", + }, + }, nil) + mdi.On("GetMessages", ctx, mock.Anything).Return([]*fftypes.Message{ + {Header: fftypes.MessageHeader{ID: fftypes.NewUUID(), Tag: "skipped missing data"}}, + }, nil, nil) + mdi.On("InsertEvent", mock.Anything, mock.MatchedBy(func(event *fftypes.Event) bool { + return event.Type == fftypes.EventTypeIdentityConfirmed + })).Return(nil) + + mdm := dh.data.(*datamocks.Manager) + mdm.On("GetMessageData", ctx, mock.Anything, true).Return([]*fftypes.Data{verifyData}, false, nil).Once() + mdm.On("GetMessageData", ctx, mock.Anything, true).Return([]*fftypes.Data{verifyData}, true, nil) + + bs.pendingConfirms[*verifyMsg.Header.ID] = verifyMsg + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, claimMsg, []*fftypes.Data{claimData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionConfirm}, action) + assert.NoError(t, err) + + err = bs.finalizers[0](ctx) + assert.NoError(t, err) + + mim.AssertExpectations(t) + mdi.AssertExpectations(t) + mdm.AssertExpectations(t) +} + +func TestHandleDefinitionIdentityClaimFailInsertIdentity(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + custom1, org1, claimMsg, claimData, verifyMsg, verifyData := testCustomClaimAndVerification(t) + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("VerifyIdentityChain", ctx, custom1).Return(org1, false, nil) + + mdi := dh.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByName", ctx, custom1.Type, custom1.Namespace, custom1.Name).Return(nil, nil) + mdi.On("GetIdentityByID", ctx, custom1.ID).Return(nil, nil) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, "ns1", "0x12345").Return(nil, nil) + mdi.On("GetMessages", ctx, mock.Anything).Return([]*fftypes.Message{}, nil, nil) + mdi.On("UpsertVerifier", ctx, mock.Anything, database.UpsertOptimizationNew).Return(nil) + mdi.On("UpsertIdentity", ctx, mock.Anything, database.UpsertOptimizationNew).Return(fmt.Errorf("pop")) + + mdm := dh.data.(*datamocks.Manager) + mdm.On("GetMessageData", ctx, mock.Anything, true).Return([]*fftypes.Data{verifyData}, true, nil) + + bs.pendingConfirms[*verifyMsg.Header.ID] = verifyMsg + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, claimMsg, []*fftypes.Data{claimData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionRetry}, action) + assert.Regexp(t, "pop", err) + + mim.AssertExpectations(t) + mdi.AssertExpectations(t) + mdm.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityClaimVerificationDataFail(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + custom1, org1, claimMsg, claimData, verifyMsg, _ := testCustomClaimAndVerification(t) + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("VerifyIdentityChain", ctx, custom1).Return(org1, false, nil) + + mdi := dh.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByName", ctx, custom1.Type, custom1.Namespace, custom1.Name).Return(nil, nil) + mdi.On("GetIdentityByID", ctx, custom1.ID).Return(nil, nil) + mdi.On("GetVerifierByValue", ctx, 
fftypes.VerifierTypeEthAddress, "ns1", "0x12345").Return(nil, nil) + mdi.On("GetMessages", ctx, mock.Anything).Return([]*fftypes.Message{}, nil, nil) + + mdm := dh.data.(*datamocks.Manager) + mdm.On("GetMessageData", ctx, mock.Anything, true).Return(nil, false, fmt.Errorf("pop")) + + bs.pendingConfirms[*verifyMsg.Header.ID] = verifyMsg + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, claimMsg, []*fftypes.Data{claimData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionRetry}, action) + assert.Regexp(t, "pop", err) + + mim.AssertExpectations(t) + mdi.AssertExpectations(t) + mdm.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityClaimVerificationMissingData(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + custom1, org1, claimMsg, claimData, verifyMsg, _ := testCustomClaimAndVerification(t) + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("VerifyIdentityChain", ctx, custom1).Return(org1, false, nil) + + mdi := dh.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByName", ctx, custom1.Type, custom1.Namespace, custom1.Name).Return(nil, nil) + mdi.On("GetIdentityByID", ctx, custom1.ID).Return(nil, nil) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, "ns1", "0x12345").Return(nil, nil) + mdi.On("GetMessages", ctx, mock.Anything).Return([]*fftypes.Message{}, nil, nil) + + mdm := dh.data.(*datamocks.Manager) + mdm.On("GetMessageData", ctx, mock.Anything, true).Return([]*fftypes.Data{}, true, nil) + + bs.pendingConfirms[*verifyMsg.Header.ID] = verifyMsg + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, claimMsg, []*fftypes.Data{claimData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionConfirm}, action) + assert.NoError(t, err) + + mim.AssertExpectations(t) + mdi.AssertExpectations(t) + mdm.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityClaimFailInsertVerifier(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + custom1, org1, claimMsg, claimData, verifyMsg, verifyData := testCustomClaimAndVerification(t) + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("VerifyIdentityChain", ctx, custom1).Return(org1, false, nil) + + mdi := dh.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByName", ctx, custom1.Type, custom1.Namespace, custom1.Name).Return(nil, nil) + mdi.On("GetIdentityByID", ctx, custom1.ID).Return(nil, nil) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, "ns1", "0x12345").Return(nil, nil) + mdi.On("GetMessages", ctx, mock.Anything).Return([]*fftypes.Message{}, nil, nil) + mdi.On("UpsertVerifier", ctx, mock.Anything, database.UpsertOptimizationNew).Return(fmt.Errorf("pop")) + + mdm := dh.data.(*datamocks.Manager) + mdm.On("GetMessageData", ctx, mock.Anything, true).Return([]*fftypes.Data{verifyData}, true, nil) + + bs.pendingConfirms[*verifyMsg.Header.ID] = verifyMsg + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, claimMsg, []*fftypes.Data{claimData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionRetry}, action) + assert.Regexp(t, "pop", err) + + mim.AssertExpectations(t) + mdi.AssertExpectations(t) + mdm.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityClaimCustomMissingParentVerificationOk(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + custom1, org1, claimMsg, claimData, _, _ := 
testCustomClaimAndVerification(t) + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("VerifyIdentityChain", ctx, custom1).Return(org1, false, nil) + + mdi := dh.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByName", ctx, custom1.Type, custom1.Namespace, custom1.Name).Return(nil, nil) + mdi.On("GetIdentityByID", ctx, custom1.ID).Return(nil, nil) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, "ns1", "0x12345").Return(nil, nil) + mdi.On("GetMessages", ctx, mock.Anything).Return([]*fftypes.Message{}, nil, nil) + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, claimMsg, []*fftypes.Data{claimData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionConfirm}, action) // Just wait for the verification to come in later + assert.NoError(t, err) + + mim.AssertExpectations(t) + mdi.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityClaimCustomParentVerificationFail(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + custom1, org1, claimMsg, claimData, _, _ := testCustomClaimAndVerification(t) + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("VerifyIdentityChain", ctx, custom1).Return(org1, false, nil) + + mdi := dh.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByName", ctx, custom1.Type, custom1.Namespace, custom1.Name).Return(nil, nil) + mdi.On("GetIdentityByID", ctx, custom1.ID).Return(nil, nil) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, "ns1", "0x12345").Return(nil, nil) + mdi.On("GetMessages", ctx, mock.Anything).Return(nil, nil, fmt.Errorf("pop")) + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, claimMsg, []*fftypes.Data{claimData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionRetry}, action) + assert.Regexp(t, "pop", err) + + mim.AssertExpectations(t) + mdi.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityClaimVerifierClash(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + custom1, org1, claimMsg, claimData, _, _ := testCustomClaimAndVerification(t) + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("VerifyIdentityChain", ctx, custom1).Return(org1, false, nil) + + mdi := dh.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByName", ctx, custom1.Type, custom1.Namespace, custom1.Name).Return(nil, nil) + mdi.On("GetIdentityByID", ctx, custom1.ID).Return(nil, nil) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, "ns1", "0x12345").Return(&fftypes.Verifier{ + Hash: fftypes.NewRandB32(), + }, nil) + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, claimMsg, []*fftypes.Data{claimData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) + assert.NoError(t, err) + + mim.AssertExpectations(t) + mdi.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityClaimVerifierError(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + custom1, org1, claimMsg, claimData, _, _ := testCustomClaimAndVerification(t) + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("VerifyIdentityChain", ctx, custom1).Return(org1, false, nil) + + mdi := dh.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByName", ctx, custom1.Type, custom1.Namespace, custom1.Name).Return(nil, nil) + mdi.On("GetIdentityByID", ctx, custom1.ID).Return(nil, nil) + mdi.On("GetVerifierByValue", ctx, 
fftypes.VerifierTypeEthAddress, "ns1", "0x12345").Return(nil, fmt.Errorf("pop")) + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, claimMsg, []*fftypes.Data{claimData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionRetry}, action) + assert.Regexp(t, "pop", err) + + mim.AssertExpectations(t) + mdi.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityClaimIdentityClash(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + custom1, org1, claimMsg, claimData, _, _ := testCustomClaimAndVerification(t) + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("VerifyIdentityChain", ctx, custom1).Return(org1, false, nil) + + mdi := dh.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByName", ctx, custom1.Type, custom1.Namespace, custom1.Name).Return(&fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + }, + }, nil) + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, claimMsg, []*fftypes.Data{claimData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) + assert.NoError(t, err) + + mim.AssertExpectations(t) + mdi.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityClaimIdentityError(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + custom1, org1, claimMsg, claimData, _, _ := testCustomClaimAndVerification(t) + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("VerifyIdentityChain", ctx, custom1).Return(org1, false, nil) + + mdi := dh.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByName", ctx, custom1.Type, custom1.Namespace, custom1.Name).Return(nil, nil) + mdi.On("GetIdentityByID", ctx, custom1.ID).Return(nil, fmt.Errorf("pop")) + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, claimMsg, []*fftypes.Data{claimData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionRetry}, action) + assert.Regexp(t, "pop", err) + + mim.AssertExpectations(t) + mdi.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityMissingAuthor(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + custom1, org1, claimMsg, claimData, _, _ := testCustomClaimAndVerification(t) + claimMsg.Header.Author = "" + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("VerifyIdentityChain", ctx, custom1).Return(org1, false, nil) + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, claimMsg, []*fftypes.Data{claimData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) + assert.NoError(t, err) + + mim.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityClaimBadSignature(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + custom1, org1, claimMsg, claimData, _, _ := testCustomClaimAndVerification(t) + claimMsg.Header.Author = org1.DID // should be the child for the claim + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("VerifyIdentityChain", ctx, custom1).Return(org1, false, nil) + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, claimMsg, []*fftypes.Data{claimData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) + assert.NoError(t, err) + + mim.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityVerifyChainFail(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := 
context.Background() + + custom1, org1, claimMsg, claimData, _, _ := testCustomClaimAndVerification(t) + claimMsg.Header.Author = org1.DID // should be the child for the claim + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("VerifyIdentityChain", ctx, custom1).Return(nil, true, fmt.Errorf("pop")) + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, claimMsg, []*fftypes.Data{claimData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionRetry}, action) + assert.Regexp(t, "pop", err) + + mim.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityVerifyChainInvalid(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + custom1, org1, claimMsg, claimData, _, _ := testCustomClaimAndVerification(t) + claimMsg.Header.Author = org1.DID // should be the child for the claim + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("VerifyIdentityChain", ctx, custom1).Return(nil, false, fmt.Errorf("wrong")) + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, claimMsg, []*fftypes.Data{claimData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) + assert.NoError(t, err) + + mim.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityClaimBadData(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + _, org1, claimMsg, _, _, _ := testCustomClaimAndVerification(t) + claimMsg.Header.Author = org1.DID // should be the child for the claim + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, claimMsg, []*fftypes.Data{}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) + assert.NoError(t, err) + + bs.assertNoFinalizers() +} diff --git a/internal/definitions/definition_handler_identity_update.go b/internal/definitions/definition_handler_identity_update.go new file mode 100644 index 0000000000..5f1688e9d3 --- /dev/null +++ b/internal/definitions/definition_handler_identity_update.go @@ -0,0 +1,71 @@ +// Copyright © 2022 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package definitions + +import ( + "context" + + "github.com/hyperledger/firefly/internal/log" + "github.com/hyperledger/firefly/pkg/database" + "github.com/hyperledger/firefly/pkg/fftypes" +) + +func (dh *definitionHandlers) handleIdentityUpdateBroadcast(ctx context.Context, state DefinitionBatchState, msg *fftypes.Message, data []*fftypes.Data) (HandlerResult, error) { + var update fftypes.IdentityUpdate + valid := dh.getSystemBroadcastPayload(ctx, msg, data, &update) + if !valid { + return HandlerResult{Action: ActionReject}, nil + } + + // Validate the identity referenced by the update + err := update.Identity.Validate(ctx) + if err != nil { + log.L(ctx).Warnf("Invalid identity update message %s: %v", msg.Header.ID, err) + return HandlerResult{Action: ActionReject}, nil + } + + // Get the existing identity (must be a confirmed identity at the point an update is issued) + identity, err := dh.identity.CachedIdentityLookupByID(ctx, update.Identity.ID) + if err != nil { + return HandlerResult{Action: ActionRetry}, err + } + if identity == nil { + log.L(ctx).Warnf("Invalid identity update message %s - not found: %s", msg.Header.ID, update.Identity.ID) + return HandlerResult{Action: ActionReject}, nil + } + + // Check the author matches + if identity.DID != msg.Header.Author { + log.L(ctx).Warnf("Invalid identity update message %s - wrong author: %s", msg.Header.ID, msg.Header.Author) + return HandlerResult{Action: ActionReject}, nil + } + + // Update the profile + identity.IdentityProfile = update.Updates + identity.Messages.Update = msg.Header.ID + err = dh.database.UpsertIdentity(ctx, identity, database.UpsertOptimizationExisting) + if err != nil { + return HandlerResult{Action: ActionRetry}, err + } + + state.AddFinalize(func(ctx context.Context) error { + event := fftypes.NewEvent(fftypes.EventTypeIdentityUpdated, identity.Namespace, identity.ID, nil) + return dh.database.InsertEvent(ctx, event) + }) + return HandlerResult{Action: ActionConfirm}, err + +} diff --git a/internal/definitions/definition_handler_identity_update_test.go b/internal/definitions/definition_handler_identity_update_test.go new file mode 100644 index 0000000000..91cb7d0549 --- /dev/null +++ b/internal/definitions/definition_handler_identity_update_test.go @@ -0,0 +1,232 @@ +// Copyright © 2021 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
+ +package definitions + +import ( + "context" + "encoding/json" + "fmt" + "testing" + + "github.com/hyperledger/firefly/mocks/databasemocks" + "github.com/hyperledger/firefly/mocks/identitymanagermocks" + "github.com/hyperledger/firefly/pkg/database" + "github.com/hyperledger/firefly/pkg/fftypes" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +func testIdentityUpdate(t *testing.T) (*fftypes.Identity, *fftypes.Message, *fftypes.Data, *fftypes.IdentityUpdate) { + org1 := testOrgIdentity(t, "org1") + org1.Parent = fftypes.NewUUID() // Not involved in verification for updates, just must not change + + iu := &fftypes.IdentityUpdate{ + Identity: org1.IdentityBase, + Updates: fftypes.IdentityProfile{ + Profile: fftypes.JSONObject{ + "new": "profile", + }, + Description: "new description", + }, + } + b, err := json.Marshal(&iu) + assert.NoError(t, err) + updateData := &fftypes.Data{ + ID: fftypes.NewUUID(), + Value: fftypes.JSONAnyPtrBytes(b), + } + + updateMsg := &fftypes.Message{ + Header: fftypes.MessageHeader{ + ID: fftypes.NewUUID(), + Type: fftypes.MessageTypeDefinition, + Tag: fftypes.SystemTagIdentityUpdate, + Topics: fftypes.FFStringArray{org1.Topic()}, + SignerRef: fftypes.SignerRef{ + Author: org1.DID, + Key: "0x12345", + }, + }, + } + + return org1, updateMsg, updateData, iu +} + +func TestHandleDefinitionIdentityUpdateOk(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + org1, updateMsg, updateData, iu := testIdentityUpdate(t) + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("CachedIdentityLookupByID", ctx, org1.ID).Return(org1, nil) + + mdi := dh.database.(*databasemocks.Plugin) + mdi.On("UpsertIdentity", ctx, mock.MatchedBy(func(identity *fftypes.Identity) bool { + assert.Equal(t, *updateMsg.Header.ID, *identity.Messages.Update) + assert.Equal(t, org1.IdentityBase, identity.IdentityBase) + assert.Equal(t, iu.Updates, identity.IdentityProfile) + return true + }), database.UpsertOptimizationExisting).Return(nil) + mdi.On("InsertEvent", mock.Anything, mock.MatchedBy(func(event *fftypes.Event) bool { + return event.Type == fftypes.EventTypeIdentityUpdated + })).Return(nil) + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, updateMsg, []*fftypes.Data{updateData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionConfirm}, action) + assert.NoError(t, err) + + err = bs.finalizers[0](ctx) + assert.NoError(t, err) + + mim.AssertExpectations(t) + mdi.AssertExpectations(t) +} + +func TestHandleDefinitionIdentityUpdateUpsertFail(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + org1, updateMsg, updateData, _ := testIdentityUpdate(t) + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("CachedIdentityLookupByID", ctx, org1.ID).Return(org1, nil) + + mdi := dh.database.(*databasemocks.Plugin) + mdi.On("UpsertIdentity", ctx, mock.Anything, database.UpsertOptimizationExisting).Return(fmt.Errorf("pop")) + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, updateMsg, []*fftypes.Data{updateData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionRetry}, action) + assert.Regexp(t, "pop", err) + + mim.AssertExpectations(t) + mdi.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityInvalidIdentity(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + org1, updateMsg, updateData, _ := testIdentityUpdate(t) + updateMsg.Header.Author = "wrong" + + mim := 
dh.identity.(*identitymanagermocks.Manager) + mim.On("CachedIdentityLookupByID", ctx, org1.ID).Return(org1, nil) + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, updateMsg, []*fftypes.Data{updateData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) + assert.NoError(t, err) + + mim.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityNotFound(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + org1, updateMsg, updateData, _ := testIdentityUpdate(t) + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("CachedIdentityLookupByID", ctx, org1.ID).Return(nil, nil) + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, updateMsg, []*fftypes.Data{updateData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) + assert.NoError(t, err) + + mim.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityLookupFail(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + org1, updateMsg, updateData, _ := testIdentityUpdate(t) + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("CachedIdentityLookupByID", ctx, org1.ID).Return(nil, fmt.Errorf("pop")) + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, updateMsg, []*fftypes.Data{updateData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionRetry}, action) + assert.Regexp(t, "pop", err) + + mim.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityValidateFail(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + org1 := testOrgIdentity(t, "org1") + iu := &fftypes.IdentityUpdate{ + Identity: org1.IdentityBase, + } + iu.Identity.DID = "wrong" + b, err := json.Marshal(&iu) + assert.NoError(t, err) + updateData := &fftypes.Data{ + ID: fftypes.NewUUID(), + Value: fftypes.JSONAnyPtrBytes(b), + } + + updateMsg := &fftypes.Message{ + Header: fftypes.MessageHeader{ + ID: fftypes.NewUUID(), + Type: fftypes.MessageTypeDefinition, + Tag: fftypes.SystemTagIdentityUpdate, + Topics: fftypes.FFStringArray{org1.Topic()}, + SignerRef: fftypes.SignerRef{ + Author: org1.DID, + Key: "0x12345", + }, + }, + } + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, updateMsg, []*fftypes.Data{updateData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) + assert.NoError(t, err) + + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityMissingData(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + org1 := testOrgIdentity(t, "org1") + updateMsg := &fftypes.Message{ + Header: fftypes.MessageHeader{ + ID: fftypes.NewUUID(), + Type: fftypes.MessageTypeDefinition, + Tag: fftypes.SystemTagIdentityUpdate, + Topics: fftypes.FFStringArray{org1.Topic()}, + SignerRef: fftypes.SignerRef{ + Author: org1.DID, + Key: "0x12345", + }, + }, + } + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, updateMsg, []*fftypes.Data{}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) + assert.NoError(t, err) + + bs.assertNoFinalizers() +} diff --git a/internal/definitions/definition_handler_identity_verification.go b/internal/definitions/definition_handler_identity_verification.go new file mode 100644 index 0000000000..e9ef8d8105 --- /dev/null +++ b/internal/definitions/definition_handler_identity_verification.go @@ -0,0 +1,86 @@ +// Copyright © 2022 Kaleido, Inc. 
+// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package definitions + +import ( + "context" + + "github.com/hyperledger/firefly/internal/log" + "github.com/hyperledger/firefly/pkg/fftypes" +) + +func (dh *definitionHandlers) handleIdentityVerificationBroadcast(ctx context.Context, state DefinitionBatchState, verifyMsg *fftypes.Message, data []*fftypes.Data) (HandlerResult, error) { + var verification fftypes.IdentityVerification + valid := dh.getSystemBroadcastPayload(ctx, verifyMsg, data, &verification) + if !valid { + return HandlerResult{Action: ActionReject}, nil + } + + // See if we find the message to which it refers + err := verification.Identity.Validate(ctx) + if err != nil || verification.Identity.Parent == nil || verification.Claim.ID == nil || verification.Claim.Hash == nil { + log.L(ctx).Warnf("Invalid verification message %s: %v", verifyMsg.Header.ID, err) + return HandlerResult{Action: ActionReject}, nil + } + + // Check the verification is signed by the correct org + parent, err := dh.identity.CachedIdentityLookupByID(ctx, verification.Identity.Parent) + if err != nil { + return HandlerResult{Action: ActionRetry}, err + } + if parent == nil { + log.L(ctx).Warnf("Invalid verification message %s - parent not found: %s", verifyMsg.Header.ID, verification.Identity.Parent) + return HandlerResult{Action: ActionReject}, nil + } + if parent.DID != verifyMsg.Header.Author { + log.L(ctx).Warnf("Invalid verification message %s - parent '%s' does not match signer '%s'", verifyMsg.Header.ID, parent.DID, verifyMsg.Header.Author) + return HandlerResult{Action: ActionReject}, nil + } + + // At this point, this is a valid verification, but we don't know if the claim has arrived. + // It might be being processed in the same pin batch as us - so we can't rely on the database lookup below alone. + + // See if the message has already arrived, if so we need to queue a rewind to it + claimMsg, err := dh.database.GetMessageByID(ctx, verification.Claim.ID) + if err != nil { + return HandlerResult{Action: ActionRetry}, err + } + if claimMsg == nil || claimMsg.State != fftypes.MessageStateConfirmed { + claimMsg = state.GetPendingConfirm()[*verification.Claim.ID] + } + + if claimMsg != nil { + if !claimMsg.Hash.Equals(verification.Claim.Hash) { + log.L(ctx).Warnf("Invalid verification message %s - hash mismatch claim=%s verification=%s", verifyMsg.Header.ID, claimMsg.Hash, verification.Claim.Hash) + return HandlerResult{Action: ActionReject}, nil + } + data, foundAll, err := dh.data.GetMessageData(ctx, claimMsg, true) + if err != nil { + return HandlerResult{Action: ActionRetry}, err + } + if foundAll { + // The verification came in after the message, so we need to call the idempotent + // handler of the claim logic again + return dh.handleIdentityClaimBroadcast(ctx, state, claimMsg, data, verifyMsg.Header.ID) + } + } + + // Just confirm the verification - when the claim message is processed it will come back and look for + // this (now confirmed) verification message.
+ return HandlerResult{Action: ActionConfirm}, nil + +} diff --git a/internal/definitions/definition_handler_identity_verification_test.go b/internal/definitions/definition_handler_identity_verification_test.go new file mode 100644 index 0000000000..8d4455671a --- /dev/null +++ b/internal/definitions/definition_handler_identity_verification_test.go @@ -0,0 +1,292 @@ +// Copyright © 2021 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package definitions + +import ( + "context" + "encoding/json" + "fmt" + "testing" + + "github.com/hyperledger/firefly/mocks/databasemocks" + "github.com/hyperledger/firefly/mocks/datamocks" + "github.com/hyperledger/firefly/mocks/identitymanagermocks" + "github.com/hyperledger/firefly/pkg/database" + "github.com/hyperledger/firefly/pkg/fftypes" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +func TestHandleDefinitionIdentityVerificationWithExistingClaimOk(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + custom1, org1, claimMsg, claimData, verifyMsg, verifyData := testCustomClaimAndVerification(t) + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("CachedIdentityLookupByID", ctx, org1.ID).Return(org1, nil) + mim.On("VerifyIdentityChain", ctx, mock.Anything).Return(custom1, false, nil) + + mdi := dh.database.(*databasemocks.Plugin) + mdi.On("GetMessageByID", ctx, claimMsg.Header.ID).Return(nil, nil) // Simulate pending confirm in same pin batch + mdi.On("GetIdentityByName", ctx, custom1.Type, custom1.Namespace, custom1.Name).Return(nil, nil) + mdi.On("GetIdentityByID", ctx, custom1.ID).Return(nil, nil) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, "ns1", "0x12345").Return(nil, nil) + mdi.On("UpsertIdentity", ctx, mock.MatchedBy(func(identity *fftypes.Identity) bool { + assert.Equal(t, *claimMsg.Header.ID, *identity.Messages.Claim) + assert.Equal(t, *verifyMsg.Header.ID, *identity.Messages.Verification) + return true + }), database.UpsertOptimizationNew).Return(nil) + mdi.On("UpsertVerifier", ctx, mock.MatchedBy(func(verifier *fftypes.Verifier) bool { + assert.Equal(t, fftypes.VerifierTypeEthAddress, verifier.Type) + assert.Equal(t, "0x12345", verifier.Value) + assert.Equal(t, *custom1.ID, *verifier.Identity) + return true + }), database.UpsertOptimizationNew).Return(nil) + mdi.On("InsertEvent", mock.Anything, mock.MatchedBy(func(event *fftypes.Event) bool { + return event.Type == fftypes.EventTypeIdentityConfirmed + })).Return(nil) + + mdm := dh.data.(*datamocks.Manager) + mdm.On("GetMessageData", ctx, mock.Anything, true).Return([]*fftypes.Data{claimData}, true, nil) + + bs.pendingConfirms[*claimMsg.Header.ID] = claimMsg + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, verifyMsg, []*fftypes.Data{verifyData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionConfirm}, action) + assert.NoError(t, err) + + err = bs.finalizers[0](ctx) + assert.NoError(t, err) + + 
mim.AssertExpectations(t) + mdi.AssertExpectations(t) + mdm.AssertExpectations(t) +} + +func TestHandleDefinitionIdentityVerificationIncompleteClaimData(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + _, org1, claimMsg, _, verifyMsg, verifyData := testCustomClaimAndVerification(t) + claimMsg.State = fftypes.MessageStateConfirmed + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("CachedIdentityLookupByID", ctx, org1.ID).Return(org1, nil) + + mdi := dh.database.(*databasemocks.Plugin) + mdi.On("GetMessageByID", ctx, claimMsg.Header.ID).Return(claimMsg, nil) + + mdm := dh.data.(*datamocks.Manager) + mdm.On("GetMessageData", ctx, mock.Anything, true).Return([]*fftypes.Data{}, false, nil) + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, verifyMsg, []*fftypes.Data{verifyData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionConfirm}, action) + assert.NoError(t, err) + + mim.AssertExpectations(t) + mdi.AssertExpectations(t) + mdm.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityVerificationClaimDataFail(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + _, org1, claimMsg, _, verifyMsg, verifyData := testCustomClaimAndVerification(t) + claimMsg.State = fftypes.MessageStateConfirmed + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("CachedIdentityLookupByID", ctx, org1.ID).Return(org1, nil) + + mdi := dh.database.(*databasemocks.Plugin) + mdi.On("GetMessageByID", ctx, claimMsg.Header.ID).Return(claimMsg, nil) + + mdm := dh.data.(*datamocks.Manager) + mdm.On("GetMessageData", ctx, mock.Anything, true).Return(nil, false, fmt.Errorf("pop")) + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, verifyMsg, []*fftypes.Data{verifyData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionRetry}, action) + assert.Regexp(t, "pop", err) + + mim.AssertExpectations(t) + mdi.AssertExpectations(t) + mdm.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityVerificationClaimHashMismatch(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + _, org1, claimMsg, _, verifyMsg, verifyData := testCustomClaimAndVerification(t) + claimMsg.State = fftypes.MessageStateConfirmed + claimMsg.Hash = fftypes.NewRandB32() + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("CachedIdentityLookupByID", ctx, org1.ID).Return(org1, nil) + + mdi := dh.database.(*databasemocks.Plugin) + mdi.On("GetMessageByID", ctx, claimMsg.Header.ID).Return(claimMsg, nil) + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, verifyMsg, []*fftypes.Data{verifyData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) + assert.NoError(t, err) + + mim.AssertExpectations(t) + mdi.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityVerificationBeforeClaim(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + _, org1, claimMsg, _, verifyMsg, verifyData := testCustomClaimAndVerification(t) + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("CachedIdentityLookupByID", ctx, org1.ID).Return(org1, nil) + + mdi := dh.database.(*databasemocks.Plugin) + mdi.On("GetMessageByID", ctx, claimMsg.Header.ID).Return(nil, nil) + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, verifyMsg, []*fftypes.Data{verifyData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action:
ActionConfirm}, action) + assert.NoError(t, err) + + mim.AssertExpectations(t) + mdi.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityVerificationClaimLookupFail(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + _, org1, claimMsg, _, verifyMsg, verifyData := testCustomClaimAndVerification(t) + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("CachedIdentityLookupByID", ctx, org1.ID).Return(org1, nil) + + mdi := dh.database.(*databasemocks.Plugin) + mdi.On("GetMessageByID", ctx, claimMsg.Header.ID).Return(nil, fmt.Errorf("pop")) + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, verifyMsg, []*fftypes.Data{verifyData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionRetry}, action) + assert.Regexp(t, "pop", err) + + mim.AssertExpectations(t) + mdi.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityVerificationWrongSigner(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + _, org1, _, _, verifyMsg, verifyData := testCustomClaimAndVerification(t) + verifyMsg.Header.Author = "wrong" + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("CachedIdentityLookupByID", ctx, org1.ID).Return(org1, nil) + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, verifyMsg, []*fftypes.Data{verifyData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) + assert.NoError(t, err) + + mim.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityVerificationCheckParentNotFound(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + _, org1, _, _, verifyMsg, verifyData := testCustomClaimAndVerification(t) + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("CachedIdentityLookupByID", ctx, org1.ID).Return(nil, nil) + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, verifyMsg, []*fftypes.Data{verifyData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) + assert.NoError(t, err) + + mim.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityVerificationCheckParentFail(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + _, org1, _, _, verifyMsg, verifyData := testCustomClaimAndVerification(t) + + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("CachedIdentityLookupByID", ctx, org1.ID).Return(nil, fmt.Errorf("pop")) + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, verifyMsg, []*fftypes.Data{verifyData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionRetry}, action) + assert.Regexp(t, "pop", err) + + mim.AssertExpectations(t) + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityVerificationInvalidPayload(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + iv := fftypes.IdentityVerification{ + Identity: testOrgIdentity(t, "org1").IdentityBase, + // Missing message claim info + } + b, err := json.Marshal(&iv) + assert.NoError(t, err) + emptyObjectData := &fftypes.Data{ + Value: fftypes.JSONAnyPtrBytes(b), + } + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, &fftypes.Message{ + Header: fftypes.MessageHeader{ + ID: fftypes.NewUUID(), + Type: fftypes.MessageTypeBroadcast, + Tag: fftypes.SystemTagIdentityVerification, + }, + }, []*fftypes.Data{emptyObjectData}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: 
ActionReject}, action) + assert.NoError(t, err) + + bs.assertNoFinalizers() +} + +func TestHandleDefinitionIdentityVerificationInvalidData(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, &fftypes.Message{ + Header: fftypes.MessageHeader{ + ID: fftypes.NewUUID(), + Type: fftypes.MessageTypeBroadcast, + Tag: fftypes.SystemTagIdentityVerification, + }, + }, []*fftypes.Data{}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) + assert.NoError(t, err) + + bs.assertNoFinalizers() +} diff --git a/internal/definitions/definition_handler_namespace.go b/internal/definitions/definition_handler_namespace.go index 276c831cd2..55db98b405 100644 --- a/internal/definitions/definition_handler_namespace.go +++ b/internal/definitions/definition_handler_namespace.go @@ -23,42 +23,41 @@ import ( "github.com/hyperledger/firefly/pkg/fftypes" ) -func (dh *definitionHandlers) handleNamespaceBroadcast(ctx context.Context, msg *fftypes.Message, data []*fftypes.Data, tx *fftypes.UUID) (DefinitionMessageAction, *DefinitionBatchActions, error) { +func (dh *definitionHandlers) handleNamespaceBroadcast(ctx context.Context, state DefinitionBatchState, msg *fftypes.Message, data []*fftypes.Data, tx *fftypes.UUID) (HandlerResult, error) { l := log.L(ctx) var ns fftypes.Namespace valid := dh.getSystemBroadcastPayload(ctx, msg, data, &ns) if !valid { - return ActionReject, nil, nil + return HandlerResult{Action: ActionReject}, nil } if err := ns.Validate(ctx, true); err != nil { l.Warnf("Unable to process namespace broadcast %s - validate failed: %s", msg.Header.ID, err) - return ActionReject, nil, nil + return HandlerResult{Action: ActionReject}, nil } existing, err := dh.database.GetNamespace(ctx, ns.Name) if err != nil { - return ActionRetry, nil, err // We only return database errors + return HandlerResult{Action: ActionRetry}, err // We only return database errors } if existing != nil { if existing.Type != fftypes.NamespaceTypeLocal { l.Warnf("Unable to process namespace broadcast %s (name=%s) - duplicate of %v", msg.Header.ID, existing.Name, existing.ID) - return ActionReject, nil, nil + return HandlerResult{Action: ActionReject}, nil } // Remove the local definition if err = dh.database.DeleteNamespace(ctx, existing.ID); err != nil { - return ActionRetry, nil, err + return HandlerResult{Action: ActionRetry}, err } } if err = dh.database.UpsertNamespace(ctx, &ns, false); err != nil { - return ActionRetry, nil, err + return HandlerResult{Action: ActionRetry}, err } - return ActionConfirm, &DefinitionBatchActions{ - Finalize: func(ctx context.Context) error { - event := fftypes.NewEvent(fftypes.EventTypeNamespaceConfirmed, ns.Name, ns.ID, tx) - return dh.database.InsertEvent(ctx, event) - }, - }, nil + state.AddFinalize(func(ctx context.Context) error { + event := fftypes.NewEvent(fftypes.EventTypeNamespaceConfirmed, ns.Name, ns.ID, tx) + return dh.database.InsertEvent(ctx, event) + }) + return HandlerResult{Action: ActionConfirm}, nil } diff --git a/internal/definitions/definition_handler_namespace_test.go b/internal/definitions/definition_handler_namespace_test.go index d30237aacb..794c858887 100644 --- a/internal/definitions/definition_handler_namespace_test.go +++ b/internal/definitions/definition_handler_namespace_test.go @@ -29,7 +29,7 @@ import ( ) func TestHandleDefinitionBroadcastNSOk(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, bs := newTestDefinitionHandlers(t) ns := 
&fftypes.Namespace{ ID: fftypes.NewUUID(), @@ -45,21 +45,21 @@ func TestHandleDefinitionBroadcastNSOk(t *testing.T) { mdi.On("GetNamespace", mock.Anything, "ns1").Return(nil, nil) mdi.On("UpsertNamespace", mock.Anything, mock.Anything, false).Return(nil) mdi.On("InsertEvent", mock.Anything, mock.Anything).Return(nil) - action, ba, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ + action, err := dh.HandleDefinitionBroadcast(context.Background(), bs, &fftypes.Message{ Header: fftypes.MessageHeader{ - Tag: string(fftypes.SystemTagDefineNamespace), + Tag: fftypes.SystemTagDefineNamespace, }, }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionConfirm, action) + assert.Equal(t, HandlerResult{Action: ActionConfirm}, action) assert.NoError(t, err) - err = ba.Finalize(context.Background()) + err = bs.finalizers[0](context.Background()) assert.NoError(t, err) mdi.AssertExpectations(t) } func TestHandleDefinitionBroadcastNSEventFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, bs := newTestDefinitionHandlers(t) ns := &fftypes.Namespace{ ID: fftypes.NewUUID(), @@ -75,21 +75,21 @@ func TestHandleDefinitionBroadcastNSEventFail(t *testing.T) { mdi.On("GetNamespace", mock.Anything, "ns1").Return(nil, nil) mdi.On("UpsertNamespace", mock.Anything, mock.Anything, false).Return(nil) mdi.On("InsertEvent", mock.Anything, mock.Anything).Return(fmt.Errorf("pop")) - action, ba, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ + action, err := dh.HandleDefinitionBroadcast(context.Background(), bs, &fftypes.Message{ Header: fftypes.MessageHeader{ - Tag: string(fftypes.SystemTagDefineNamespace), + Tag: fftypes.SystemTagDefineNamespace, }, }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionConfirm, action) + assert.Equal(t, HandlerResult{Action: ActionConfirm}, action) assert.NoError(t, err) - err = ba.Finalize(context.Background()) + err = bs.finalizers[0](context.Background()) assert.EqualError(t, err, "pop") mdi.AssertExpectations(t) } func TestHandleDefinitionBroadcastNSUpsertFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, bs := newTestDefinitionHandlers(t) ns := &fftypes.Namespace{ ID: fftypes.NewUUID(), @@ -104,31 +104,33 @@ func TestHandleDefinitionBroadcastNSUpsertFail(t *testing.T) { mdi := dh.database.(*databasemocks.Plugin) mdi.On("GetNamespace", mock.Anything, "ns1").Return(nil, nil) mdi.On("UpsertNamespace", mock.Anything, mock.Anything, false).Return(fmt.Errorf("pop")) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ + action, err := dh.HandleDefinitionBroadcast(context.Background(), bs, &fftypes.Message{ Header: fftypes.MessageHeader{ - Tag: string(fftypes.SystemTagDefineNamespace), + Tag: fftypes.SystemTagDefineNamespace, }, }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionRetry, action) + assert.Equal(t, HandlerResult{Action: ActionRetry}, action) assert.EqualError(t, err, "pop") mdi.AssertExpectations(t) + bs.assertNoFinalizers() } func TestHandleDefinitionBroadcastNSMissingData(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, bs := newTestDefinitionHandlers(t) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ + action, err := dh.HandleDefinitionBroadcast(context.Background(), bs, &fftypes.Message{ Header: fftypes.MessageHeader{ - Tag: string(fftypes.SystemTagDefineNamespace), + Tag: fftypes.SystemTagDefineNamespace, }, }, []*fftypes.Data{}, fftypes.NewUUID()) - 
assert.Equal(t, ActionReject, action) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) assert.NoError(t, err) + bs.assertNoFinalizers() } func TestHandleDefinitionBroadcastNSBadID(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, bs := newTestDefinitionHandlers(t) ns := &fftypes.Namespace{} b, err := json.Marshal(&ns) @@ -137,33 +139,35 @@ func TestHandleDefinitionBroadcastNSBadID(t *testing.T) { Value: fftypes.JSONAnyPtrBytes(b), } - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ + action, err := dh.HandleDefinitionBroadcast(context.Background(), bs, &fftypes.Message{ Header: fftypes.MessageHeader{ - Tag: string(fftypes.SystemTagDefineNamespace), + Tag: fftypes.SystemTagDefineNamespace, }, }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionReject, action) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) assert.NoError(t, err) + bs.assertNoFinalizers() } func TestHandleDefinitionBroadcastNSBadData(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, bs := newTestDefinitionHandlers(t) data := &fftypes.Data{ Value: fftypes.JSONAnyPtr(`!{json`), } - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ + action, err := dh.HandleDefinitionBroadcast(context.Background(), bs, &fftypes.Message{ Header: fftypes.MessageHeader{ - Tag: string(fftypes.SystemTagDefineNamespace), + Tag: fftypes.SystemTagDefineNamespace, }, }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionReject, action) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) assert.NoError(t, err) + bs.assertNoFinalizers() } func TestHandleDefinitionBroadcastDuplicate(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, bs := newTestDefinitionHandlers(t) ns := &fftypes.Namespace{ ID: fftypes.NewUUID(), @@ -177,19 +181,20 @@ func TestHandleDefinitionBroadcastDuplicate(t *testing.T) { mdi := dh.database.(*databasemocks.Plugin) mdi.On("GetNamespace", mock.Anything, "ns1").Return(ns, nil) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ + action, err := dh.HandleDefinitionBroadcast(context.Background(), bs, &fftypes.Message{ Header: fftypes.MessageHeader{ - Tag: string(fftypes.SystemTagDefineNamespace), + Tag: fftypes.SystemTagDefineNamespace, }, }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionReject, action) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) assert.NoError(t, err) mdi.AssertExpectations(t) + bs.assertNoFinalizers() } func TestHandleDefinitionBroadcastDuplicateOverrideLocal(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, bs := newTestDefinitionHandlers(t) ns := &fftypes.Namespace{ ID: fftypes.NewUUID(), @@ -207,21 +212,21 @@ func TestHandleDefinitionBroadcastDuplicateOverrideLocal(t *testing.T) { mdi.On("DeleteNamespace", mock.Anything, mock.Anything).Return(nil) mdi.On("UpsertNamespace", mock.Anything, mock.Anything, false).Return(nil) mdi.On("InsertEvent", mock.Anything, mock.Anything).Return(nil) - action, ba, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ + action, err := dh.HandleDefinitionBroadcast(context.Background(), bs, &fftypes.Message{ Header: fftypes.MessageHeader{ - Tag: string(fftypes.SystemTagDefineNamespace), + Tag: fftypes.SystemTagDefineNamespace, }, }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionConfirm, action) + assert.Equal(t, HandlerResult{Action: ActionConfirm}, action) assert.NoError(t, err) - err = 
ba.Finalize(context.Background()) + err = bs.finalizers[0](context.Background()) assert.NoError(t, err) mdi.AssertExpectations(t) } func TestHandleDefinitionBroadcastDuplicateOverrideLocalFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, bs := newTestDefinitionHandlers(t) ns := &fftypes.Namespace{ ID: fftypes.NewUUID(), @@ -237,19 +242,20 @@ func TestHandleDefinitionBroadcastDuplicateOverrideLocalFail(t *testing.T) { mdi := dh.database.(*databasemocks.Plugin) mdi.On("GetNamespace", mock.Anything, "ns1").Return(ns, nil) mdi.On("DeleteNamespace", mock.Anything, mock.Anything).Return(fmt.Errorf("pop")) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ + action, err := dh.HandleDefinitionBroadcast(context.Background(), bs, &fftypes.Message{ Header: fftypes.MessageHeader{ - Tag: string(fftypes.SystemTagDefineNamespace), + Tag: fftypes.SystemTagDefineNamespace, }, }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionRetry, action) + assert.Equal(t, HandlerResult{Action: ActionRetry}, action) assert.EqualError(t, err, "pop") mdi.AssertExpectations(t) + bs.assertNoFinalizers() } func TestHandleDefinitionBroadcastDupCheckFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, bs := newTestDefinitionHandlers(t) ns := &fftypes.Namespace{ ID: fftypes.NewUUID(), @@ -263,13 +269,14 @@ func TestHandleDefinitionBroadcastDupCheckFail(t *testing.T) { mdi := dh.database.(*databasemocks.Plugin) mdi.On("GetNamespace", mock.Anything, "ns1").Return(nil, fmt.Errorf("pop")) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ + action, err := dh.HandleDefinitionBroadcast(context.Background(), bs, &fftypes.Message{ Header: fftypes.MessageHeader{ - Tag: string(fftypes.SystemTagDefineNamespace), + Tag: fftypes.SystemTagDefineNamespace, }, }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionRetry, action) + assert.Equal(t, HandlerResult{Action: ActionRetry}, action) assert.EqualError(t, err, "pop") mdi.AssertExpectations(t) + bs.assertNoFinalizers() } diff --git a/internal/definitions/definition_handler_network_node.go b/internal/definitions/definition_handler_network_node.go index b8fe4bf59d..6f8d8e4a04 100644 --- a/internal/definitions/definition_handler_network_node.go +++ b/internal/definitions/definition_handler_network_node.go @@ -23,57 +23,27 @@ import ( "github.com/hyperledger/firefly/pkg/fftypes" ) -func (dh *definitionHandlers) handleNodeBroadcast(ctx context.Context, msg *fftypes.Message, data []*fftypes.Data) (DefinitionMessageAction, *DefinitionBatchActions, error) { +func (dh *definitionHandlers) handleDeprecatedNodeBroadcast(ctx context.Context, state DefinitionBatchState, msg *fftypes.Message, data []*fftypes.Data) (HandlerResult, error) { l := log.L(ctx) - var node fftypes.Node - valid := dh.getSystemBroadcastPayload(ctx, msg, data, &node) + var nodeOld fftypes.DeprecatedNode + valid := dh.getSystemBroadcastPayload(ctx, msg, data, &nodeOld) if !valid { - return ActionReject, nil, nil + return HandlerResult{Action: ActionReject}, nil } - if err := node.Validate(ctx, true); err != nil { - l.Warnf("Unable to process node broadcast %s - validate failed: %s", msg.Header.ID, err) - return ActionReject, nil, nil - } - - owner, err := dh.database.GetOrganizationByIdentity(ctx, node.Owner) + owner, err := dh.identity.FindIdentityForVerifier(ctx, []fftypes.IdentityType{fftypes.IdentityTypeOrg}, fftypes.SystemNamespace, &fftypes.VerifierRef{ + Type: dh.blockchain.VerifierType(), + 
Value: nodeOld.Owner, + }) if err != nil { - return ActionRetry, nil, err // We only return database errors + return HandlerResult{Action: ActionRetry}, err // We only return database errors } if owner == nil { - l.Warnf("Unable to process node broadcast %s - parent identity not found: %s", msg.Header.ID, node.Owner) - return ActionReject, nil, nil - } - - if msg.Header.Key != node.Owner { - l.Warnf("Unable to process node broadcast %s - incorrect signature. Expected=%s Received=%s", msg.Header.ID, node.Owner, msg.Header.Author) - return ActionReject, nil, nil - } - - existing, err := dh.database.GetNode(ctx, node.Owner, node.Name) - if err == nil && existing == nil { - existing, err = dh.database.GetNodeByID(ctx, node.ID) - } - if err != nil { - return ActionRetry, nil, err // We only return database errors - } - if existing != nil { - if existing.Owner != node.Owner { - l.Warnf("Unable to process node broadcast %s - mismatch with existing %v", msg.Header.ID, existing.ID) - return ActionReject, nil, nil - } - node.ID = nil // we keep the existing ID + l.Warnf("Unable to process node broadcast %s - parent identity not found: %s", msg.Header.ID, nodeOld.Owner) + return HandlerResult{Action: ActionReject}, nil } - if err = dh.database.UpsertNode(ctx, &node, true); err != nil { - return ActionRetry, nil, err - } + return dh.handleIdentityClaim(ctx, state, msg, nodeOld.AddMigratedParent(owner.ID), nil) - return ActionConfirm, &DefinitionBatchActions{ - PreFinalize: func(ctx context.Context) error { - // Tell the data exchange about this node. Treat these errors like database errors - and return for retry processing - return dh.exchange.AddPeer(ctx, node.DX) - }, - }, nil } diff --git a/internal/definitions/definition_handler_network_node_test.go b/internal/definitions/definition_handler_network_node_test.go index df78e7d3f4..2c15e39b54 100644 --- a/internal/definitions/definition_handler_network_node_test.go +++ b/internal/definitions/definition_handler_network_node_test.go @@ -24,418 +24,175 @@ import ( "github.com/hyperledger/firefly/mocks/databasemocks" "github.com/hyperledger/firefly/mocks/dataexchangemocks" + "github.com/hyperledger/firefly/mocks/identitymanagermocks" + "github.com/hyperledger/firefly/pkg/database" "github.com/hyperledger/firefly/pkg/fftypes" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" ) -func TestHandleDefinitionBroadcastNodeOk(t *testing.T) { - dh := newTestDefinitionHandlers(t) - - node := &fftypes.Node{ - ID: fftypes.NewUUID(), - Name: "node1", - Owner: "0x23456", - Description: "my org", - DX: fftypes.DXInfo{ - Peer: "peer1", - Endpoint: fftypes.JSONObject{"some": "info"}, - }, +const oldNodeExample = ` +{ + "header": { + "id": "811de0e6-6c07-445f-99fb-e2f457a9f140", + "type": "definition", + "txtype": "batch_pin", + "author": "did:firefly:org/f08153cc-c605-4239-9087-e08747e1fb4e", + "key": "0x214840c7c62cddf7a854a830d55018b38e4e78be", + "created": "2022-02-24T15:31:01.711696467Z", + "namespace": "ff_system", + "topics": [ + "ff_organizations" + ], + "tag": "ff_define_node", + "datahash": "e0bfd8cf53524e28d036b971dfca3dfbd1fb93bc0259d32a9874e569fdbcf814" + }, + "hash": "fdf86a889c0d7377d0c97e654d9fd9f56a9fc462d0c61162df94f902505f5a85", + "batch": "46015b75-2d90-4055-9c7c-0ca6e0529961", + "state": "confirmed", + "confirmed": "2022-02-24T15:31:03.691365677Z", + "data": [ + { + "id": "8e03051d-6bf9-4ceb-9985-ed90e60d9334", + "hash": "8ffd58985cc09fff2c8b3ca92d11e0b9d86847032d05fc98ecf7f2372f421cce", + "validator": "definition", + "value": { + 
"id": "d0c4f928-943d-49bc-927e-e9eb8fb8dc00", + "owner": "0x214840c7c62cddf7a854a830d55018b38e4e78be", + "name": "node_0", + "dx": { + "peer": "member_0", + "endpoint": { + "cert": "-----BEGIN CERTIFICATE-----\nMIIC1DCCAbwCCQCdQsqbIH663DANBgkqhkiG9w0BAQsFADAsMRcwFQYDVQQDDA5k\nYXRhZXhjaGFuZ2VfMDERMA8GA1UECgwIbWVtYmVyXzAwHhcNMjIwMjI0MTUzMDE1\nWhcNMjMwMjI0MTUzMDE1WjAsMRcwFQYDVQQDDA5kYXRhZXhjaGFuZ2VfMDERMA8G\nA1UECgwIbWVtYmVyXzAwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9\nItQpszirxOeONjzZQLgnp6iIUcu0v0NYhJ5QQM/a6JkzcTw+ZoxjwQZIAV/WRgVK\ngnp7Z+BXcGB7TqQsY3501tEG6st8zUgH2RHiIdPll9Uavxws2eQlrvW98STST1S8\n41OmIbetC7TWYLYvjtM2d8KjXgU96KtM6G7sVucOFxAkrM1UPrLVZOoUmUyXxery\nTzC16ssvnPHFylWwSD5PzHDRW3H+hYq6O3VE1VztZGmFQ/+9ZrPv3Io7fDyIa0vm\n7WWFiMFqO96vvh5Gnkzailaqs9ViXp4FE5c9ftEmXmzqI5YpVTI70MHlXKXoarD4\nuZnpRRcqACcBFl463WnzAgMBAAEwDQYJKoZIhvcNAQELBQADggEBAJruH13xnlvf\nat2QgeTsxjG4EQK8TDPEIthaA1eXP/69ShHeYNM62H9qP3QCjbY0i8eN9WdEzfGI\nSIWjDdviSNgPeH4KxyRL0Yiv43en8y0E0UcbqiiQrSdqjTDITBxo61cyOEYMmPiE\nynSPnGzt+iP3C64a/dAwfgTRFihgxc9WT+TcvJoZ58vku/Zi2+uA5qn9uLDHb0gF\nKXrACRvrRqOHXKoT1dJPUBnoiEhK4roB4y2yy0CNUP+tEwGLuGpFlek0GruYYEwz\nfAYpvKW5JGdcjD2SgmJ2iWdQQkhh5rNh5pAdSmzYf/x0psHTpVg0JrSC7et2hi6K\njklYSLaI4pI=\n-----END CERTIFICATE-----\n", + "endpoint": "https://dataexchange_0:3001", + "id": "member_0" + } + }, + "created": "2022-02-24T15:31:01.670896884Z" + } + } + ] + } +` + +func testDeprecatedRootNode(t *testing.T) (*fftypes.DeprecatedNode, *fftypes.Message, *fftypes.Data) { + + var msgInOut fftypes.MessageInOut + err := json.Unmarshal([]byte(oldNodeExample), &msgInOut) + assert.NoError(t, err) + + var node fftypes.DeprecatedNode + err = json.Unmarshal(msgInOut.InlineData[0].Value.Bytes(), &node) + assert.NoError(t, err) + + return &node, &msgInOut.Message, &fftypes.Data{ + ID: msgInOut.InlineData[0].ID, + Validator: msgInOut.InlineData[0].Validator, + Namespace: msgInOut.Header.Namespace, + Hash: msgInOut.InlineData[0].Hash, + Value: msgInOut.InlineData[0].Value, } - b, err := json.Marshal(&node) - assert.NoError(t, err) - data := &fftypes.Data{ - Value: fftypes.JSONAnyPtrBytes(b), - } - - mdi := dh.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", mock.Anything, "0x23456").Return(&fftypes.Organization{ID: fftypes.NewUUID(), Identity: "0x23456"}, nil) - mdi.On("GetNode", mock.Anything, "0x23456", "node1").Return(nil, nil) - mdi.On("GetNodeByID", mock.Anything, node.ID).Return(nil, nil) - mdi.On("UpsertNode", mock.Anything, mock.Anything, true).Return(nil) - mdx := dh.exchange.(*dataexchangemocks.Plugin) - mdx.On("AddPeer", mock.Anything, node.DX).Return(nil) - action, ba, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ - Header: fftypes.MessageHeader{ - Namespace: "ns1", - Identity: fftypes.Identity{ - Author: "did:firefly:org/0x23456", - Key: "0x23456", - }, - Tag: string(fftypes.SystemTagDefineNode), - }, - }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionConfirm, action) - assert.NoError(t, err) - - err = ba.PreFinalize(context.Background()) - assert.NoError(t, err) - - mdi.AssertExpectations(t) } -func TestHandleDefinitionBroadcastNodeUpsertFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) - - node := &fftypes.Node{ - ID: fftypes.NewUUID(), - Name: "node1", - Owner: "0x23456", - Description: "my org", - DX: fftypes.DXInfo{ - Peer: "peer1", - Endpoint: fftypes.JSONObject{"some": "info"}, - }, - } - b, err := json.Marshal(&node) - assert.NoError(t, err) - data := &fftypes.Data{ - Value: fftypes.JSONAnyPtrBytes(b), - } 
+func TestHandleDeprecatedNodeDefinitionOK(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() - mdi := dh.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", mock.Anything, "0x23456").Return(&fftypes.Organization{ID: fftypes.NewUUID(), Identity: "0x23456"}, nil) - mdi.On("GetNode", mock.Anything, "0x23456", "node1").Return(nil, nil) - mdi.On("GetNodeByID", mock.Anything, node.ID).Return(nil, nil) - mdi.On("UpsertNode", mock.Anything, mock.Anything, true).Return(fmt.Errorf("pop")) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ - Header: fftypes.MessageHeader{ - Namespace: "ns1", - Identity: fftypes.Identity{ - Author: "did:firefly:org/0x23456", - Key: "0x23456", - }, - Tag: string(fftypes.SystemTagDefineNode), - }, - }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionRetry, action) - assert.EqualError(t, err, "pop") + node, msg, data := testDeprecatedRootNode(t) + parent, _, _ := testDeprecatedRootOrg(t) - mdi.AssertExpectations(t) -} - -func TestHandleDefinitionBroadcastNodeAddPeerFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) - - node := &fftypes.Node{ - ID: fftypes.NewUUID(), - Name: "node1", - Owner: "0x23456", - Description: "my org", - DX: fftypes.DXInfo{ - Peer: "peer1", - Endpoint: fftypes.JSONObject{"some": "info"}, - }, - } - b, err := json.Marshal(&node) - assert.NoError(t, err) - data := &fftypes.Data{ - Value: fftypes.JSONAnyPtrBytes(b), - } + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", ctx, []fftypes.IdentityType{fftypes.IdentityTypeOrg}, fftypes.SystemNamespace, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: node.Owner, + }).Return(parent.Migrated().Identity, nil) + mim.On("VerifyIdentityChain", ctx, mock.Anything).Return(parent.Migrated().Identity, false, nil) mdi := dh.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", mock.Anything, "0x23456").Return(&fftypes.Organization{ID: fftypes.NewUUID(), Identity: "0x23456"}, nil) - mdi.On("GetNode", mock.Anything, "0x23456", "node1").Return(nil, nil) - mdi.On("GetNodeByID", mock.Anything, node.ID).Return(nil, nil) - mdi.On("UpsertNode", mock.Anything, mock.Anything, true).Return(nil) - mdx := dh.exchange.(*dataexchangemocks.Plugin) - mdx.On("AddPeer", mock.Anything, node.DX).Return(fmt.Errorf("pop")) - action, ba, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ - Header: fftypes.MessageHeader{ - Namespace: "ns1", - Identity: fftypes.Identity{ - Author: "did:firefly:org/0x23456", - Key: "0x23456", - }, - Tag: string(fftypes.SystemTagDefineNode), - }, - }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionConfirm, action) - assert.NoError(t, err) - err = ba.PreFinalize(context.Background()) - assert.EqualError(t, err, "pop") + mdi.On("GetIdentityByName", ctx, fftypes.IdentityTypeNode, fftypes.SystemNamespace, node.Name).Return(nil, nil) + mdi.On("GetIdentityByID", ctx, node.ID).Return(nil, nil) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeFFDXPeerID, fftypes.SystemNamespace, "member_0").Return(nil, nil) + mdi.On("UpsertIdentity", ctx, mock.MatchedBy(func(identity *fftypes.Identity) bool { + assert.Equal(t, *msg.Header.ID, *identity.Messages.Claim) + return true + }), database.UpsertOptimizationNew).Return(nil) + mdi.On("UpsertVerifier", ctx, mock.MatchedBy(func(verifier *fftypes.Verifier) bool { + assert.Equal(t, fftypes.VerifierTypeFFDXPeerID, verifier.Type) + 
assert.Equal(t, "member_0", verifier.Value) + assert.Equal(t, *node.ID, *verifier.Identity) + return true + }), database.UpsertOptimizationNew).Return(nil) + mdi.On("InsertEvent", mock.Anything, mock.MatchedBy(func(event *fftypes.Event) bool { + return event.Type == fftypes.EventTypeIdentityConfirmed + })).Return(nil) - mdi.AssertExpectations(t) -} - -func TestHandleDefinitionBroadcastNodeDupMismatch(t *testing.T) { - dh := newTestDefinitionHandlers(t) - - node := &fftypes.Node{ - ID: fftypes.NewUUID(), - Name: "node1", - Owner: "0x23456", - Description: "my org", - DX: fftypes.DXInfo{ - Peer: "peer1", - Endpoint: fftypes.JSONObject{"some": "info"}, - }, - } - b, err := json.Marshal(&node) - assert.NoError(t, err) - data := &fftypes.Data{ - Value: fftypes.JSONAnyPtrBytes(b), - } + mdx := dh.exchange.(*dataexchangemocks.Plugin) + mdx.On("AddPeer", ctx, node.DX.Endpoint).Return(nil) - mdi := dh.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", mock.Anything, "0x23456").Return(&fftypes.Organization{ID: fftypes.NewUUID(), Identity: "0x23456"}, nil) - mdi.On("GetNode", mock.Anything, "0x23456", "node1").Return(&fftypes.Node{Owner: "0x99999"}, nil) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ - Header: fftypes.MessageHeader{ - Namespace: "ns1", - Identity: fftypes.Identity{ - Author: "did:firefly:org/0x23456", - Key: "0x23456", - }, - Tag: string(fftypes.SystemTagDefineNode), - }, - }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionReject, action) + action, err := dh.HandleDefinitionBroadcast(ctx, bs, msg, []*fftypes.Data{data}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionConfirm}, action) assert.NoError(t, err) - mdi.AssertExpectations(t) -} - -func TestHandleDefinitionBroadcastNodeDupOK(t *testing.T) { - dh := newTestDefinitionHandlers(t) - - node := &fftypes.Node{ - ID: fftypes.NewUUID(), - Name: "node1", - Owner: "0x23456", - Description: "my org", - DX: fftypes.DXInfo{ - Peer: "peer1", - Endpoint: fftypes.JSONObject{"some": "info"}, - }, - } - b, err := json.Marshal(&node) + err = bs.preFinalizers[0](ctx) assert.NoError(t, err) - data := &fftypes.Data{ - Value: fftypes.JSONAnyPtrBytes(b), - } - - mdi := dh.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", mock.Anything, "0x23456").Return(&fftypes.Organization{ID: fftypes.NewUUID(), Identity: "0x23456"}, nil) - mdi.On("GetNode", mock.Anything, "0x23456", "node1").Return(&fftypes.Node{Owner: "0x23456"}, nil) - mdi.On("UpsertNode", mock.Anything, mock.Anything, true).Return(nil) - mdx := dh.exchange.(*dataexchangemocks.Plugin) - mdx.On("AddPeer", mock.Anything, node.DX).Return(nil) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ - Header: fftypes.MessageHeader{ - Namespace: "ns1", - Identity: fftypes.Identity{ - Author: "did:firefly:org/0x23456", - Key: "0x23456", - }, - Tag: string(fftypes.SystemTagDefineNode), - }, - }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionConfirm, action) + err = bs.finalizers[0](ctx) assert.NoError(t, err) + mim.AssertExpectations(t) mdi.AssertExpectations(t) -} - -func TestHandleDefinitionBroadcastNodeGetFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) - - node := &fftypes.Node{ - ID: fftypes.NewUUID(), - Name: "node1", - Owner: "0x23456", - Description: "my org", - DX: fftypes.DXInfo{ - Peer: "peer1", - Endpoint: fftypes.JSONObject{"some": "info"}, - }, - } - b, err := json.Marshal(&node) - assert.NoError(t, err) - data 
:= &fftypes.Data{ - Value: fftypes.JSONAnyPtrBytes(b), - } - - mdi := dh.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", mock.Anything, "0x23456").Return(&fftypes.Organization{ID: fftypes.NewUUID(), Identity: "0x23456"}, nil) - mdi.On("GetNode", mock.Anything, "0x23456", "node1").Return(nil, fmt.Errorf("pop")) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ - Header: fftypes.MessageHeader{ - Namespace: "ns1", - Identity: fftypes.Identity{ - Author: "did:firefly:org/0x23456", - Key: "0x23456", - }, - Tag: string(fftypes.SystemTagDefineNode), - }, - }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionRetry, action) - assert.EqualError(t, err, "pop") + mdx.AssertExpectations(t) - mdi.AssertExpectations(t) } -func TestHandleDefinitionBroadcastNodeBadAuthor(t *testing.T) { - dh := newTestDefinitionHandlers(t) - - node := &fftypes.Node{ - ID: fftypes.NewUUID(), - Name: "node1", - Owner: "0x23456", - Description: "my org", - DX: fftypes.DXInfo{ - Peer: "peer1", - Endpoint: fftypes.JSONObject{"some": "info"}, - }, - } - b, err := json.Marshal(&node) - assert.NoError(t, err) - data := &fftypes.Data{ - Value: fftypes.JSONAnyPtrBytes(b), - } +func TestHandleDeprecatedNodeDefinitionBadData(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() - mdi := dh.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", mock.Anything, "0x23456").Return(&fftypes.Organization{ID: fftypes.NewUUID(), Identity: "0x23456"}, nil) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ - Header: fftypes.MessageHeader{ - Namespace: "ns1", - Identity: fftypes.Identity{ - Author: "did:firefly:org/0x23456", - Key: "0x12345", - }, - Tag: string(fftypes.SystemTagDefineNode), - }, - }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionReject, action) + action, err := dh.handleDeprecatedNodeBroadcast(ctx, bs, &fftypes.Message{}, []*fftypes.Data{}) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) assert.NoError(t, err) - mdi.AssertExpectations(t) + bs.assertNoFinalizers() } -func TestHandleDefinitionBroadcastNodeGetOrgNotFound(t *testing.T) { - dh := newTestDefinitionHandlers(t) - - node := &fftypes.Node{ - ID: fftypes.NewUUID(), - Name: "node1", - Owner: "0x23456", - Description: "my org", - DX: fftypes.DXInfo{ - Peer: "peer1", - Endpoint: fftypes.JSONObject{"some": "info"}, - }, - } - b, err := json.Marshal(&node) - assert.NoError(t, err) - data := &fftypes.Data{ - Value: fftypes.JSONAnyPtrBytes(b), - } +func TestHandleDeprecatedNodeDefinitionFailOrgLookup(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() - mdi := dh.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", mock.Anything, "0x23456").Return(nil, nil) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ - Header: fftypes.MessageHeader{ - Namespace: "ns1", - Identity: fftypes.Identity{ - Author: "did:firefly:org/0x23456", - Key: "0x23456", - }, - Tag: string(fftypes.SystemTagDefineNode), - }, - }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionReject, action) - assert.NoError(t, err) + node, msg, data := testDeprecatedRootNode(t) - mdi.AssertExpectations(t) -} + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", ctx, []fftypes.IdentityType{fftypes.IdentityTypeOrg}, fftypes.SystemNamespace, &fftypes.VerifierRef{ + Type: 
fftypes.VerifierTypeEthAddress, + Value: node.Owner, + }).Return(nil, fmt.Errorf("pop")) -func TestHandleDefinitionBroadcastNodeGetOrgFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) - - node := &fftypes.Node{ - ID: fftypes.NewUUID(), - Name: "node1", - Owner: "0x23456", - Description: "my org", - DX: fftypes.DXInfo{ - Peer: "peer1", - Endpoint: fftypes.JSONObject{"some": "info"}, - }, - } - b, err := json.Marshal(&node) - assert.NoError(t, err) - data := &fftypes.Data{ - Value: fftypes.JSONAnyPtrBytes(b), - } + action, err := dh.handleDeprecatedNodeBroadcast(ctx, bs, msg, []*fftypes.Data{data}) + assert.Equal(t, HandlerResult{Action: ActionRetry}, action) + assert.Regexp(t, "pop", err) - mdi := dh.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", mock.Anything, "0x23456").Return(nil, fmt.Errorf("pop")) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ - Header: fftypes.MessageHeader{ - Namespace: "ns1", - Identity: fftypes.Identity{ - Author: "did:firefly:org/0x23456", - Key: "0x23456", - }, - Tag: string(fftypes.SystemTagDefineNode), - }, - }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionRetry, action) - assert.EqualError(t, err, "pop") + mim.AssertExpectations(t) + bs.assertNoFinalizers() - mdi.AssertExpectations(t) } -func TestHandleDefinitionBroadcastNodeValidateFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) - - node := &fftypes.Node{ - ID: fftypes.NewUUID(), - Name: "node1", - Owner: "0x23456", - Description: string(make([]byte, 4097)), - DX: fftypes.DXInfo{ - Peer: "peer1", - Endpoint: fftypes.JSONObject{"some": "info"}, - }, - } - b, err := json.Marshal(&node) - assert.NoError(t, err) - data := &fftypes.Data{ - Value: fftypes.JSONAnyPtrBytes(b), - } +func TestHandleDeprecatedNodeDefinitionOrgNotFound(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ - Header: fftypes.MessageHeader{ - Namespace: "ns1", - Identity: fftypes.Identity{ - Author: "did:firefly:org/0x23456", - Key: "0x23456", - }, - Tag: string(fftypes.SystemTagDefineNode), - }, - }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionReject, action) - assert.NoError(t, err) -} + node, msg, data := testDeprecatedRootNode(t) -func TestHandleDefinitionBroadcastNodeUnmarshalFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", ctx, []fftypes.IdentityType{fftypes.IdentityTypeOrg}, fftypes.SystemNamespace, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: node.Owner, + }).Return(nil, nil) - data := &fftypes.Data{ - Value: fftypes.JSONAnyPtr(`!json`), - } - - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ - Header: fftypes.MessageHeader{ - Namespace: "ns1", - Identity: fftypes.Identity{ - Author: "did:firefly:org/0x23456", - Key: "0x23456", - }, - Tag: string(fftypes.SystemTagDefineNode), - }, - }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionReject, action) + action, err := dh.handleDeprecatedNodeBroadcast(ctx, bs, msg, []*fftypes.Data{data}) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) assert.NoError(t, err) + + mim.AssertExpectations(t) + bs.assertNoFinalizers() + } diff --git a/internal/definitions/definition_handler_network_org.go b/internal/definitions/definition_handler_network_org.go index 
9695f2f205..93e3c08c55 100644 --- a/internal/definitions/definition_handler_network_org.go +++ b/internal/definitions/definition_handler_network_org.go @@ -19,61 +19,17 @@ package definitions import ( "context" - "github.com/hyperledger/firefly/internal/log" "github.com/hyperledger/firefly/pkg/fftypes" ) -func (dh *definitionHandlers) handleOrganizationBroadcast(ctx context.Context, msg *fftypes.Message, data []*fftypes.Data) (DefinitionMessageAction, *DefinitionBatchActions, error) { - l := log.L(ctx) +func (dh *definitionHandlers) handleDeprecatedOrganizationBroadcast(ctx context.Context, state DefinitionBatchState, msg *fftypes.Message, data []*fftypes.Data) (HandlerResult, error) { - var org fftypes.Organization - valid := dh.getSystemBroadcastPayload(ctx, msg, data, &org) + var orgOld fftypes.DeprecatedOrganization + valid := dh.getSystemBroadcastPayload(ctx, msg, data, &orgOld) if !valid { - return ActionReject, nil, nil + return HandlerResult{Action: ActionReject}, nil } - if err := org.Validate(ctx, true); err != nil { - l.Warnf("Unable to process organization broadcast %s - validate failed: %s", msg.Header.ID, err) - return ActionReject, nil, nil - } - - if org.Parent != "" { - parent, err := dh.database.GetOrganizationByIdentity(ctx, org.Parent) - if err != nil { - return ActionRetry, nil, err // We only return database errors - } - if parent == nil { - l.Warnf("Unable to process organization broadcast %s - parent identity not found: %s", msg.Header.ID, org.Parent) - return ActionReject, nil, nil - } - - if msg.Header.Key != parent.Identity { - l.Warnf("Unable to process organization broadcast %s - incorrect signature. Expected=%s Received=%s", msg.Header.ID, parent.Identity, msg.Header.Author) - return ActionReject, nil, nil - } - } - - existing, err := dh.database.GetOrganizationByIdentity(ctx, org.Identity) - if err == nil && existing == nil { - existing, err = dh.database.GetOrganizationByName(ctx, org.Name) - if err == nil && existing == nil { - existing, err = dh.database.GetOrganizationByID(ctx, org.ID) - } - } - if err != nil { - return ActionRetry, nil, err // We only return database errors - } - if existing != nil { - if existing.Parent != org.Parent { - l.Warnf("Unable to process organization broadcast %s - mismatch with existing %v", msg.Header.ID, existing.ID) - return ActionReject, nil, nil - } - org.ID = nil // we keep the existing ID - } - - if err = dh.database.UpsertOrganization(ctx, &org, true); err != nil { - return ActionRetry, nil, err - } + return dh.handleIdentityClaim(ctx, state, msg, orgOld.Migrated(), nil) - return ActionConfirm, nil, nil } diff --git a/internal/definitions/definition_handler_network_org_test.go b/internal/definitions/definition_handler_network_org_test.go index 0ede623a8e..d64c70b565 100644 --- a/internal/definitions/definition_handler_network_org_test.go +++ b/internal/definitions/definition_handler_network_org_test.go @@ -19,401 +19,114 @@ package definitions import ( "context" "encoding/json" - "fmt" "testing" "github.com/hyperledger/firefly/mocks/databasemocks" + "github.com/hyperledger/firefly/mocks/identitymanagermocks" + "github.com/hyperledger/firefly/pkg/database" "github.com/hyperledger/firefly/pkg/fftypes" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" ) -func TestHandleDefinitionBroadcastChildOrgOk(t *testing.T) { - dh := newTestDefinitionHandlers(t) - - parentOrg := &fftypes.Organization{ - ID: fftypes.NewUUID(), - Name: "org2", - Identity: "0x23456", - Description: "parent org", - } - - org := 
&fftypes.Organization{ - ID: fftypes.NewUUID(), - Name: "org1", - Identity: "0x12345", - Parent: "0x23456", - Description: "my org", - Profile: fftypes.JSONObject{"some": "info"}, +const oldOrgExampleMessage = `{ + "header": { + "id": "6c138623-d995-4c40-b17b-81a00e7d150c", + "type": "definition", + "txtype": "batch_pin", + "author": "did:firefly:org/f08153cc-c605-4239-9087-e08747e1fb4e", + "key": "0x214840c7c62cddf7a854a830d55018b38e4e78be", + "created": "2022-02-24T15:30:58.985588799Z", + "namespace": "ff_system", + "topics": [ + "ff_organizations" + ], + "tag": "ff_define_organization", + "datahash": "ab59ea680208bef4a303f9c637efca1766228c8a8ae69881da5cc5f0512e1e1e" + }, + "hash": "be3ed2c1445c961d897f13f042e002d523d607f0557fd1c6f4597057606129dc", + "batch": "55cccf24-599e-4295-bb7a-6144cee6d91a", + "state": "confirmed", + "confirmed": "2022-02-24T15:31:01.656922592Z", + "data": [ + { + "id": "cb87d285-bbcc-4a0e-b9a2-89877894b89a", + "hash": "ee2241d6dc61fe2288b7abe65daace1e2ae18bfadbf5d905e98f4b25cdf64f9f", + "validator": "definition", + "value": { + "id": "f08153cc-c605-4239-9087-e08747e1fb4e", + "identity": "0x214840c7c62cddf7a854a830d55018b38e4e78be", + "name": "org_0", + "created": "2022-02-24T15:30:58.974970049Z" + } + } + ] + }` + +func testDeprecatedRootOrg(t *testing.T) (*fftypes.DeprecatedOrganization, *fftypes.Message, *fftypes.Data) { + + var msgInOut fftypes.MessageInOut + err := json.Unmarshal([]byte(oldOrgExampleMessage), &msgInOut) + assert.NoError(t, err) + + var org fftypes.DeprecatedOrganization + err = json.Unmarshal(msgInOut.InlineData[0].Value.Bytes(), &org) + assert.NoError(t, err) + + return &org, &msgInOut.Message, &fftypes.Data{ + ID: msgInOut.InlineData[0].ID, + Validator: msgInOut.InlineData[0].Validator, + Namespace: msgInOut.Header.Namespace, + Hash: msgInOut.InlineData[0].Hash, + Value: msgInOut.InlineData[0].Value, } - b, err := json.Marshal(&org) - assert.NoError(t, err) - data := &fftypes.Data{ - Value: fftypes.JSONAnyPtrBytes(b), - } - - mdi := dh.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", mock.Anything, "0x23456").Return(parentOrg, nil) - mdi.On("GetOrganizationByIdentity", mock.Anything, "0x12345").Return(nil, nil) - mdi.On("GetOrganizationByName", mock.Anything, "org1").Return(nil, nil) - mdi.On("GetOrganizationByID", mock.Anything, org.ID).Return(nil, nil) - mdi.On("UpsertOrganization", mock.Anything, mock.Anything, true).Return(nil) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ - Header: fftypes.MessageHeader{ - Namespace: "ns1", - Identity: fftypes.Identity{ - Author: "did:firefly:org/0x23456", - Key: "0x23456", - }, - Tag: string(fftypes.SystemTagDefineOrganization), - }, - }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionConfirm, action) - assert.NoError(t, err) - - mdi.AssertExpectations(t) } -func TestHandleDefinitionBroadcastChildOrgDupOk(t *testing.T) { - dh := newTestDefinitionHandlers(t) +func TestHandleDeprecatedOrgDefinitionOK(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() - parentOrg := &fftypes.Organization{ - ID: fftypes.NewUUID(), - Name: "org2", - Identity: "0x23456", - Description: "parent org", - } + org, msg, data := testDeprecatedRootOrg(t) - org := &fftypes.Organization{ - ID: fftypes.NewUUID(), - Name: "org1", - Identity: "0x12345", - Parent: "0x23456", - Description: "my org", - Profile: fftypes.JSONObject{"some": "info"}, - } - b, err := json.Marshal(&org) - assert.NoError(t, err) - data := 
&fftypes.Data{ - Value: fftypes.JSONAnyPtrBytes(b), - } + mim := dh.identity.(*identitymanagermocks.Manager) + mim.On("VerifyIdentityChain", ctx, mock.Anything).Return(nil, false, nil) mdi := dh.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", mock.Anything, "0x23456").Return(parentOrg, nil) - mdi.On("GetOrganizationByIdentity", mock.Anything, "0x12345").Return(org, nil) - mdi.On("UpsertOrganization", mock.Anything, mock.Anything, true).Return(nil) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ - Header: fftypes.MessageHeader{ - Namespace: "ns1", - Identity: fftypes.Identity{ - Author: "did:firefly:org/0x23456", - Key: "0x23456", - }, - Tag: string(fftypes.SystemTagDefineOrganization), - }, - }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionConfirm, action) - assert.NoError(t, err) - + mdi.On("GetIdentityByName", ctx, fftypes.IdentityTypeOrg, fftypes.SystemNamespace, org.Name).Return(nil, nil) + mdi.On("GetIdentityByID", ctx, org.ID).Return(nil, nil) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, fftypes.SystemNamespace, msg.Header.Key).Return(nil, nil) + mdi.On("UpsertIdentity", ctx, mock.MatchedBy(func(identity *fftypes.Identity) bool { + assert.Equal(t, *msg.Header.ID, *identity.Messages.Claim) + return true + }), database.UpsertOptimizationNew).Return(nil) + mdi.On("UpsertVerifier", ctx, mock.MatchedBy(func(verifier *fftypes.Verifier) bool { + assert.Equal(t, fftypes.VerifierTypeEthAddress, verifier.Type) + assert.Equal(t, msg.Header.Key, verifier.Value) + assert.Equal(t, *org.ID, *verifier.Identity) + return true + }), database.UpsertOptimizationNew).Return(nil) + mdi.On("InsertEvent", mock.Anything, mock.MatchedBy(func(event *fftypes.Event) bool { + return event.Type == fftypes.EventTypeIdentityConfirmed + })).Return(nil) + + action, err := dh.HandleDefinitionBroadcast(ctx, bs, msg, []*fftypes.Data{data}, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionConfirm}, action) + assert.NoError(t, err) + + err = bs.finalizers[0](ctx) + assert.NoError(t, err) + + mim.AssertExpectations(t) mdi.AssertExpectations(t) } -func TestHandleDefinitionBroadcastChildOrgBadKey(t *testing.T) { - dh := newTestDefinitionHandlers(t) +func TestHandleDeprecatedOrgDefinitionBadData(t *testing.T) { + dh, bs := newTestDefinitionHandlers(t) + ctx := context.Background() - parentOrg := &fftypes.Organization{ - ID: fftypes.NewUUID(), - Name: "org2", - Identity: "0x23456", - Description: "parent org", - } - - org := &fftypes.Organization{ - ID: fftypes.NewUUID(), - Name: "org1", - Identity: "0x12345", - Parent: "0x23456", - Description: "my org", - Profile: fftypes.JSONObject{"some": "info"}, - } - b, err := json.Marshal(&org) + action, err := dh.handleDeprecatedOrganizationBroadcast(ctx, bs, &fftypes.Message{}, []*fftypes.Data{}) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) assert.NoError(t, err) - data := &fftypes.Data{ - Value: fftypes.JSONAnyPtrBytes(b), - } - - mdi := dh.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", mock.Anything, "0x23456").Return(parentOrg, nil) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ - Header: fftypes.MessageHeader{ - Namespace: "ns1", - Identity: fftypes.Identity{ - Author: "did:firefly:org/0x23456", - Key: "0x34567", - }, - Tag: string(fftypes.SystemTagDefineOrganization), - }, - }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionReject, action) - 
assert.NoError(t, err) - - mdi.AssertExpectations(t) -} -func TestHandleDefinitionBroadcastOrgDupMismatch(t *testing.T) { - dh := newTestDefinitionHandlers(t) - - org := &fftypes.Organization{ - ID: fftypes.NewUUID(), - Name: "org1", - Identity: "0x12345", - Parent: "", // the mismatch - Description: "my org", - Profile: fftypes.JSONObject{"some": "info"}, - } - b, err := json.Marshal(&org) - assert.NoError(t, err) - data := &fftypes.Data{ - Value: fftypes.JSONAnyPtrBytes(b), - } - - mdi := dh.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", mock.Anything, "0x12345").Return(&fftypes.Organization{ID: fftypes.NewUUID(), Identity: "0x12345", Parent: "0x9999"}, nil) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ - Header: fftypes.MessageHeader{ - Namespace: "ns1", - Identity: fftypes.Identity{ - Author: "did:firefly:org/0x23456", - Key: "0x23456", - }, - Tag: string(fftypes.SystemTagDefineOrganization), - }, - }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionReject, action) - assert.NoError(t, err) - - mdi.AssertExpectations(t) -} - -func TestHandleDefinitionBroadcastOrgUpsertFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) - - org := &fftypes.Organization{ - ID: fftypes.NewUUID(), - Name: "org1", - Identity: "0x12345", - Description: "my org", - Profile: fftypes.JSONObject{"some": "info"}, - } - b, err := json.Marshal(&org) - assert.NoError(t, err) - data := &fftypes.Data{ - Value: fftypes.JSONAnyPtrBytes(b), - } - - mdi := dh.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", mock.Anything, "0x12345").Return(nil, nil) - mdi.On("GetOrganizationByName", mock.Anything, "org1").Return(nil, nil) - mdi.On("GetOrganizationByID", mock.Anything, org.ID).Return(nil, nil) - mdi.On("UpsertOrganization", mock.Anything, mock.Anything, true).Return(fmt.Errorf("pop")) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ - Header: fftypes.MessageHeader{ - Namespace: "ns1", - Identity: fftypes.Identity{ - Author: "0x12345", - }, - Tag: string(fftypes.SystemTagDefineOrganization), - }, - }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionRetry, action) - assert.EqualError(t, err, "pop") - - mdi.AssertExpectations(t) -} - -func TestHandleDefinitionBroadcastOrgGetOrgFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) - - org := &fftypes.Organization{ - ID: fftypes.NewUUID(), - Name: "org1", - Identity: "0x12345", - Description: "my org", - Profile: fftypes.JSONObject{"some": "info"}, - } - b, err := json.Marshal(&org) - assert.NoError(t, err) - data := &fftypes.Data{ - Value: fftypes.JSONAnyPtrBytes(b), - } - - mdi := dh.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", mock.Anything, "0x12345").Return(nil, fmt.Errorf("pop")) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ - Header: fftypes.MessageHeader{ - Namespace: "ns1", - Identity: fftypes.Identity{ - Author: "0x12345", - }, - Tag: string(fftypes.SystemTagDefineOrganization), - }, - }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionRetry, action) - assert.EqualError(t, err, "pop") - - mdi.AssertExpectations(t) -} - -func TestHandleDefinitionBroadcastOrgAuthorMismatch(t *testing.T) { - dh := newTestDefinitionHandlers(t) - - org := &fftypes.Organization{ - ID: fftypes.NewUUID(), - Name: "org1", - Identity: "0x12345", - Description: "my org", - Profile: fftypes.JSONObject{"some": "info"}, - } - b, err := 
json.Marshal(&org) - assert.NoError(t, err) - data := &fftypes.Data{ - Value: fftypes.JSONAnyPtrBytes(b), - } - - mdi := dh.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", mock.Anything, "0x12345").Return(&fftypes.Organization{ID: fftypes.NewUUID(), Identity: "0x12345", Parent: "0x9999"}, nil) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ - Header: fftypes.MessageHeader{ - Namespace: "ns1", - Identity: fftypes.Identity{ - Author: "did:firefly:org/0x23456", - Key: "0x23456", - }, - Tag: string(fftypes.SystemTagDefineOrganization), - }, - }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionReject, action) - assert.NoError(t, err) - - mdi.AssertExpectations(t) -} - -func TestHandleDefinitionBroadcastGetParentFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) - - org := &fftypes.Organization{ - ID: fftypes.NewUUID(), - Name: "org1", - Identity: "0x12345", - Parent: "0x23456", - Description: "my org", - Profile: fftypes.JSONObject{"some": "info"}, - } - b, err := json.Marshal(&org) - assert.NoError(t, err) - data := &fftypes.Data{ - Value: fftypes.JSONAnyPtrBytes(b), - } - - mdi := dh.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", mock.Anything, "0x23456").Return(nil, fmt.Errorf("pop")) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ - Header: fftypes.MessageHeader{ - Namespace: "ns1", - Identity: fftypes.Identity{ - Author: "did:firefly:org/0x23456", - Key: "0x23456", - }, - Tag: string(fftypes.SystemTagDefineOrganization), - }, - }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionRetry, action) - assert.EqualError(t, err, "pop") - - mdi.AssertExpectations(t) -} - -func TestHandleDefinitionBroadcastGetParentNotFound(t *testing.T) { - dh := newTestDefinitionHandlers(t) - - org := &fftypes.Organization{ - ID: fftypes.NewUUID(), - Name: "org1", - Identity: "0x12345", - Parent: "0x23456", - Description: "my org", - Profile: fftypes.JSONObject{"some": "info"}, - } - b, err := json.Marshal(&org) - assert.NoError(t, err) - data := &fftypes.Data{ - Value: fftypes.JSONAnyPtrBytes(b), - } - - mdi := dh.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", mock.Anything, "0x23456").Return(nil, nil) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ - Header: fftypes.MessageHeader{ - Namespace: "ns1", - Identity: fftypes.Identity{ - Author: "did:firefly:org/0x23456", - Key: "0x23456", - }, - Tag: string(fftypes.SystemTagDefineOrganization), - }, - }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionReject, action) - assert.NoError(t, err) - - mdi.AssertExpectations(t) -} - -func TestHandleDefinitionBroadcastValidateFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) - - org := &fftypes.Organization{ - ID: fftypes.NewUUID(), - Name: "org1", - Identity: "0x12345", - Description: string(make([]byte, 4097)), - } - b, err := json.Marshal(&org) - assert.NoError(t, err) - data := &fftypes.Data{ - Value: fftypes.JSONAnyPtrBytes(b), - } - - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ - Header: fftypes.MessageHeader{ - Namespace: "ns1", - Identity: fftypes.Identity{ - Author: "did:firefly:org/0x23456", - Key: "0x23456", - }, - Tag: string(fftypes.SystemTagDefineOrganization), - }, - }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionReject, action) - assert.NoError(t, err) -} - -func 
TestHandleDefinitionBroadcastUnmarshalFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) - - data := &fftypes.Data{ - Value: fftypes.JSONAnyPtr(`!json`), - } - - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ - Header: fftypes.MessageHeader{ - Namespace: "ns1", - Identity: fftypes.Identity{ - Author: "did:firefly:org/0x23456", - Key: "0x23456", - }, - Tag: string(fftypes.SystemTagDefineOrganization), - }, - }, []*fftypes.Data{data}, fftypes.NewUUID()) - assert.Equal(t, ActionReject, action) - assert.NoError(t, err) + bs.assertNoFinalizers() } diff --git a/internal/definitions/definition_handler_test.go b/internal/definitions/definition_handler_test.go index dc437129ca..7d5d31d67b 100644 --- a/internal/definitions/definition_handler_test.go +++ b/internal/definitions/definition_handler_test.go @@ -21,41 +21,78 @@ import ( "testing" "github.com/hyperledger/firefly/mocks/assetmocks" + "github.com/hyperledger/firefly/mocks/blockchainmocks" "github.com/hyperledger/firefly/mocks/broadcastmocks" "github.com/hyperledger/firefly/mocks/contractmocks" "github.com/hyperledger/firefly/mocks/databasemocks" "github.com/hyperledger/firefly/mocks/dataexchangemocks" "github.com/hyperledger/firefly/mocks/datamocks" + "github.com/hyperledger/firefly/mocks/identitymanagermocks" "github.com/hyperledger/firefly/mocks/privatemessagingmocks" "github.com/hyperledger/firefly/pkg/fftypes" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" ) -func newTestDefinitionHandlers(t *testing.T) *definitionHandlers { +func newTestDefinitionHandlers(t *testing.T) (*definitionHandlers, *testDefinitionBatchState) { mdi := &databasemocks.Plugin{} + mbi := &blockchainmocks.Plugin{} mdx := &dataexchangemocks.Plugin{} mdm := &datamocks.Manager{} + mim := &identitymanagermocks.Manager{} mbm := &broadcastmocks.Manager{} mpm := &privatemessagingmocks.Manager{} mam := &assetmocks.Manager{} mcm := &contractmocks.Manager{} - return NewDefinitionHandlers(mdi, mdx, mdm, mbm, mpm, mam, mcm).(*definitionHandlers) + mbi.On("VerifierType").Return(fftypes.VerifierTypeEthAddress).Maybe() + return NewDefinitionHandlers(mdi, mbi, mdx, mdm, mim, mbm, mpm, mam, mcm).(*definitionHandlers), newTestDefinitionBatchState(t) +} + +type testDefinitionBatchState struct { + t *testing.T + preFinalizers []func(ctx context.Context) error + finalizers []func(ctx context.Context) error + pendingConfirms map[fftypes.UUID]*fftypes.Message +} + +func newTestDefinitionBatchState(t *testing.T) *testDefinitionBatchState { + return &testDefinitionBatchState{ + t: t, + pendingConfirms: make(map[fftypes.UUID]*fftypes.Message), + } +} + +func (bs *testDefinitionBatchState) AddPreFinalize(pf func(ctx context.Context) error) { + bs.preFinalizers = append(bs.preFinalizers, pf) +} + +func (bs *testDefinitionBatchState) AddFinalize(pf func(ctx context.Context) error) { + bs.finalizers = append(bs.finalizers, pf) +} + +func (bs *testDefinitionBatchState) GetPendingConfirm() map[fftypes.UUID]*fftypes.Message { + return bs.pendingConfirms +} + +func (bs *testDefinitionBatchState) assertNoFinalizers() { + assert.Empty(bs.t, bs.preFinalizers) + assert.Empty(bs.t, bs.finalizers) } func TestHandleDefinitionBroadcastUnknown(t *testing.T) { - dh := newTestDefinitionHandlers(t) - action, _, err := dh.HandleDefinitionBroadcast(context.Background(), &fftypes.Message{ + dh, bs := newTestDefinitionHandlers(t) + action, err := dh.HandleDefinitionBroadcast(context.Background(), bs, &fftypes.Message{ Header: 
fftypes.MessageHeader{ Tag: "unknown", }, }, []*fftypes.Data{}, fftypes.NewUUID()) - assert.Equal(t, ActionReject, action) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) assert.NoError(t, err) + bs.assertNoFinalizers() } func TestGetSystemBroadcastPayloadMissingData(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, _ := newTestDefinitionHandlers(t) valid := dh.getSystemBroadcastPayload(context.Background(), &fftypes.Message{ Header: fftypes.MessageHeader{ Tag: "unknown", @@ -65,7 +102,7 @@ func TestGetSystemBroadcastPayloadMissingData(t *testing.T) { } func TestGetSystemBroadcastPayloadBadJSON(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, _ := newTestDefinitionHandlers(t) valid := dh.getSystemBroadcastPayload(context.Background(), &fftypes.Message{ Header: fftypes.MessageHeader{ Tag: "unknown", @@ -77,7 +114,7 @@ func TestGetSystemBroadcastPayloadBadJSON(t *testing.T) { func TestPrivateMessagingPassthroughs(t *testing.T) { ctx := context.Background() - dh := newTestDefinitionHandlers(t) + dh, _ := newTestDefinitionHandlers(t) mpm := dh.messaging.(*privatemessagingmocks.Manager) mpm.On("GetGroupByID", ctx, mock.Anything).Return(nil, nil) mpm.On("GetGroupsNS", ctx, "ns1", mock.Anything).Return(nil, nil, nil) diff --git a/internal/definitions/definition_handler_tokenpool.go b/internal/definitions/definition_handler_tokenpool.go index 071b901a6b..1def4a9048 100644 --- a/internal/definitions/definition_handler_tokenpool.go +++ b/internal/definitions/definition_handler_tokenpool.go @@ -42,42 +42,45 @@ func (dh *definitionHandlers) persistTokenPool(ctx context.Context, announce *ff return true, nil } -func (dh *definitionHandlers) handleTokenPoolBroadcast(ctx context.Context, msg *fftypes.Message, data []*fftypes.Data) (DefinitionMessageAction, *DefinitionBatchActions, error) { +func (dh *definitionHandlers) handleTokenPoolBroadcast(ctx context.Context, state DefinitionBatchState, msg *fftypes.Message, data []*fftypes.Data) (HandlerResult, error) { var announce fftypes.TokenPoolAnnouncement if valid := dh.getSystemBroadcastPayload(ctx, msg, data, &announce); !valid { - return ActionReject, nil, nil + return HandlerResult{Action: ActionReject}, nil } pool := announce.Pool pool.Message = msg.Header.ID + // Set an event correlator, so that if we reject then the sync-async bridge action can know + // from the event (without downloading and parsing the msg) + correlator := pool.ID + if err := pool.Validate(ctx); err != nil { log.L(ctx).Warnf("Token pool '%s' rejected - validate failed: %s", pool.ID, err) - return ActionReject, nil, nil + return HandlerResult{Action: ActionReject, CustomCorrelator: correlator}, nil } // Check if pool has already been confirmed on chain (and confirm the message if so) if existingPool, err := dh.database.GetTokenPoolByID(ctx, pool.ID); err != nil { - return ActionRetry, nil, err + return HandlerResult{Action: ActionRetry}, err } else if existingPool != nil && existingPool.State == fftypes.TokenPoolStateConfirmed { - return ActionConfirm, nil, nil + return HandlerResult{Action: ActionConfirm, CustomCorrelator: correlator}, nil } if valid, err := dh.persistTokenPool(ctx, &announce); err != nil { - return ActionRetry, nil, err + return HandlerResult{Action: ActionRetry}, err } else if !valid { - return ActionReject, nil, nil + return HandlerResult{Action: ActionReject, CustomCorrelator: correlator}, nil } // Message will remain unconfirmed, but plugin will be notified to activate the pool // This will ultimately trigger a pool creation 
event and a rewind - return ActionWait, &DefinitionBatchActions{ - PreFinalize: func(ctx context.Context) error { - if err := dh.assets.ActivateTokenPool(ctx, pool, announce.Event); err != nil { - log.L(ctx).Errorf("Failed to activate token pool '%s': %s", pool.ID, err) - return err - } - return nil - }, - }, nil + state.AddPreFinalize(func(ctx context.Context) error { + if err := dh.assets.ActivateTokenPool(ctx, pool, announce.Event); err != nil { + log.L(ctx).Errorf("Failed to activate token pool '%s': %s", pool.ID, err) + return err + } + return nil + }) + return HandlerResult{Action: ActionWait, CustomCorrelator: correlator}, nil } diff --git a/internal/definitions/definition_handler_tokenpool_test.go b/internal/definitions/definition_handler_tokenpool_test.go index c246fb11ef..2d466a5258 100644 --- a/internal/definitions/definition_handler_tokenpool_test.go +++ b/internal/definitions/definition_handler_tokenpool_test.go @@ -53,7 +53,7 @@ func buildPoolDefinitionMessage(announce *fftypes.TokenPoolAnnouncement) (*fftyp msg := &fftypes.Message{ Header: fftypes.MessageHeader{ ID: fftypes.NewUUID(), - Tag: string(fftypes.SystemTagDefinePool), + Tag: fftypes.SystemTagDefinePool, }, } b, err := json.Marshal(announce) @@ -67,7 +67,7 @@ func buildPoolDefinitionMessage(announce *fftypes.TokenPoolAnnouncement) (*fftyp } func TestHandleDefinitionBroadcastTokenPoolActivateOK(t *testing.T) { - sh := newTestDefinitionHandlers(t) + sh, bs := newTestDefinitionHandlers(t) announce := newPoolAnnouncement() pool := announce.Pool @@ -82,18 +82,18 @@ func TestHandleDefinitionBroadcastTokenPoolActivateOK(t *testing.T) { })).Return(nil) mam.On("ActivateTokenPool", context.Background(), mock.AnythingOfType("*fftypes.TokenPool"), mock.AnythingOfType("*fftypes.BlockchainEvent")).Return(nil) - action, ba, err := sh.HandleDefinitionBroadcast(context.Background(), msg, data, fftypes.NewUUID()) - assert.Equal(t, ActionWait, action) + action, err := sh.HandleDefinitionBroadcast(context.Background(), bs, msg, data, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionWait, CustomCorrelator: pool.ID}, action) assert.NoError(t, err) - err = ba.PreFinalize(context.Background()) + err = bs.preFinalizers[0](context.Background()) assert.NoError(t, err) mdi.AssertExpectations(t) } func TestHandleDefinitionBroadcastTokenPoolGetPoolFail(t *testing.T) { - sh := newTestDefinitionHandlers(t) + sh, bs := newTestDefinitionHandlers(t) announce := newPoolAnnouncement() pool := announce.Pool @@ -103,15 +103,16 @@ func TestHandleDefinitionBroadcastTokenPoolGetPoolFail(t *testing.T) { mdi := sh.database.(*databasemocks.Plugin) mdi.On("GetTokenPoolByID", context.Background(), pool.ID).Return(nil, fmt.Errorf("pop")) - action, _, err := sh.HandleDefinitionBroadcast(context.Background(), msg, data, fftypes.NewUUID()) - assert.Equal(t, ActionRetry, action) + action, err := sh.HandleDefinitionBroadcast(context.Background(), bs, msg, data, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionRetry}, action) assert.EqualError(t, err, "pop") mdi.AssertExpectations(t) + bs.assertNoFinalizers() } func TestHandleDefinitionBroadcastTokenPoolExisting(t *testing.T) { - sh := newTestDefinitionHandlers(t) + sh, bs := newTestDefinitionHandlers(t) announce := newPoolAnnouncement() pool := announce.Pool @@ -126,15 +127,17 @@ func TestHandleDefinitionBroadcastTokenPoolExisting(t *testing.T) { })).Return(nil) mam.On("ActivateTokenPool", context.Background(), mock.AnythingOfType("*fftypes.TokenPool"), 
mock.AnythingOfType("*fftypes.BlockchainEvent")).Return(nil) - action, _, err := sh.HandleDefinitionBroadcast(context.Background(), msg, data, fftypes.NewUUID()) - assert.Equal(t, ActionWait, action) + action, err := sh.HandleDefinitionBroadcast(context.Background(), bs, msg, data, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionWait, CustomCorrelator: pool.ID}, action) + assert.NoError(t, err) + + err = bs.preFinalizers[0](context.Background()) assert.NoError(t, err) - mdi.AssertExpectations(t) } func TestHandleDefinitionBroadcastTokenPoolExistingConfirmed(t *testing.T) { - sh := newTestDefinitionHandlers(t) + sh, bs := newTestDefinitionHandlers(t) announce := newPoolAnnouncement() pool := announce.Pool @@ -147,15 +150,15 @@ func TestHandleDefinitionBroadcastTokenPoolExistingConfirmed(t *testing.T) { mdi := sh.database.(*databasemocks.Plugin) mdi.On("GetTokenPoolByID", context.Background(), pool.ID).Return(existing, nil) - action, _, err := sh.HandleDefinitionBroadcast(context.Background(), msg, data, fftypes.NewUUID()) - assert.Equal(t, ActionConfirm, action) + action, err := sh.HandleDefinitionBroadcast(context.Background(), bs, msg, data, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionConfirm, CustomCorrelator: pool.ID}, action) assert.NoError(t, err) mdi.AssertExpectations(t) } func TestHandleDefinitionBroadcastTokenPoolIDMismatch(t *testing.T) { - sh := newTestDefinitionHandlers(t) + sh, bs := newTestDefinitionHandlers(t) announce := newPoolAnnouncement() pool := announce.Pool @@ -168,15 +171,16 @@ func TestHandleDefinitionBroadcastTokenPoolIDMismatch(t *testing.T) { return *p.ID == *pool.ID && p.Message == msg.Header.ID })).Return(database.IDMismatch) - action, _, err := sh.HandleDefinitionBroadcast(context.Background(), msg, data, fftypes.NewUUID()) - assert.Equal(t, ActionReject, action) + action, err := sh.HandleDefinitionBroadcast(context.Background(), bs, msg, data, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionReject, CustomCorrelator: pool.ID}, action) assert.NoError(t, err) mdi.AssertExpectations(t) + bs.assertNoFinalizers() } func TestHandleDefinitionBroadcastTokenPoolFailUpsert(t *testing.T) { - sh := newTestDefinitionHandlers(t) + sh, bs := newTestDefinitionHandlers(t) announce := newPoolAnnouncement() pool := announce.Pool @@ -189,15 +193,16 @@ func TestHandleDefinitionBroadcastTokenPoolFailUpsert(t *testing.T) { return *p.ID == *pool.ID && p.Message == msg.Header.ID })).Return(fmt.Errorf("pop")) - action, _, err := sh.HandleDefinitionBroadcast(context.Background(), msg, data, fftypes.NewUUID()) - assert.Equal(t, ActionRetry, action) + action, err := sh.HandleDefinitionBroadcast(context.Background(), bs, msg, data, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionRetry}, action) assert.EqualError(t, err, "pop") mdi.AssertExpectations(t) + bs.assertNoFinalizers() } func TestHandleDefinitionBroadcastTokenPoolActivateFail(t *testing.T) { - sh := newTestDefinitionHandlers(t) + sh, bs := newTestDefinitionHandlers(t) announce := newPoolAnnouncement() pool := announce.Pool @@ -212,18 +217,18 @@ func TestHandleDefinitionBroadcastTokenPoolActivateFail(t *testing.T) { })).Return(nil) mam.On("ActivateTokenPool", context.Background(), mock.AnythingOfType("*fftypes.TokenPool"), mock.AnythingOfType("*fftypes.BlockchainEvent")).Return(fmt.Errorf("pop")) - action, batchAction, err := sh.HandleDefinitionBroadcast(context.Background(), msg, data, fftypes.NewUUID()) - assert.Equal(t, ActionWait, action) + action, err := 
sh.HandleDefinitionBroadcast(context.Background(), bs, msg, data, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionWait, CustomCorrelator: pool.ID}, action) assert.NoError(t, err) - err = batchAction.PreFinalize(context.Background()) + err = bs.preFinalizers[0](context.Background()) assert.EqualError(t, err, "pop") mdi.AssertExpectations(t) } func TestHandleDefinitionBroadcastTokenPoolValidateFail(t *testing.T) { - sh := newTestDefinitionHandlers(t) + sh, bs := newTestDefinitionHandlers(t) announce := &fftypes.TokenPoolAnnouncement{ Pool: &fftypes.TokenPool{}, @@ -232,22 +237,24 @@ func TestHandleDefinitionBroadcastTokenPoolValidateFail(t *testing.T) { msg, data, err := buildPoolDefinitionMessage(announce) assert.NoError(t, err) - action, _, err := sh.HandleDefinitionBroadcast(context.Background(), msg, data, fftypes.NewUUID()) - assert.Equal(t, ActionReject, action) + action, err := sh.HandleDefinitionBroadcast(context.Background(), bs, msg, data, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) assert.NoError(t, err) + bs.assertNoFinalizers() } func TestHandleDefinitionBroadcastTokenPoolBadMessage(t *testing.T) { - sh := newTestDefinitionHandlers(t) + sh, bs := newTestDefinitionHandlers(t) msg := &fftypes.Message{ Header: fftypes.MessageHeader{ ID: fftypes.NewUUID(), - Tag: string(fftypes.SystemTagDefinePool), + Tag: fftypes.SystemTagDefinePool, }, } - action, _, err := sh.HandleDefinitionBroadcast(context.Background(), msg, nil, fftypes.NewUUID()) - assert.Equal(t, ActionReject, action) + action, err := sh.HandleDefinitionBroadcast(context.Background(), bs, msg, nil, fftypes.NewUUID()) + assert.Equal(t, HandlerResult{Action: ActionReject}, action) assert.NoError(t, err) + bs.assertNoFinalizers() } diff --git a/internal/definitions/reply_sender_test.go b/internal/definitions/reply_sender_test.go index 074d79a91d..8c590e4390 100644 --- a/internal/definitions/reply_sender_test.go +++ b/internal/definitions/reply_sender_test.go @@ -29,7 +29,7 @@ import ( ) func TestSendReplyBroadcastFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, _ := newTestDefinitionHandlers(t) mms := &sysmessagingmocks.MessageSender{} mbm := dh.broadcast.(*broadcastmocks.Manager) mbm.On("NewBroadcast", "ns1", mock.Anything).Return(mms) @@ -45,7 +45,7 @@ func TestSendReplyBroadcastFail(t *testing.T) { } func TestSendReplyPrivateFail(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, _ := newTestDefinitionHandlers(t) mms := &sysmessagingmocks.MessageSender{} mpm := dh.messaging.(*privatemessagingmocks.Manager) mpm.On("NewMessage", "ns1", mock.Anything).Return(mms) @@ -67,7 +67,7 @@ func TestSendReplyPrivateFail(t *testing.T) { } func TestSendReplyPrivateOk(t *testing.T) { - dh := newTestDefinitionHandlers(t) + dh, _ := newTestDefinitionHandlers(t) msg := &fftypes.Message{ Header: fftypes.MessageHeader{ diff --git a/internal/events/aggregator.go b/internal/events/aggregator.go index fbb058fc95..4e2459e5e4 100644 --- a/internal/events/aggregator.go +++ b/internal/events/aggregator.go @@ -24,9 +24,11 @@ import ( "github.com/hyperledger/firefly/internal/config" "github.com/hyperledger/firefly/internal/data" "github.com/hyperledger/firefly/internal/definitions" + "github.com/hyperledger/firefly/internal/identity" "github.com/hyperledger/firefly/internal/log" "github.com/hyperledger/firefly/internal/metrics" "github.com/hyperledger/firefly/internal/retry" + "github.com/hyperledger/firefly/pkg/blockchain" "github.com/hyperledger/firefly/pkg/database" 
"github.com/hyperledger/firefly/pkg/fftypes" ) @@ -36,29 +38,33 @@ const ( ) type aggregator struct { - ctx context.Context - database database.Plugin - definitions definitions.DefinitionHandlers - data data.Manager - eventPoller *eventPoller - newPins chan int64 - offchainBatches chan *fftypes.UUID - queuedRewinds chan *fftypes.UUID - retry *retry.Retry - metrics metrics.Manager + ctx context.Context + database database.Plugin + definitions definitions.DefinitionHandlers + identity identity.Manager + data data.Manager + eventPoller *eventPoller + verifierType fftypes.VerifierType + newPins chan int64 + rewindBatches chan *fftypes.UUID + queuedRewinds chan *fftypes.UUID + retry *retry.Retry + metrics metrics.Manager } -func newAggregator(ctx context.Context, di database.Plugin, sh definitions.DefinitionHandlers, dm data.Manager, en *eventNotifier, mm metrics.Manager) *aggregator { +func newAggregator(ctx context.Context, di database.Plugin, bi blockchain.Plugin, sh definitions.DefinitionHandlers, im identity.Manager, dm data.Manager, en *eventNotifier, mm metrics.Manager) *aggregator { batchSize := config.GetInt(config.EventAggregatorBatchSize) ag := &aggregator{ - ctx: log.WithLogField(ctx, "role", "aggregator"), - database: di, - definitions: sh, - data: dm, - newPins: make(chan int64), - offchainBatches: make(chan *fftypes.UUID, 1), // hops to queuedRewinds with a shouldertab on the event poller - queuedRewinds: make(chan *fftypes.UUID, batchSize), - metrics: mm, + ctx: log.WithLogField(ctx, "role", "aggregator"), + database: di, + definitions: sh, + identity: im, + data: dm, + verifierType: bi.VerifierType(), + newPins: make(chan int64), + rewindBatches: make(chan *fftypes.UUID, 1), // hops to queuedRewinds with a shouldertab on the event poller + queuedRewinds: make(chan *fftypes.UUID, batchSize), + metrics: mm, } firstEvent := fftypes.SubOptsFirstEvent(config.GetString(config.EventAggregatorFirstEvent)) ag.eventPoller = newEventPoller(ctx, di, en, &eventPollerConf{ @@ -88,14 +94,14 @@ func newAggregator(ctx context.Context, di database.Plugin, sh definitions.Defin } func (ag *aggregator) start() { - go ag.offchainListener() + go ag.batchRewindListener() ag.eventPoller.start() } -func (ag *aggregator) offchainListener() { +func (ag *aggregator) batchRewindListener() { for { select { - case uuid := <-ag.offchainBatches: + case uuid := <-ag.rewindBatches: ag.queuedRewinds <- uuid ag.eventPoller.shoulderTap() case <-ag.ctx.Done(): @@ -249,6 +255,46 @@ func (ag *aggregator) processPins(ctx context.Context, pins []*fftypes.Pin, stat return err } +func (ag *aggregator) checkOnchainConsistency(ctx context.Context, msg *fftypes.Message, pin *fftypes.Pin) (valid bool, err error) { + l := log.L(ctx) + + verifierRef := &fftypes.VerifierRef{ + Type: ag.verifierType, + Value: pin.Signer, + } + + if msg.Header.Key == "" || msg.Header.Key != pin.Signer { + l.Errorf("Invalid message '%s'. Key '%s' does not match the signer of the pin: %s", msg.Header.ID, msg.Header.Key, pin.Signer) + return false, nil // This is not retryable. skip this message + } + + // Verify that we can resolve the signing key back to the identity that is claimed in the batch. 
+ resolvedAuthor, err := ag.identity.FindIdentityForVerifier(ctx, []fftypes.IdentityType{ + fftypes.IdentityTypeOrg, + fftypes.IdentityTypeCustom, + }, msg.Header.Namespace, verifierRef) + if err != nil { + return false, err + } + if resolvedAuthor == nil { + if msg.Header.Type == fftypes.MessageTypeDefinition && + (msg.Header.Tag == fftypes.SystemTagIdentityClaim || msg.Header.Tag == fftypes.DeprecatedSystemTagDefineNode || msg.Header.Tag == fftypes.DeprecatedSystemTagDefineOrganization) { + // We defer detailed checking of this identity to the system handler + return true, nil + } else if msg.Header.Type != fftypes.MessageTypePrivate { + // Only private messages, or root org broadcasts can have an unregistered key + l.Errorf("Invalid message '%s'. Author '%s' could not be resolved: %s", msg.Header.ID, msg.Header.Author, err) + return false, nil // This is not retryable. skip this batch + } + } else if msg.Header.Author == "" || resolvedAuthor.DID != msg.Header.Author { + l.Errorf("Invalid message '%s'. Author '%s' does not match identity registered to %s: %s (%s)", msg.Header.ID, msg.Header.Author, verifierRef.Value, resolvedAuthor.DID, resolvedAuthor.ID) + return false, nil // This is not retryable. skip this batch + + } + + return true, nil +} + func (ag *aggregator) processMessage(ctx context.Context, batch *fftypes.Batch, pin *fftypes.Pin, msgBaseIndex int64, msg *fftypes.Message, state *batchState) (err error) { l := log.L(ctx) @@ -290,7 +336,7 @@ func (ag *aggregator) processMessage(ctx context.Context, batch *fftypes.Batch, } l.Debugf("Attempt dispatch msg=%s broadcastContexts=%v privatePins=%v", msg.Header.ID, unmaskedContexts, msg.Pins) - dispatched, err := ag.attemptMessageDispatch(ctx, msg, batch.Payload.TX.ID, state) + dispatched, err := ag.attemptMessageDispatch(ctx, msg, batch.Payload.TX.ID, state, pin) if err != nil { return err } @@ -312,7 +358,7 @@ func (ag *aggregator) processMessage(ctx context.Context, batch *fftypes.Batch, return nil } -func (ag *aggregator) attemptMessageDispatch(ctx context.Context, msg *fftypes.Message, tx *fftypes.UUID, state *batchState) (bool, error) { +func (ag *aggregator) attemptMessageDispatch(ctx context.Context, msg *fftypes.Message, tx *fftypes.UUID, state *batchState, pin *fftypes.Pin) (bool, error) { // If we don't find all the data, then we don't dispatch data, foundAll, err := ag.data.GetMessageData(ctx, msg, true) @@ -320,6 +366,11 @@ func (ag *aggregator) attemptMessageDispatch(ctx context.Context, msg *fftypes.M return false, err } + // Check the pin signer is valid for the message + if valid, err := ag.checkOnchainConsistency(ctx, msg, pin); err != nil || !valid { + return false, err + } + // Verify we have all the blobs for the data if resolved, err := ag.resolveBlobs(ctx, data); err != nil || !resolved { return false, err @@ -342,22 +393,20 @@ func (ag *aggregator) attemptMessageDispatch(ctx context.Context, msg *fftypes.M // Validate the message data valid := true + var customCorrelator *fftypes.UUID switch { case msg.Header.Type == fftypes.MessageTypeDefinition: // We handle definition events in-line on the aggregator, as it would be confusing for apps to be // dispatched subsequent events before we have processed the definition events they depend on.
- msgAction, batchAction, err := ag.definitions.HandleDefinitionBroadcast(ctx, msg, data, tx) - if msgAction == definitions.ActionRetry { + handlerResult, err := ag.definitions.HandleDefinitionBroadcast(ctx, state, msg, data, tx) + if handlerResult.Action == definitions.ActionRetry { return false, err } - if batchAction != nil { - state.AddPreFinalize(batchAction.PreFinalize) - state.AddFinalize(batchAction.Finalize) - } - if msgAction == definitions.ActionWait { + if handlerResult.Action == definitions.ActionWait { return false, nil } - valid = msgAction == definitions.ActionConfirm + customCorrelator = handlerResult.CustomCorrelator + valid = handlerResult.Action == definitions.ActionConfirm case msg.Header.Type == fftypes.MessageTypeGroupInit: // Already handled as part of resolving the context - do nothing. @@ -371,7 +420,9 @@ func (ag *aggregator) attemptMessageDispatch(ctx context.Context, msg *fftypes.M status := fftypes.MessageStateConfirmed eventType := fftypes.EventTypeMessageConfirmed - if !valid { + if valid { + state.pendingConfirms[*msg.Header.ID] = msg + } else { status = fftypes.MessageStateRejected eventType = fftypes.EventTypeMessageRejected } @@ -387,10 +438,15 @@ func (ag *aggregator) attemptMessageDispatch(ctx context.Context, msg *fftypes.M // Generate the appropriate event event := fftypes.NewEvent(eventType, msg.Header.Namespace, msg.Header.ID, tx) + event.Correlator = msg.Header.CID + if customCorrelator != nil { + // Definition handlers can set a custom event correlator (such as a token pool ID) + event.Correlator = customCorrelator + } if err = ag.database.InsertEvent(ctx, event); err != nil { return err } - log.L(ctx).Infof("Emitting %s %s for message %s:%s", eventType, event.ID, msg.Header.Namespace, msg.Header.ID) + log.L(ctx).Infof("Emitting %s %s for message %s:%s (correlator=%v)", eventType, event.ID, msg.Header.Namespace, msg.Header.ID, event.Correlator) return nil }) if ag.metrics.IsMetricsEnabled() { diff --git a/internal/events/aggregator_batch_state.go b/internal/events/aggregator_batch_state.go index 1d1be34c50..00820b2e67 100644 --- a/internal/events/aggregator_batch_state.go +++ b/internal/events/aggregator_batch_state.go @@ -35,6 +35,7 @@ func newBatchState(ag *aggregator) *batchState { maskedContexts: make(map[fftypes.Bytes32]*nextPinGroupState), unmaskedContexts: make(map[fftypes.Bytes32]*contextState), dispatchedMessages: make([]*dispatchedMessage, 0), + pendingConfirms: make(map[fftypes.UUID]*fftypes.Message), PreFinalize: make([]func(ctx context.Context) error, 0), Finalize: make([]func(ctx context.Context) error, 0), @@ -93,6 +94,7 @@ type batchState struct { maskedContexts map[fftypes.Bytes32]*nextPinGroupState unmaskedContexts map[fftypes.Bytes32]*contextState dispatchedMessages []*dispatchedMessage + pendingConfirms map[fftypes.UUID]*fftypes.Message // PreFinalize callbacks may perform blocking actions (possibly to an external connector) // - Will execute after all batch messages have been processed @@ -119,6 +121,10 @@ func (bs *batchState) AddFinalize(action func(ctx context.Context) error) { } } +func (bs *batchState) GetPendingConfirm() map[fftypes.UUID]*fftypes.Message { + return bs.pendingConfirms +} + func (bs *batchState) RunPreFinalize(ctx context.Context) error { for _, action := range bs.PreFinalize { if err := action(ctx); err != nil { @@ -200,7 +206,7 @@ func (bs *batchState) CheckMaskedContextReady(ctx context.Context, msg *fftypes. 
} } if nextPin == nil || nextPin.Identity != msg.Header.Author { - l.Warnf("Mismatched nexthash or author group=%s topic=%s context=%s pin=%s nextHash=%+v", msg.Header.Group, topic, contextUnmasked, pin, nextPin) + l.Warnf("Mismatched nexthash or author group=%s topic=%s context=%s pin=%s nextHash=%+v author=%s", msg.Header.Group, topic, contextUnmasked, pin, nextPin, msg.Header.Author) return nil, nil } return &nextPinState{ diff --git a/internal/events/aggregator_test.go b/internal/events/aggregator_test.go index 5fbd93287c..7b30307079 100644 --- a/internal/events/aggregator_test.go +++ b/internal/events/aggregator_test.go @@ -25,9 +25,11 @@ import ( "github.com/hyperledger/firefly/internal/config" "github.com/hyperledger/firefly/internal/definitions" "github.com/hyperledger/firefly/internal/log" + "github.com/hyperledger/firefly/mocks/blockchainmocks" "github.com/hyperledger/firefly/mocks/databasemocks" "github.com/hyperledger/firefly/mocks/datamocks" "github.com/hyperledger/firefly/mocks/definitionsmocks" + "github.com/hyperledger/firefly/mocks/identitymanagermocks" "github.com/hyperledger/firefly/mocks/metricsmocks" "github.com/hyperledger/firefly/pkg/database" "github.com/hyperledger/firefly/pkg/fftypes" @@ -35,27 +37,29 @@ import ( "github.com/stretchr/testify/mock" ) -func newTestAggregator() (*aggregator, func()) { +func newTestAggregatorCommon(metrics bool) (*aggregator, func()) { mdi := &databasemocks.Plugin{} mdm := &datamocks.Manager{} msh := &definitionsmocks.DefinitionHandlers{} + mim := &identitymanagermocks.Manager{} mmi := &metricsmocks.Manager{} - mmi.On("IsMetricsEnabled").Return(false) + mbi := &blockchainmocks.Plugin{} + if metrics { + mmi.On("MessageConfirmed", mock.Anything, fftypes.EventTypeMessageConfirmed).Return() + } + mmi.On("IsMetricsEnabled").Return(metrics) + mbi.On("VerifierType").Return(fftypes.VerifierTypeEthAddress) ctx, cancel := context.WithCancel(context.Background()) - ag := newAggregator(ctx, mdi, msh, mdm, newEventNotifier(ctx, "ut"), mmi) + ag := newAggregator(ctx, mdi, mbi, msh, mim, mdm, newEventNotifier(ctx, "ut"), mmi) return ag, cancel } func newTestAggregatorWithMetrics() (*aggregator, func()) { - mdi := &databasemocks.Plugin{} - mdm := &datamocks.Manager{} - msh := &definitionsmocks.DefinitionHandlers{} - mmi := &metricsmocks.Manager{} - mmi.On("MessageConfirmed", mock.Anything, fftypes.EventTypeMessageConfirmed).Return() - mmi.On("IsMetricsEnabled").Return(true) - ctx, cancel := context.WithCancel(context.Background()) - ag := newAggregator(ctx, mdi, msh, mdm, newEventNotifier(ctx, "ut"), mmi) - return ag, cancel + return newTestAggregatorCommon(true) +} + +func newTestAggregator() (*aggregator, func()) { + return newTestAggregatorCommon(false) } func TestAggregationMaskedZeroNonceMatch(t *testing.T) { @@ -65,8 +69,8 @@ func TestAggregationMaskedZeroNonceMatch(t *testing.T) { bs := newBatchState(ag) // Generate some pin data - member1org := "org1" - member2org := "org2" + member1org := newTestOrg("org1") + member2org := newTestOrg("org2") member2key := "0x23456" topic := "some-topic" batchID := fftypes.NewUUID() @@ -77,13 +81,19 @@ func TestAggregationMaskedZeroNonceMatch(t *testing.T) { h.Write((*groupID)[:]) contextUnmasked := fftypes.HashResult(h) initNPG := &nextPinGroupState{topic: topic, groupID: groupID} - member1NonceZero := initNPG.calcPinHash(member1org, 0) - member2NonceZero := initNPG.calcPinHash(member2org, 0) - member2NonceOne := initNPG.calcPinHash(member2org, 1) + member1NonceZero := initNPG.calcPinHash(member1org.DID, 0) 
+ member2NonceZero := initNPG.calcPinHash(member2org.DID, 0) + member2NonceOne := initNPG.calcPinHash(member2org.DID, 1) mdi := ag.database.(*databasemocks.Plugin) mdm := ag.data.(*datamocks.Manager) msh := ag.definitions.(*definitionsmocks.DefinitionHandlers) + mim := ag.identity.(*identitymanagermocks.Manager) + + mim.On("FindIdentityForVerifier", ag.ctx, []fftypes.IdentityType{fftypes.IdentityTypeOrg, fftypes.IdentityTypeCustom}, "ns1", &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: member2key, + }).Return(member2org, nil) // Get the batch mdi.On("GetBatchByID", ag.ctx, batchID).Return(&fftypes.Batch{ @@ -92,11 +102,12 @@ func TestAggregationMaskedZeroNonceMatch(t *testing.T) { Messages: []*fftypes.Message{ { Header: fftypes.MessageHeader{ - ID: msgID, - Group: groupID, - Topics: []string{topic}, - Identity: fftypes.Identity{ - Author: member2org, + ID: msgID, + Group: groupID, + Namespace: "ns1", + Topics: []string{topic}, + SignerRef: fftypes.SignerRef{ + Author: member2org.DID, Key: member2key, }, }, @@ -114,8 +125,8 @@ func TestAggregationMaskedZeroNonceMatch(t *testing.T) { msh.On("ResolveInitGroup", ag.ctx, mock.Anything).Return(&fftypes.Group{ GroupIdentity: fftypes.GroupIdentity{ Members: fftypes.Members{ - {Identity: member1org}, - {Identity: member2org}, + {Identity: member1org.DID}, + {Identity: member2org.DID}, }, }, }, nil) @@ -169,6 +180,7 @@ func TestAggregationMaskedZeroNonceMatch(t *testing.T) { Hash: member2NonceZero, Batch: batchID, Index: 0, + Signer: member2key, Dispatched: false, }, }, bs) @@ -177,6 +189,8 @@ func TestAggregationMaskedZeroNonceMatch(t *testing.T) { err = bs.RunFinalize(ag.ctx) assert.NoError(t, err) + assert.NotNil(t, bs.GetPendingConfirm()[*msgID]) + mdi.AssertExpectations(t) mdm.AssertExpectations(t) } @@ -188,8 +202,8 @@ func TestAggregationMaskedNextSequenceMatch(t *testing.T) { defer cancel() // Generate some pin data - member1org := "org1" - member2org := "org2" + member1org := newTestOrg("org1") + member2org := newTestOrg("org2") member2key := "0x12345" topic := "some-topic" batchID := fftypes.NewUUID() @@ -200,12 +214,18 @@ func TestAggregationMaskedNextSequenceMatch(t *testing.T) { h.Write((*groupID)[:]) contextUnmasked := fftypes.HashResult(h) initNPG := &nextPinGroupState{topic: topic, groupID: groupID} - member1Nonce100 := initNPG.calcPinHash(member1org, 100) - member2Nonce500 := initNPG.calcPinHash(member2org, 500) - member2Nonce501 := initNPG.calcPinHash(member2org, 501) + member1Nonce100 := initNPG.calcPinHash(member1org.DID, 100) + member2Nonce500 := initNPG.calcPinHash(member2org.DID, 500) + member2Nonce501 := initNPG.calcPinHash(member2org.DID, 501) mdi := ag.database.(*databasemocks.Plugin) mdm := ag.data.(*datamocks.Manager) + mim := ag.identity.(*identitymanagermocks.Manager) + + mim.On("FindIdentityForVerifier", ag.ctx, []fftypes.IdentityType{fftypes.IdentityTypeOrg, fftypes.IdentityTypeCustom}, "ns1", &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: member2key, + }).Return(member2org, nil) rag := mdi.On("RunAsGroup", mock.Anything, mock.Anything).Maybe() rag.RunFn = func(a mock.Arguments) { @@ -219,11 +239,12 @@ func TestAggregationMaskedNextSequenceMatch(t *testing.T) { Messages: []*fftypes.Message{ { Header: fftypes.MessageHeader{ - ID: msgID, - Group: groupID, - Topics: []string{topic}, - Identity: fftypes.Identity{ - Author: member2org, + ID: msgID, + Group: groupID, + Namespace: "ns1", + Topics: []string{topic}, + SignerRef: fftypes.SignerRef{ + Author: member2org.DID, Key: 
member2key, }, }, @@ -237,8 +258,8 @@ func TestAggregationMaskedNextSequenceMatch(t *testing.T) { }, nil) // Look for existing nextpins - none found, first on context mdi.On("GetNextPins", ag.ctx, mock.Anything).Return([]*fftypes.NextPin{ - {Context: contextUnmasked, Identity: member1org, Hash: member1Nonce100, Nonce: 100, Sequence: 929}, - {Context: contextUnmasked, Identity: member2org, Hash: member2Nonce500, Nonce: 500, Sequence: 424}, + {Context: contextUnmasked, Identity: member1org.DID, Hash: member1Nonce100, Nonce: 100, Sequence: 929}, + {Context: contextUnmasked, Identity: member2org.DID, Hash: member2Nonce500, Nonce: 500, Sequence: 424}, }, nil, nil).Once() // Validate the message is ok mdm.On("GetMessageData", ag.ctx, mock.Anything, true).Return([]*fftypes.Data{}, true, nil) @@ -274,6 +295,7 @@ func TestAggregationMaskedNextSequenceMatch(t *testing.T) { Hash: member2Nonce500, Batch: batchID, Index: 0, + Signer: member2key, Dispatched: false, }, }) @@ -290,6 +312,8 @@ func TestAggregationBroadcast(t *testing.T) { bs := newBatchState(ag) // Generate some pin data + member1org := newTestOrg("org1") + member1key := "0x12345" topic := "some-topic" batchID := fftypes.NewUUID() msgID := fftypes.NewUUID() @@ -299,20 +323,25 @@ func TestAggregationBroadcast(t *testing.T) { mdi := ag.database.(*databasemocks.Plugin) mdm := ag.data.(*datamocks.Manager) + mim := ag.identity.(*identitymanagermocks.Manager) + + mim.On("FindIdentityForVerifier", ag.ctx, []fftypes.IdentityType{fftypes.IdentityTypeOrg, fftypes.IdentityTypeCustom}, "ns1", &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: member1key, + }).Return(member1org, nil) // Get the batch - member1org := "org1" - member1key := "0x12345" mdi.On("GetBatchByID", ag.ctx, batchID).Return(&fftypes.Batch{ ID: batchID, Payload: fftypes.BatchPayload{ Messages: []*fftypes.Message{ { Header: fftypes.MessageHeader{ - ID: msgID, - Topics: []string{topic}, - Identity: fftypes.Identity{ - Author: member1org, + ID: msgID, + Topics: []string{topic}, + Namespace: "ns1", + SignerRef: fftypes.SignerRef{ + Author: member1org.DID, Key: member1key, }, }, @@ -345,6 +374,7 @@ func TestAggregationBroadcast(t *testing.T) { Hash: contextUnmasked, Batch: batchID, Index: 0, + Signer: member1key, Dispatched: false, }, }, bs) @@ -604,11 +634,18 @@ func TestProcessMsgFailPinUpdate(t *testing.T) { defer cancel() bs := newBatchState(ag) pin := fftypes.NewRandB32() + org1 := newTestOrg("org1") mdi := ag.database.(*databasemocks.Plugin) mdm := ag.data.(*datamocks.Manager) + mim := ag.identity.(*identitymanagermocks.Manager) + + mim.On("FindIdentityForVerifier", ag.ctx, []fftypes.IdentityType{fftypes.IdentityTypeOrg, fftypes.IdentityTypeCustom}, "ns1", &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "0x12345", + }).Return(org1, nil) mdi.On("GetNextPins", ag.ctx, mock.Anything).Return([]*fftypes.NextPin{ - {Context: fftypes.NewRandB32(), Hash: pin}, + {Context: fftypes.NewRandB32(), Hash: pin, Identity: org1.DID}, }, nil, nil) mdm.On("GetMessageData", ag.ctx, mock.Anything, true).Return([]*fftypes.Data{}, true, nil) mdm.On("ValidateAll", ag.ctx, mock.Anything).Return(false, nil) @@ -616,11 +653,16 @@ func TestProcessMsgFailPinUpdate(t *testing.T) { mdi.On("UpdateMessage", ag.ctx, mock.Anything, mock.Anything).Return(nil) mdi.On("UpdateNextPin", ag.ctx, mock.Anything, mock.Anything).Return(fmt.Errorf("pop")) - err := ag.processMessage(ag.ctx, &fftypes.Batch{}, &fftypes.Pin{Masked: true, Sequence: 12345}, 10, &fftypes.Message{ + err := 
ag.processMessage(ag.ctx, &fftypes.Batch{}, &fftypes.Pin{Masked: true, Sequence: 12345, Signer: "0x12345"}, 10, &fftypes.Message{ Header: fftypes.MessageHeader{ - ID: fftypes.NewUUID(), - Group: fftypes.NewRandB32(), - Topics: fftypes.FFStringArray{"topic1"}, + ID: fftypes.NewUUID(), + Group: fftypes.NewRandB32(), + Topics: fftypes.FFStringArray{"topic1"}, + Namespace: "ns1", + SignerRef: fftypes.SignerRef{ + Author: org1.DID, + Key: "0x12345", + }, }, Pins: fftypes.FFStringArray{pin.String()}, }, bs) @@ -646,7 +688,7 @@ func TestCheckMaskedContextReadyMismatchedAuthor(t *testing.T) { Header: fftypes.MessageHeader{ ID: fftypes.NewUUID(), Group: fftypes.NewRandB32(), - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -668,7 +710,7 @@ func TestAttemptContextInitGetGroupByIDFail(t *testing.T) { Header: fftypes.MessageHeader{ ID: fftypes.NewUUID(), Group: fftypes.NewRandB32(), - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -690,7 +732,7 @@ func TestAttemptContextInitGroupNotFound(t *testing.T) { Header: fftypes.MessageHeader{ ID: fftypes.NewUUID(), Group: fftypes.NewRandB32(), - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -721,7 +763,7 @@ func TestAttemptContextInitAuthorMismatch(t *testing.T) { Header: fftypes.MessageHeader{ ID: fftypes.NewUUID(), Group: groupID, - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -750,7 +792,7 @@ func TestAttemptContextInitNoMatch(t *testing.T) { Header: fftypes.MessageHeader{ ID: fftypes.NewUUID(), Group: groupID, - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -783,7 +825,7 @@ func TestAttemptContextInitGetPinsFail(t *testing.T) { Header: fftypes.MessageHeader{ ID: fftypes.NewUUID(), Group: groupID, - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -818,7 +860,7 @@ func TestAttemptContextInitGetPinsBlocked(t *testing.T) { Header: fftypes.MessageHeader{ ID: fftypes.NewUUID(), Group: groupID, - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -853,7 +895,7 @@ func TestAttemptContextInitInsertPinsFail(t *testing.T) { Header: fftypes.MessageHeader{ ID: fftypes.NewUUID(), Group: groupID, - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -875,7 +917,7 @@ func TestAttemptMessageDispatchFailGetData(t *testing.T) { _, err := ag.attemptMessageDispatch(ag.ctx, &fftypes.Message{ Header: fftypes.MessageHeader{ID: fftypes.NewUUID()}, - }, nil, nil) + }, nil, nil, nil) assert.EqualError(t, err, "pop") } @@ -885,15 +927,19 @@ func TestAttemptMessageDispatchFailValidateData(t *testing.T) { defer cancel() mdm := ag.data.(*datamocks.Manager) + mim := ag.identity.(*identitymanagermocks.Manager) + + org1 := newTestOrg("org1") + mim.On("FindIdentityForVerifier", ag.ctx, mock.Anything, mock.Anything, mock.Anything).Return(org1, nil) mdm.On("GetMessageData", ag.ctx, mock.Anything, true).Return([]*fftypes.Data{}, true, nil) mdm.On("ValidateAll", ag.ctx, mock.Anything).Return(false, fmt.Errorf("pop")) _, err := ag.attemptMessageDispatch(ag.ctx, &fftypes.Message{ - Header: fftypes.MessageHeader{ID: fftypes.NewUUID()}, + Header: fftypes.MessageHeader{ID: fftypes.NewUUID(), SignerRef: fftypes.SignerRef{Key: "0x12345", Author: org1.DID}}, Data: fftypes.DataRefs{ 
{ID: fftypes.NewUUID()}, }, - }, nil, nil) + }, nil, &batchState{}, &fftypes.Pin{Signer: "0x12345"}) assert.EqualError(t, err, "pop") } @@ -905,6 +951,10 @@ func TestAttemptMessageDispatchMissingBlobs(t *testing.T) { blobHash := fftypes.NewRandB32() mdm := ag.data.(*datamocks.Manager) + mim := ag.identity.(*identitymanagermocks.Manager) + + org1 := newTestOrg("org1") + mim.On("FindIdentityForVerifier", ag.ctx, mock.Anything, mock.Anything, mock.Anything).Return(org1, nil) mdm.On("GetMessageData", ag.ctx, mock.Anything, true).Return([]*fftypes.Data{ {ID: fftypes.NewUUID(), Hash: fftypes.NewRandB32(), Blob: &fftypes.BlobRef{ Hash: blobHash, @@ -918,8 +968,8 @@ func TestAttemptMessageDispatchMissingBlobs(t *testing.T) { mdm.On("CopyBlobPStoDX", ag.ctx, mock.Anything).Return(nil, nil) dispatched, err := ag.attemptMessageDispatch(ag.ctx, &fftypes.Message{ - Header: fftypes.MessageHeader{ID: fftypes.NewUUID()}, - }, nil, nil) + Header: fftypes.MessageHeader{ID: fftypes.NewUUID(), SignerRef: fftypes.SignerRef{Key: "0x12345", Author: org1.DID}}, + }, nil, &batchState{}, &fftypes.Pin{Signer: "0x12345"}) assert.NoError(t, err) assert.False(t, dispatched) @@ -929,6 +979,11 @@ func TestAttemptMessageDispatchMissingTransfers(t *testing.T) { ag, cancel := newTestAggregator() defer cancel() + mim := ag.identity.(*identitymanagermocks.Manager) + + org1 := newTestOrg("org1") + mim.On("FindIdentityForVerifier", ag.ctx, mock.Anything, mock.Anything, mock.Anything).Return(org1, nil) + mdm := ag.data.(*datamocks.Manager) mdm.On("GetMessageData", ag.ctx, mock.Anything, true).Return([]*fftypes.Data{}, true, nil) @@ -939,10 +994,14 @@ func TestAttemptMessageDispatchMissingTransfers(t *testing.T) { Header: fftypes.MessageHeader{ ID: fftypes.NewUUID(), Type: fftypes.MessageTypeTransferBroadcast, + SignerRef: fftypes.SignerRef{ + Author: org1.DID, + Key: "0x12345", + }, }, } msg.Hash = msg.Header.Hash() - dispatched, err := ag.attemptMessageDispatch(ag.ctx, msg, nil, nil) + dispatched, err := ag.attemptMessageDispatch(ag.ctx, msg, nil, &batchState{}, &fftypes.Pin{Signer: "0x12345"}) assert.NoError(t, err) assert.False(t, dispatched) @@ -954,6 +1013,11 @@ func TestAttemptMessageDispatchGetTransfersFail(t *testing.T) { ag, cancel := newTestAggregator() defer cancel() + mim := ag.identity.(*identitymanagermocks.Manager) + + org1 := newTestOrg("org1") + mim.On("FindIdentityForVerifier", ag.ctx, mock.Anything, mock.Anything, mock.Anything).Return(org1, nil) + mdm := ag.data.(*datamocks.Manager) mdm.On("GetMessageData", ag.ctx, mock.Anything, true).Return([]*fftypes.Data{}, true, nil) @@ -962,12 +1026,13 @@ func TestAttemptMessageDispatchGetTransfersFail(t *testing.T) { msg := &fftypes.Message{ Header: fftypes.MessageHeader{ - ID: fftypes.NewUUID(), - Type: fftypes.MessageTypeTransferBroadcast, + ID: fftypes.NewUUID(), + Type: fftypes.MessageTypeTransferBroadcast, + SignerRef: fftypes.SignerRef{Key: "0x12345", Author: org1.DID}, }, } msg.Hash = msg.Header.Hash() - dispatched, err := ag.attemptMessageDispatch(ag.ctx, msg, nil, nil) + dispatched, err := ag.attemptMessageDispatch(ag.ctx, msg, nil, &batchState{}, &fftypes.Pin{Signer: "0x12345"}) assert.EqualError(t, err, "pop") assert.False(t, dispatched) @@ -979,10 +1044,13 @@ func TestAttemptMessageDispatchTransferMismatch(t *testing.T) { ag, cancel := newTestAggregator() defer cancel() + org1 := newTestOrg("org1") + msg := &fftypes.Message{ Header: fftypes.MessageHeader{ - ID: fftypes.NewUUID(), - Type: fftypes.MessageTypeTransferBroadcast, + ID: fftypes.NewUUID(), + Type: 
fftypes.MessageTypeTransferBroadcast, + SignerRef: fftypes.SignerRef{Key: "0x12345", Author: org1.DID}, }, } msg.Hash = msg.Header.Hash() @@ -992,13 +1060,16 @@ func TestAttemptMessageDispatchTransferMismatch(t *testing.T) { MessageHash: fftypes.NewRandB32(), }} + mim := ag.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", ag.ctx, mock.Anything, mock.Anything, mock.Anything).Return(org1, nil) + mdm := ag.data.(*datamocks.Manager) mdm.On("GetMessageData", ag.ctx, mock.Anything, true).Return([]*fftypes.Data{}, true, nil) mdi := ag.database.(*databasemocks.Plugin) mdi.On("GetTokenTransfers", ag.ctx, mock.Anything).Return(transfers, nil, nil) - dispatched, err := ag.attemptMessageDispatch(ag.ctx, msg, nil, nil) + dispatched, err := ag.attemptMessageDispatch(ag.ctx, msg, nil, &batchState{}, &fftypes.Pin{Signer: "0x12345"}) assert.NoError(t, err) assert.False(t, dispatched) @@ -1006,13 +1077,20 @@ func TestAttemptMessageDispatchTransferMismatch(t *testing.T) { mdi.AssertExpectations(t) } -func TestDefinitionBroadcastActionReject(t *testing.T) { +func TestDefinitionBroadcastActionRejectCustomCorrelator(t *testing.T) { ag, cancel := newTestAggregator() defer cancel() bs := newBatchState(ag) + org1 := newTestOrg("org1") + + mim := ag.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", ag.ctx, mock.Anything, mock.Anything, mock.Anything).Return(org1, nil) + + customCorrelator := fftypes.NewUUID() msh := ag.definitions.(*definitionsmocks.DefinitionHandlers) - msh.On("HandleDefinitionBroadcast", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(definitions.ActionReject, &definitions.DefinitionBatchActions{}, nil) + msh.On("HandleDefinitionBroadcast", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything). 
+ Return(definitions.HandlerResult{Action: definitions.ActionReject, CustomCorrelator: customCorrelator}, nil) mdm := ag.data.(*datamocks.Manager) mdm.On("GetMessageData", ag.ctx, mock.Anything, true).Return([]*fftypes.Data{}, true, nil) @@ -1035,20 +1113,71 @@ func TestDefinitionBroadcastActionReject(t *testing.T) { return true })).Return(nil) - mdi.On("InsertEvent", ag.ctx, mock.Anything).Return(nil) + mdi.On("InsertEvent", ag.ctx, mock.MatchedBy(func(event *fftypes.Event) bool { + return event.Correlator.Equals(customCorrelator) + })).Return(nil) _, err := ag.attemptMessageDispatch(ag.ctx, &fftypes.Message{ Header: fftypes.MessageHeader{ Type: fftypes.MessageTypeDefinition, ID: fftypes.NewUUID(), Namespace: "any", + SignerRef: fftypes.SignerRef{Key: "0x12345", Author: org1.DID}, }, Data: fftypes.DataRefs{ {ID: fftypes.NewUUID()}, }, - }, nil, bs) + }, nil, bs, &fftypes.Pin{Signer: "0x12345"}) + assert.NoError(t, err) + err = bs.RunFinalize(ag.ctx) assert.NoError(t, err) +} + +func TestDefinitionBroadcastInvalidSigner(t *testing.T) { + ag, cancel := newTestAggregator() + defer cancel() + bs := newBatchState(ag) + + org1 := newTestOrg("org1") + + mim := ag.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", ag.ctx, mock.Anything, mock.Anything, mock.Anything).Return(nil, nil) + + mdm := ag.data.(*datamocks.Manager) + mdm.On("GetMessageData", ag.ctx, mock.Anything, true).Return([]*fftypes.Data{}, true, nil) + mdi := ag.database.(*databasemocks.Plugin) + mdi.On("UpdateMessage", ag.ctx, mock.Anything, mock.MatchedBy(func(u database.Update) bool { + update, err := u.Finalize() + assert.NoError(t, err) + assert.Len(t, update.SetOperations, 2) + + assert.Equal(t, "confirmed", update.SetOperations[0].Field) + v, err := update.SetOperations[0].Value.Value() + assert.NoError(t, err) + assert.Greater(t, v, int64(0)) + + assert.Equal(t, "state", update.SetOperations[1].Field) + v, err = update.SetOperations[1].Value.Value() + assert.NoError(t, err) + assert.Equal(t, "rejected", v) + + return true + })).Return(nil) + mdi.On("InsertEvent", ag.ctx, mock.Anything).Return(nil) + + _, err := ag.attemptMessageDispatch(ag.ctx, &fftypes.Message{ + Header: fftypes.MessageHeader{ + Type: fftypes.MessageTypeDefinition, + ID: fftypes.NewUUID(), + Namespace: "any", + SignerRef: fftypes.SignerRef{Key: "0x12345", Author: org1.DID}, + }, + Data: fftypes.DataRefs{ + {ID: fftypes.NewUUID()}, + }, + }, nil, bs, &fftypes.Pin{Signer: "0x12345"}) + assert.NoError(t, err) } func TestDispatchBroadcastQueuesLaterDispatch(t *testing.T) { @@ -1056,6 +1185,11 @@ func TestDispatchBroadcastQueuesLaterDispatch(t *testing.T) { defer cancel() bs := newBatchState(ag) + org1 := newTestOrg("org1") + + mim := ag.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", ag.ctx, mock.Anything, mock.Anything, mock.Anything).Return(org1, nil) + mdm := ag.data.(*datamocks.Manager) mdm.On("GetMessageData", ag.ctx, mock.Anything, true).Return([]*fftypes.Data{}, false, nil).Once() @@ -1068,6 +1202,7 @@ func TestDispatchBroadcastQueuesLaterDispatch(t *testing.T) { ID: fftypes.NewUUID(), Namespace: "any", Topics: fftypes.FFStringArray{"topic1"}, + SignerRef: fftypes.SignerRef{Key: "0x12345", Author: org1.DID}, }, } msg2 := &fftypes.Message{ @@ -1076,6 +1211,7 @@ func TestDispatchBroadcastQueuesLaterDispatch(t *testing.T) { ID: fftypes.NewUUID(), Namespace: "any", Topics: fftypes.FFStringArray{"topic1"}, + SignerRef: fftypes.SignerRef{Key: "0x12345", Author: org1.DID}, }, } @@ -1103,6 +1239,11 @@ func 
TestDispatchPrivateQueuesLaterDispatch(t *testing.T) { defer cancel() bs := newBatchState(ag) + org1 := newTestOrg("org1") + + mim := ag.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", ag.ctx, mock.Anything, mock.Anything, mock.Anything).Return(org1, nil) + mdm := ag.data.(*datamocks.Manager) mdm.On("GetMessageData", ag.ctx, mock.Anything, true).Return([]*fftypes.Data{}, false, nil).Once() @@ -1116,7 +1257,7 @@ func TestDispatchPrivateQueuesLaterDispatch(t *testing.T) { mdi := ag.database.(*databasemocks.Plugin) mdi.On("GetNextPins", ag.ctx, mock.Anything).Return([]*fftypes.NextPin{ - {Context: context, Nonce: 1 /* match member1NonceOne */, Identity: "org1", Hash: member1NonceOne}, + {Context: context, Nonce: 1 /* match member1NonceOne */, Identity: org1.DID, Hash: member1NonceOne}, }, nil, nil) msg1 := &fftypes.Message{ @@ -1126,8 +1267,8 @@ func TestDispatchPrivateQueuesLaterDispatch(t *testing.T) { Namespace: "any", Topics: fftypes.FFStringArray{"topic1"}, Group: groupID, - Identity: fftypes.Identity{ - Author: "org1", + SignerRef: fftypes.SignerRef{ + Author: org1.DID, }, }, Pins: fftypes.FFStringArray{member1NonceOne.String()}, @@ -1139,8 +1280,8 @@ func TestDispatchPrivateQueuesLaterDispatch(t *testing.T) { Namespace: "any", Topics: fftypes.FFStringArray{"topic1"}, Group: groupID, - Identity: fftypes.Identity{ - Author: "org1", + SignerRef: fftypes.SignerRef{ + Author: org1.DID, }, }, Pins: fftypes.FFStringArray{member1NonceTwo.String()}, @@ -1170,20 +1311,25 @@ func TestDispatchPrivateNextPinIncremented(t *testing.T) { defer cancel() bs := newBatchState(ag) + org1 := newTestOrg("org1") + + mim := ag.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", ag.ctx, mock.Anything, mock.Anything, mock.Anything).Return(org1, nil) + mdm := ag.data.(*datamocks.Manager) mdm.On("GetMessageData", ag.ctx, mock.Anything, true).Return([]*fftypes.Data{}, true, nil).Twice() groupID := fftypes.NewRandB32() initNPG := &nextPinGroupState{topic: "topic1", groupID: groupID} - member1NonceOne := initNPG.calcPinHash("org1", 1) - member1NonceTwo := initNPG.calcPinHash("org1", 2) + member1NonceOne := initNPG.calcPinHash(org1.DID, 1) + member1NonceTwo := initNPG.calcPinHash(org1.DID, 2) h := sha256.New() h.Write([]byte("topic1")) context := fftypes.HashResult(h) mdi := ag.database.(*databasemocks.Plugin) mdi.On("GetNextPins", ag.ctx, mock.Anything).Return([]*fftypes.NextPin{ - {Context: context, Nonce: 1 /* match member1NonceOne */, Identity: "org1", Hash: member1NonceOne}, + {Context: context, Nonce: 1 /* match member1NonceOne */, Identity: org1.DID, Hash: member1NonceOne}, }, nil, nil) msg1 := &fftypes.Message{ @@ -1193,8 +1339,9 @@ func TestDispatchPrivateNextPinIncremented(t *testing.T) { Namespace: "any", Topics: fftypes.FFStringArray{"topic1"}, Group: groupID, - Identity: fftypes.Identity{ - Author: "org1", + SignerRef: fftypes.SignerRef{ + Author: org1.DID, + Key: "0x12345", }, }, Pins: fftypes.FFStringArray{member1NonceOne.String()}, @@ -1206,8 +1353,9 @@ func TestDispatchPrivateNextPinIncremented(t *testing.T) { Namespace: "any", Topics: fftypes.FFStringArray{"topic1"}, Group: groupID, - Identity: fftypes.Identity{ - Author: "org1", + SignerRef: fftypes.SignerRef{ + Author: org1.DID, + Key: "0x12345", }, }, Pins: fftypes.FFStringArray{member1NonceTwo.String()}, @@ -1221,22 +1369,28 @@ func TestDispatchPrivateNextPinIncremented(t *testing.T) { } // First message should dispatch - err := ag.processMessage(ag.ctx, batch, &fftypes.Pin{Masked: true, 
Sequence: 12345}, 0, msg1, bs) + err := ag.processMessage(ag.ctx, batch, &fftypes.Pin{Masked: true, Sequence: 12345, Signer: "0x12345"}, 0, msg1, bs) assert.NoError(t, err) // Second message should dispatch too (Twice on GetMessageData) - err = ag.processMessage(ag.ctx, batch, &fftypes.Pin{Masked: true, Sequence: 12346}, 0, msg2, bs) + err = ag.processMessage(ag.ctx, batch, &fftypes.Pin{Masked: true, Sequence: 12346, Signer: "0x12345"}, 0, msg2, bs) assert.NoError(t, err) mdi.AssertExpectations(t) mdm.AssertExpectations(t) } + func TestDefinitionBroadcastActionRetry(t *testing.T) { ag, cancel := newTestAggregator() defer cancel() + org1 := newTestOrg("org1") + + mim := ag.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", ag.ctx, mock.Anything, mock.Anything, mock.Anything).Return(org1, nil) + msh := ag.definitions.(*definitionsmocks.DefinitionHandlers) - msh.On("HandleDefinitionBroadcast", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(definitions.ActionRetry, &definitions.DefinitionBatchActions{}, fmt.Errorf("pop")) + msh.On("HandleDefinitionBroadcast", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(definitions.HandlerResult{Action: definitions.ActionRetry}, fmt.Errorf("pop")) mdm := ag.data.(*datamocks.Manager) mdm.On("GetMessageData", ag.ctx, mock.Anything, true).Return([]*fftypes.Data{}, true, nil) @@ -1246,21 +1400,147 @@ func TestDefinitionBroadcastActionRetry(t *testing.T) { Type: fftypes.MessageTypeDefinition, ID: fftypes.NewUUID(), Namespace: "any", + SignerRef: fftypes.SignerRef{Key: "0x12345", Author: org1.DID}, }, Data: fftypes.DataRefs{ {ID: fftypes.NewUUID()}, }, - }, nil, nil) + }, nil, &batchState{}, &fftypes.Pin{Signer: "0x12345"}) assert.EqualError(t, err, "pop") } +func TestDefinitionBroadcastRejectSignerLookupFail(t *testing.T) { + ag, cancel := newTestAggregator() + defer cancel() + + org1 := newTestOrg("org1") + + mdm := ag.data.(*datamocks.Manager) + mdm.On("GetMessageData", ag.ctx, mock.Anything, true).Return([]*fftypes.Data{}, true, nil) + + mim := ag.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", ag.ctx, mock.Anything, mock.Anything, mock.Anything).Return(nil, fmt.Errorf("pop")) + + valid, err := ag.attemptMessageDispatch(ag.ctx, &fftypes.Message{ + Header: fftypes.MessageHeader{ + Type: fftypes.MessageTypeDefinition, + ID: fftypes.NewUUID(), + Namespace: "any", + SignerRef: fftypes.SignerRef{Key: "0x12345", Author: org1.DID}, + }, + Data: fftypes.DataRefs{ + {ID: fftypes.NewUUID()}, + }, + }, nil, &batchState{}, &fftypes.Pin{Signer: "0x12345"}) + assert.Regexp(t, "pop", err) + assert.False(t, valid) + + mdm.AssertExpectations(t) + mim.AssertExpectations(t) +} + +func TestDefinitionBroadcastRejectSignerLookupWrongOrg(t *testing.T) { + ag, cancel := newTestAggregator() + defer cancel() + + org1 := newTestOrg("org1") + + mdm := ag.data.(*datamocks.Manager) + mdm.On("GetMessageData", ag.ctx, mock.Anything, true).Return([]*fftypes.Data{}, true, nil) + + mim := ag.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", ag.ctx, mock.Anything, mock.Anything, mock.Anything).Return(newTestOrg("org2"), nil) + + valid, err := ag.attemptMessageDispatch(ag.ctx, &fftypes.Message{ + Header: fftypes.MessageHeader{ + Type: fftypes.MessageTypeDefinition, + ID: fftypes.NewUUID(), + Namespace: "any", + SignerRef: fftypes.SignerRef{Key: "0x12345", Author: org1.DID}, + }, + Data: fftypes.DataRefs{ + {ID: fftypes.NewUUID()}, + }, + }, nil, &batchState{}, 
&fftypes.Pin{Signer: "0x12345"}) + assert.NoError(t, err) + assert.False(t, valid) + + mdm.AssertExpectations(t) + mim.AssertExpectations(t) +} + +func TestDefinitionBroadcastRejectBadSigner(t *testing.T) { + ag, cancel := newTestAggregator() + defer cancel() + + org1 := newTestOrg("org1") + + mdm := ag.data.(*datamocks.Manager) + mdm.On("GetMessageData", ag.ctx, mock.Anything, true).Return([]*fftypes.Data{}, true, nil) + + valid, err := ag.attemptMessageDispatch(ag.ctx, &fftypes.Message{ + Header: fftypes.MessageHeader{ + Type: fftypes.MessageTypeDefinition, + ID: fftypes.NewUUID(), + Namespace: "any", + SignerRef: fftypes.SignerRef{Key: "0x23456", Author: org1.DID}, + }, + Data: fftypes.DataRefs{ + {ID: fftypes.NewUUID()}, + }, + }, nil, &batchState{}, &fftypes.Pin{Signer: "0x12345"}) + assert.NoError(t, err) + assert.False(t, valid) + +} + +func TestDefinitionBroadcastRejectUnregisteredSignerIdentityClaim(t *testing.T) { + ag, cancel := newTestAggregator() + defer cancel() + + org1 := newTestOrg("org1") + + mim := ag.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", ag.ctx, mock.Anything, mock.Anything, mock.Anything).Return(nil, nil) + + msh := ag.definitions.(*definitionsmocks.DefinitionHandlers) + msh.On("HandleDefinitionBroadcast", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(definitions.HandlerResult{Action: definitions.ActionWait}, nil) + + mdm := ag.data.(*datamocks.Manager) + mdm.On("GetMessageData", ag.ctx, mock.Anything, true).Return([]*fftypes.Data{}, true, nil) + + valid, err := ag.attemptMessageDispatch(ag.ctx, &fftypes.Message{ + Header: fftypes.MessageHeader{ + Type: fftypes.MessageTypeDefinition, + Tag: fftypes.SystemTagIdentityClaim, + ID: fftypes.NewUUID(), + Namespace: "any", + SignerRef: fftypes.SignerRef{Key: "0x12345", Author: org1.DID}, + }, + Data: fftypes.DataRefs{ + {ID: fftypes.NewUUID()}, + }, + }, nil, &batchState{}, &fftypes.Pin{Signer: "0x12345"}) + assert.NoError(t, err) + assert.False(t, valid) + + mim.AssertExpectations(t) + msh.AssertExpectations(t) + mdm.AssertExpectations(t) +} + func TestDefinitionBroadcastActionWait(t *testing.T) { ag, cancel := newTestAggregator() defer cancel() + org1 := newTestOrg("org1") + + mim := ag.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", ag.ctx, mock.Anything, mock.Anything, mock.Anything).Return(org1, nil) + msh := ag.definitions.(*definitionsmocks.DefinitionHandlers) - msh.On("HandleDefinitionBroadcast", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(definitions.ActionWait, &definitions.DefinitionBatchActions{}, nil) + msh.On("HandleDefinitionBroadcast", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(definitions.HandlerResult{Action: definitions.ActionWait}, nil) mdm := ag.data.(*datamocks.Manager) mdm.On("GetMessageData", ag.ctx, mock.Anything, true).Return([]*fftypes.Data{}, true, nil) @@ -1270,11 +1550,12 @@ func TestDefinitionBroadcastActionWait(t *testing.T) { Type: fftypes.MessageTypeDefinition, ID: fftypes.NewUUID(), Namespace: "any", + SignerRef: fftypes.SignerRef{Key: "0x12345", Author: org1.DID}, }, Data: fftypes.DataRefs{ {ID: fftypes.NewUUID()}, }, - }, nil, nil) + }, nil, &batchState{}, &fftypes.Pin{Signer: "0x12345"}) assert.NoError(t, err) } @@ -1283,17 +1564,21 @@ func TestAttemptMessageDispatchEventFail(t *testing.T) { ag, cancel := newTestAggregator() defer cancel() bs := newBatchState(ag) + org1 := newTestOrg("org1") mdi := 
ag.database.(*databasemocks.Plugin) mdm := ag.data.(*datamocks.Manager) + mim := ag.identity.(*identitymanagermocks.Manager) + + mim.On("FindIdentityForVerifier", ag.ctx, mock.Anything, mock.Anything, mock.Anything).Return(org1, nil) mdm.On("GetMessageData", ag.ctx, mock.Anything, true).Return([]*fftypes.Data{}, true, nil) mdm.On("ValidateAll", ag.ctx, mock.Anything).Return(true, nil) mdi.On("UpdateMessage", ag.ctx, mock.Anything, mock.Anything).Return(nil) mdi.On("InsertEvent", ag.ctx, mock.Anything).Return(fmt.Errorf("pop")) _, err := ag.attemptMessageDispatch(ag.ctx, &fftypes.Message{ - Header: fftypes.MessageHeader{ID: fftypes.NewUUID()}, - }, nil, bs) + Header: fftypes.MessageHeader{ID: fftypes.NewUUID(), SignerRef: fftypes.SignerRef{Key: "0x12345", Author: org1.DID}}, + }, nil, bs, &fftypes.Pin{Signer: "0x12345"}) assert.NoError(t, err) err = bs.RunFinalize(ag.ctx) @@ -1305,9 +1590,13 @@ func TestAttemptMessageDispatchGroupInit(t *testing.T) { ag, cancel := newTestAggregator() defer cancel() bs := newBatchState(ag) + org1 := newTestOrg("org1") mdi := ag.database.(*databasemocks.Plugin) mdm := ag.data.(*datamocks.Manager) + mim := ag.identity.(*identitymanagermocks.Manager) + + mim.On("FindIdentityForVerifier", ag.ctx, mock.Anything, mock.Anything, mock.Anything).Return(org1, nil) mdm.On("GetMessageData", ag.ctx, mock.Anything, true).Return([]*fftypes.Data{}, true, nil) mdm.On("ValidateAll", ag.ctx, mock.Anything).Return(true, nil) mdi.On("UpdateMessage", ag.ctx, mock.Anything, mock.Anything).Return(nil) @@ -1315,10 +1604,11 @@ func TestAttemptMessageDispatchGroupInit(t *testing.T) { _, err := ag.attemptMessageDispatch(ag.ctx, &fftypes.Message{ Header: fftypes.MessageHeader{ - ID: fftypes.NewUUID(), - Type: fftypes.MessageTypeGroupInit, + ID: fftypes.NewUUID(), + Type: fftypes.MessageTypeGroupInit, + SignerRef: fftypes.SignerRef{Key: "0x12345", Author: org1.DID}, }, - }, nil, bs) + }, nil, bs, &fftypes.Pin{Signer: "0x12345"}) assert.NoError(t, err) } @@ -1327,16 +1617,20 @@ func TestAttemptMessageUpdateMessageFail(t *testing.T) { ag, cancel := newTestAggregator() defer cancel() bs := newBatchState(ag) + org1 := newTestOrg("org1") mdi := ag.database.(*databasemocks.Plugin) mdm := ag.data.(*datamocks.Manager) + mim := ag.identity.(*identitymanagermocks.Manager) + + mim.On("FindIdentityForVerifier", ag.ctx, mock.Anything, mock.Anything, mock.Anything).Return(org1, nil) mdm.On("GetMessageData", ag.ctx, mock.Anything, true).Return([]*fftypes.Data{}, true, nil) mdm.On("ValidateAll", ag.ctx, mock.Anything).Return(true, nil) mdi.On("UpdateMessage", ag.ctx, mock.Anything, mock.Anything).Return(fmt.Errorf("pop")) _, err := ag.attemptMessageDispatch(ag.ctx, &fftypes.Message{ - Header: fftypes.MessageHeader{ID: fftypes.NewUUID()}, - }, nil, bs) + Header: fftypes.MessageHeader{ID: fftypes.NewUUID(), SignerRef: fftypes.SignerRef{Key: "0x12345", Author: org1.DID}}, + }, nil, bs, &fftypes.Pin{Signer: "0x12345"}) assert.NoError(t, err) err = bs.RunFinalize(ag.ctx) @@ -1361,12 +1655,12 @@ func TestRewindOffchainBatchesBatchesNoRewind(t *testing.T) { ag, cancel := newTestAggregator() defer cancel() - go ag.offchainListener() + go ag.batchRewindListener() - ag.offchainBatches <- fftypes.NewUUID() - ag.offchainBatches <- fftypes.NewUUID() - ag.offchainBatches <- fftypes.NewUUID() - ag.offchainBatches <- fftypes.NewUUID() + ag.rewindBatches <- fftypes.NewUUID() + ag.rewindBatches <- fftypes.NewUUID() + ag.rewindBatches <- fftypes.NewUUID() + ag.rewindBatches <- fftypes.NewUUID() mdi := 
ag.database.(*databasemocks.Plugin)
 	mdi.On("GetPins", ag.ctx, mock.Anything, mock.Anything).Return([]*fftypes.Pin{}, nil, nil)
@@ -1381,12 +1675,12 @@ func TestRewindOffchainBatchesBatchesRewind(t *testing.T) {
 	ag, cancel := newTestAggregator()
 	defer cancel()
-	go ag.offchainListener()
+	go ag.batchRewindListener()
-	ag.offchainBatches <- fftypes.NewUUID()
-	ag.offchainBatches <- fftypes.NewUUID()
-	ag.offchainBatches <- fftypes.NewUUID()
-	ag.offchainBatches <- fftypes.NewUUID()
+	ag.rewindBatches <- fftypes.NewUUID()
+	ag.rewindBatches <- fftypes.NewUUID()
+	ag.rewindBatches <- fftypes.NewUUID()
+	ag.rewindBatches <- fftypes.NewUUID()
 	mdi := ag.database.(*databasemocks.Plugin)
 	mdi.On("GetPins", ag.ctx, mock.Anything, mock.Anything).Return([]*fftypes.Pin{
diff --git a/internal/events/batch_pin_complete.go b/internal/events/batch_pin_complete.go
index 98e2bc51e2..49eba7e7d5 100644
--- a/internal/events/batch_pin_complete.go
+++ b/internal/events/batch_pin_complete.go
@@ -32,7 +32,7 @@ import (
 //
 // We must block here long enough to get the payload from the publicstorage, persist the messages in the correct
 // sequence, and also persist all the data.
-func (em *eventManager) BatchPinComplete(bi blockchain.Plugin, batchPin *blockchain.BatchPin, signingIdentity string) error {
+func (em *eventManager) BatchPinComplete(bi blockchain.Plugin, batchPin *blockchain.BatchPin, signingKey *fftypes.VerifierRef) error {
 	if batchPin.TransactionID == nil {
 		log.L(em.ctx).Errorf("Invalid BatchPin transaction - ID is nil")
 		return nil // move on
@@ -42,19 +42,19 @@ func (em *eventManager) BatchPinComplete(bi blockchain.Plugin, batchPin *blockch
 		return nil // move on
 	}
-	log.L(em.ctx).Infof("-> BatchPinComplete batch=%s txn=%s signingIdentity=%s", batchPin.BatchID, batchPin.Event.ProtocolID, signingIdentity)
+	log.L(em.ctx).Infof("-> BatchPinComplete batch=%s txn=%s signingIdentity=%s", batchPin.BatchID, batchPin.Event.ProtocolID, signingKey.Value)
 	defer func() {
-		log.L(em.ctx).Infof("<- BatchPinComplete batch=%s txn=%s signingIdentity=%s", batchPin.BatchID, batchPin.Event.ProtocolID, signingIdentity)
+		log.L(em.ctx).Infof("<- BatchPinComplete batch=%s txn=%s signingIdentity=%s", batchPin.BatchID, batchPin.Event.ProtocolID, signingKey.Value)
 	}()
 	log.L(em.ctx).Tracef("BatchPinComplete batch=%s info: %+v", batchPin.BatchID, batchPin.Event.Info)
 	if batchPin.BatchPayloadRef != "" {
-		return em.handleBroadcastPinComplete(batchPin, signingIdentity)
+		return em.handleBroadcastPinComplete(batchPin, signingKey)
 	}
-	return em.handlePrivatePinComplete(batchPin)
+	return em.handlePrivatePinComplete(batchPin, signingKey)
 }
-func (em *eventManager) handlePrivatePinComplete(batchPin *blockchain.BatchPin) error {
+func (em *eventManager) handlePrivatePinComplete(batchPin *blockchain.BatchPin, signingKey *fftypes.VerifierRef) error {
 	// Here we simple record all the pins as parked, and emit an event for the aggregator
 	// to check whether the messages in the batch have been written.
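The net effect of the signature change above is that the blockchain callback now hands over the raw on-chain verifier instead of a pre-resolved identity string, and that value travels with every parked pin. A minimal usage sketch, not code from this patch (the wrapper function name is invented; em and bi stand for the event manager callback interface and the blockchain plugin, and the field values are placeholders):

func reportBatchPin(em events.EventManager, bi blockchain.Plugin) error {
	// BatchPinComplete now receives the raw signing key as a VerifierRef; resolving
	// it to a registered identity happens later, in the aggregator.
	return em.BatchPinComplete(bi, &blockchain.BatchPin{
		TransactionID:   fftypes.NewUUID(),
		BatchID:         fftypes.NewUUID(),
		BatchPayloadRef: "", // empty payload ref => private batch, pins are parked for the aggregator
		Contexts:        []*fftypes.Bytes32{fftypes.NewRandB32()},
	}, &fftypes.VerifierRef{
		Type:  fftypes.VerifierTypeEthAddress, // in the real code this matches the plugin's VerifierType()
		Value: "0x12345",
	})
}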
return em.retry.Do(em.ctx, "persist private batch pins", func(attempt int) (bool, error) { @@ -63,7 +63,7 @@ func (em *eventManager) handlePrivatePinComplete(batchPin *blockchain.BatchPin) err := em.database.RunAsGroup(em.ctx, func(ctx context.Context) error { err := em.persistBatchTransaction(ctx, batchPin) if err == nil { - err = em.persistContexts(ctx, batchPin, true) + err = em.persistContexts(ctx, batchPin, signingKey, true) } return err }) @@ -76,13 +76,14 @@ func (em *eventManager) persistBatchTransaction(ctx context.Context, batchPin *b return err } -func (em *eventManager) persistContexts(ctx context.Context, batchPin *blockchain.BatchPin, private bool) error { +func (em *eventManager) persistContexts(ctx context.Context, batchPin *blockchain.BatchPin, signingKey *fftypes.VerifierRef, private bool) error { for idx, hash := range batchPin.Contexts { if err := em.database.UpsertPin(ctx, &fftypes.Pin{ Masked: private, Hash: hash, Batch: batchPin.BatchID, Index: int64(idx), + Signer: signingKey.Value, // We don't store the type as we can infer that from the blockchain Created: fftypes.Now(), }); err != nil { return err @@ -91,7 +92,7 @@ func (em *eventManager) persistContexts(ctx context.Context, batchPin *blockchai return nil } -func (em *eventManager) handleBroadcastPinComplete(batchPin *blockchain.BatchPin, signingIdentity string) error { +func (em *eventManager) handleBroadcastPinComplete(batchPin *blockchain.BatchPin, signingKey *fftypes.VerifierRef) error { var body io.ReadCloser if err := em.retry.Do(em.ctx, "retrieve data", func(attempt int) (retry bool, err error) { body, err = em.publicstorage.RetrieveData(em.ctx, batchPin.BatchPayloadRef) @@ -127,9 +128,9 @@ func (em *eventManager) handleBroadcastPinComplete(batchPin *blockchain.BatchPin // Note that in the case of a bad batch broadcast, we don't store the pin. Because we know we // are never going to be able to process it (we retrieved it successfully, it's just invalid). - valid, err := em.persistBatchFromBroadcast(ctx, batch, batchPin.BatchHash, signingIdentity) + valid, err := em.persistBatchFromBroadcast(ctx, batch, batchPin.BatchHash) if valid && err == nil { - err = em.persistContexts(ctx, batchPin, false) + err = em.persistContexts(ctx, batchPin, signingKey, false) } return err }) diff --git a/internal/events/batch_pin_complete_test.go b/internal/events/batch_pin_complete_test.go index 5744cea234..47a0617ec8 100644 --- a/internal/events/batch_pin_complete_test.go +++ b/internal/events/batch_pin_complete_test.go @@ -37,12 +37,12 @@ import ( ) func sampleBatch(t *testing.T, txType fftypes.TransactionType, data ...*fftypes.Data) *fftypes.Batch { - identity := fftypes.Identity{Author: "signingOrg", Key: "0x12345"} + identity := fftypes.SignerRef{Author: "signingOrg", Key: "0x12345"} msg := &fftypes.Message{ Header: fftypes.MessageHeader{ - Identity: identity, - ID: fftypes.NewUUID(), - TxType: txType, + SignerRef: identity, + ID: fftypes.NewUUID(), + TxType: txType, }, } for _, d := range data { @@ -50,9 +50,9 @@ func sampleBatch(t *testing.T, txType fftypes.TransactionType, data ...*fftypes. 
assert.NoError(t, err) } batch := &fftypes.Batch{ - Identity: identity, - ID: fftypes.NewUUID(), - Node: fftypes.NewUUID(), + SignerRef: identity, + ID: fftypes.NewUUID(), + Node: fftypes.NewUUID(), Payload: fftypes.BatchPayload{ TX: fftypes.TransactionRef{ ID: fftypes.NewUUID(), @@ -86,9 +86,9 @@ func TestBatchPinCompleteOkBroadcast(t *testing.T) { batchData := &fftypes.Batch{ ID: batch.BatchID, Namespace: "ns1", - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", - Key: "0x12345", + Key: "0x22222", }, PayloadRef: batch.BatchPayloadRef, Payload: fftypes.BatchPayload{ @@ -141,9 +141,12 @@ func TestBatchPinCompleteOkBroadcast(t *testing.T) { mbi := &blockchainmocks.Plugin{} mim := em.identity.(*identitymanagermocks.Manager) - mim.On("ResolveSigningKeyIdentity", mock.Anything, "0x12345").Return("author1", nil) + mim.On("NormalizeSigningKeyIdentity", mock.Anything, "0x12345").Return("author1", nil) - err = em.BatchPinComplete(mbi, batch, "0x12345") + err = em.BatchPinComplete(mbi, batch, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "0x12345", + }) assert.NoError(t, err) mdi.AssertExpectations(t) @@ -192,7 +195,10 @@ func TestBatchPinCompleteOkPrivate(t *testing.T) { mdi.On("UpsertPin", mock.Anything, mock.Anything).Return(nil) mbi := &blockchainmocks.Plugin{} - err = em.BatchPinComplete(mbi, batch, "0xffffeeee") + err = em.BatchPinComplete(mbi, batch, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "0xffffeeee", + }) assert.NoError(t, err) // Call through to persistBatch - the hash of our batch will be invalid, @@ -223,7 +229,10 @@ func TestSequencedBroadcastRetrieveIPFSFail(t *testing.T) { mpi.On("RetrieveData", mock.Anything, mock.Anything).Return(nil, fmt.Errorf("pop")) mbi := &blockchainmocks.Plugin{} - err := em.BatchPinComplete(mbi, batch, "0xffffeeee") + err := em.BatchPinComplete(mbi, batch, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "0xffffeeee", + }) mpi.AssertExpectations(t) assert.Regexp(t, "FF10158", err) } @@ -245,7 +254,10 @@ func TestBatchPinCompleteBadData(t *testing.T) { mpi.On("RetrieveData", mock.Anything, mock.Anything).Return(batchReadCloser, nil) mbi := &blockchainmocks.Plugin{} - err := em.BatchPinComplete(mbi, batch, "0xffffeeee") + err := em.BatchPinComplete(mbi, batch, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "0xffffeeee", + }) assert.NoError(t, err) // We do not return a blocking error in the case of bad data stored in IPFS } @@ -256,7 +268,10 @@ func TestBatchPinCompleteNoTX(t *testing.T) { batch := &blockchain.BatchPin{} mbi := &blockchainmocks.Plugin{} - err := em.BatchPinComplete(mbi, batch, "0x12345") + err := em.BatchPinComplete(mbi, batch, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "0x12345", + }) assert.NoError(t, err) } @@ -273,7 +288,10 @@ func TestBatchPinCompleteBadNamespace(t *testing.T) { } mbi := &blockchainmocks.Plugin{} - err := em.BatchPinComplete(mbi, batch, "0x12345") + err := em.BatchPinComplete(mbi, batch, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "0x12345", + }) assert.NoError(t, err) } @@ -291,7 +309,7 @@ func TestPersistBatchAuthorResolveFail(t *testing.T) { batchHash := fftypes.NewRandB32() batch := &fftypes.Batch{ ID: fftypes.NewUUID(), - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -304,9 +322,9 @@ func TestPersistBatchAuthorResolveFail(t *testing.T) { Hash: batchHash, } mim := 
em.identity.(*identitymanagermocks.Manager) - mim.On("ResolveSigningKeyIdentity", mock.Anything, mock.Anything).Return("", fmt.Errorf("pop")) + mim.On("NormalizeSigningKeyIdentity", mock.Anything, mock.Anything).Return("", fmt.Errorf("pop")) batch.Hash = batch.Payload.Hash() - valid, err := em.persistBatchFromBroadcast(context.Background(), batch, batchHash, "0x12345") + valid, err := em.persistBatchFromBroadcast(context.Background(), batch, batchHash) assert.NoError(t, err) // retryable assert.False(t, valid) } @@ -317,7 +335,7 @@ func TestPersistBatchBadAuthor(t *testing.T) { batchHash := fftypes.NewRandB32() batch := &fftypes.Batch{ ID: fftypes.NewUUID(), - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -330,9 +348,9 @@ func TestPersistBatchBadAuthor(t *testing.T) { Hash: batchHash, } mim := em.identity.(*identitymanagermocks.Manager) - mim.On("ResolveSigningKeyIdentity", mock.Anything, mock.Anything).Return("author2", nil) + mim.On("NormalizeSigningKeyIdentity", mock.Anything, mock.Anything).Return("author2", nil) batch.Hash = batch.Payload.Hash() - valid, err := em.persistBatchFromBroadcast(context.Background(), batch, batchHash, "0x12345") + valid, err := em.persistBatchFromBroadcast(context.Background(), batch, batchHash) assert.NoError(t, err) assert.False(t, valid) } @@ -342,7 +360,7 @@ func TestPersistBatchMismatchChainHash(t *testing.T) { defer cancel() batch := &fftypes.Batch{ ID: fftypes.NewUUID(), - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -355,9 +373,9 @@ func TestPersistBatchMismatchChainHash(t *testing.T) { Hash: fftypes.NewRandB32(), } mim := em.identity.(*identitymanagermocks.Manager) - mim.On("ResolveSigningKeyIdentity", mock.Anything, mock.Anything).Return("author1", nil) + mim.On("NormalizeSigningKeyIdentity", mock.Anything, mock.Anything).Return("author1", nil) batch.Hash = batch.Payload.Hash() - valid, err := em.persistBatchFromBroadcast(context.Background(), batch, fftypes.NewRandB32(), "0x12345") + valid, err := em.persistBatchFromBroadcast(context.Background(), batch, fftypes.NewRandB32()) assert.NoError(t, err) assert.False(t, valid) } @@ -367,7 +385,7 @@ func TestPersistBatchUpsertBatchMismatchHash(t *testing.T) { defer cancel() batch := &fftypes.Batch{ ID: fftypes.NewUUID(), - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -394,7 +412,7 @@ func TestPersistBatchBadHash(t *testing.T) { defer cancel() batch := &fftypes.Batch{ ID: fftypes.NewUUID(), - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -417,7 +435,7 @@ func TestPersistBatchUpsertBatchFail(t *testing.T) { defer cancel() batch := &fftypes.Batch{ ID: fftypes.NewUUID(), - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -443,7 +461,7 @@ func TestPersistBatchSwallowBadData(t *testing.T) { defer cancel() batch := &fftypes.Batch{ ID: fftypes.NewUUID(), - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -474,7 +492,7 @@ func TestPersistBatchGoodDataUpsertOptimizeExistingFail(t *testing.T) { batch := &fftypes.Batch{ ID: fftypes.NewUUID(), Node: testNodeID, - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -507,7 +525,7 @@ func TestPersistBatchGoodDataUpsertOptimizeNewFail(t *testing.T) { batch := &fftypes.Batch{ ID: fftypes.NewUUID(), 
Node: fftypes.NewUUID(), - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -710,6 +728,9 @@ func TestPersistContextsFail(t *testing.T) { Contexts: []*fftypes.Bytes32{ fftypes.NewRandB32(), }, + }, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "0x12345", }, false) assert.EqualError(t, err, "pop") mdi.AssertExpectations(t) diff --git a/internal/events/dx_callbacks.go b/internal/events/dx_callbacks.go index f8831b4f28..f915fd0ca2 100644 --- a/internal/events/dx_callbacks.go +++ b/internal/events/dx_callbacks.go @@ -64,51 +64,49 @@ func (em *eventManager) MessageReceived(dx dataexchange.Plugin, peerID string, d return string(manifestBytes), err } -func (em *eventManager) checkReceivedIdentity(ctx context.Context, peerID, author, signingKey string) (node *fftypes.Node, err error) { +// Check data exchange peer the data came from, has been registered to the org listed in the batch. +// Note the on-chain identity check is performed separately by the aggregator (across broadcast and private consistently). +func (em *eventManager) checkReceivedOffchainIdentity(ctx context.Context, peerID, author string) (node *fftypes.Identity, err error) { l := log.L(em.ctx) - // Find the node associated with the peer - filter := database.NodeQueryFactory.NewFilter(ctx).Eq("dx.peer", peerID) - nodes, _, err := em.database.GetNodes(ctx, filter) + // Resolve the node for the peer ID + node, err = em.identity.FindIdentityForVerifier(ctx, []fftypes.IdentityType{fftypes.IdentityTypeNode}, fftypes.SystemNamespace, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeFFDXPeerID, + Value: peerID, + }) if err != nil { - l.Errorf("Failed to retrieve node: %v", err) - return nil, err // retry for persistence error - } - if len(nodes) < 1 { - l.Errorf("Node not found for peer %s", peerID) - return nil, nil + return nil, err } - node = nodes[0] // Find the identity in the mesage - org, err := em.database.GetOrganizationByIdentity(ctx, signingKey) - if err != nil { + org, retryable, err := em.identity.CachedIdentityLookup(ctx, author) + if err != nil && retryable { l.Errorf("Failed to retrieve org: %v", err) - return nil, err // retry for persistence error + return nil, err // retryable error } - if org == nil { - l.Errorf("Org not found for identity %s", author) + if org == nil || err != nil { + l.Errorf("Identity %s not found", author) return nil, nil } // One of the orgs in the hierarchy of the author must be the owner of the peer node candidate := org - foundNodeOrg := signingKey == node.Owner - for !foundNodeOrg && candidate.Parent != "" { + foundNodeOrg := org.ID.Equals(node.Parent) + for !foundNodeOrg && candidate.Parent != nil { parent := candidate.Parent - candidate, err = em.database.GetOrganizationByIdentity(ctx, parent) + candidate, err = em.identity.CachedIdentityLookupByID(ctx, parent) if err != nil { l.Errorf("Failed to retrieve node org '%s': %v", parent, err) return nil, err // retry for persistence error } if candidate == nil { - l.Errorf("Did not find org '%s' in chain for identity '%s'", parent, org.Identity) + l.Errorf("Did not find org '%s' in chain for identity '%s' (%s)", parent, org.DID, org.ID) return nil, nil } - foundNodeOrg = candidate.Identity == node.Owner + foundNodeOrg = candidate.ID.Equals(node.Parent) } if !foundNodeOrg { - l.Errorf("No org in the chain matches owner '%s' of node '%s' ('%s')", node.Owner, node.ID, node.Name) + l.Errorf("No org in the chain matches owner '%s' of node '%s' ('%s')", node.Parent, node.ID, 
node.Name) return nil, nil } @@ -122,7 +120,7 @@ func (em *eventManager) privateBatchReceived(peerID string, batch *fftypes.Batch return true, em.database.RunAsGroup(em.ctx, func(ctx context.Context) error { l := log.L(ctx) - node, err := em.checkReceivedIdentity(ctx, peerID, batch.Author, batch.Key) + node, err := em.checkReceivedOffchainIdentity(ctx, peerID, batch.Author) if err != nil { return err } @@ -133,13 +131,13 @@ func (em *eventManager) privateBatchReceived(peerID string, batch *fftypes.Batch valid, err := em.persistBatch(ctx, batch) if err != nil || !valid { - l.Errorf("Batch received from %s/%s processing failed valid=%t: %s", node.Owner, node.Name, valid, err) + l.Errorf("Batch received from org=%s node=%s processing failed valid=%t: %s", node.Parent, node.Name, valid, err) return err // retry - persistBatch only returns retryable errors } if batch.Payload.TX.Type == fftypes.TransactionTypeBatchPin { // Poke the aggregator to do its stuff - em.aggregator.offchainBatches <- batch.ID + em.aggregator.rewindBatches <- batch.ID } else if batch.Payload.TX.Type == fftypes.TransactionTypeUnpinned { // We need to confirm all these messages immediately. if err := em.markUnpinnedMessagesConfirmed(ctx, batch); err != nil { @@ -179,6 +177,7 @@ func (em *eventManager) markUnpinnedMessagesConfirmed(ctx context.Context, batch for _, msg := range batch.Payload.Messages { event := fftypes.NewEvent(fftypes.EventTypeMessageConfirmed, batch.Namespace, msg.Header.ID, batch.Payload.TX.ID) + event.Correlator = msg.Header.CID if err := em.database.InsertEvent(ctx, event); err != nil { return err } @@ -252,7 +251,7 @@ func (em *eventManager) BLOBReceived(dx dataexchange.Plugin, peerID string, hash for bid := range batchIDs { var batchID = bid // cannot use the address of the loop var l.Infof("Batch '%s' contains reference to received blob. 
Peer='%s' Hash='%v' PayloadRef='%s'", &bid, peerID, &hash, payloadRef) - em.aggregator.offchainBatches <- &batchID + em.aggregator.rewindBatches <- &batchID } return false, nil diff --git a/internal/events/dx_callbacks_test.go b/internal/events/dx_callbacks_test.go index 0223a07373..6349f9749d 100644 --- a/internal/events/dx_callbacks_test.go +++ b/internal/events/dx_callbacks_test.go @@ -17,6 +17,7 @@ package events import ( + "context" "encoding/json" "fmt" "strings" @@ -25,6 +26,7 @@ import ( "github.com/hyperledger/firefly/mocks/databasemocks" "github.com/hyperledger/firefly/mocks/dataexchangemocks" "github.com/hyperledger/firefly/mocks/definitionsmocks" + "github.com/hyperledger/firefly/mocks/identitymanagermocks" "github.com/hyperledger/firefly/pkg/database" "github.com/hyperledger/firefly/pkg/dataexchange" "github.com/hyperledger/firefly/pkg/fftypes" @@ -43,23 +45,56 @@ func sampleBatchTransfer(t *testing.T, txType fftypes.TransactionType, data ...* return batch, b } +func newTestOrg(name string) *fftypes.Identity { + identity := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + Type: fftypes.IdentityTypeOrg, + Namespace: fftypes.SystemNamespace, + Name: name, + Parent: nil, + }, + } + identity.DID, _ = identity.GenerateDID(context.Background()) + return identity +} + +func newTestNode(name string, owner *fftypes.Identity) *fftypes.Identity { + identity := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + Type: fftypes.IdentityTypeNode, + Namespace: fftypes.SystemNamespace, + Name: name, + Parent: owner.ID, + }, + IdentityProfile: fftypes.IdentityProfile{ + Profile: fftypes.JSONObject{ + "id": fmt.Sprintf("%s-peer", name), + "url": fmt.Sprintf("https://%s.example.com", name), + }, + }, + } + identity.DID, _ = identity.GenerateDID(context.Background()) + return identity +} + func TestPinnedReceiveOK(t *testing.T) { em, cancel := newTestEventManager(t) defer cancel() _, b := sampleBatchTransfer(t, fftypes.TransactionTypeBatchPin) + org1 := newTestOrg("org1") + node1 := newTestNode("node1", org1) mdi := em.database.(*databasemocks.Plugin) mdx := &dataexchangemocks.Plugin{} - mdi.On("GetNodes", em.ctx, mock.Anything).Return([]*fftypes.Node{ - {Name: "node1", Owner: "parentOrg"}, - }, nil, nil) - mdi.On("GetOrganizationByIdentity", em.ctx, "0x12345").Return(&fftypes.Organization{ - Identity: "0x12345", Parent: "parentOrg", - }, nil) - mdi.On("GetOrganizationByIdentity", em.ctx, "parentOrg").Return(&fftypes.Organization{ - Identity: "parentOrg", - }, nil) + mim := em.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", em.ctx, []fftypes.IdentityType{fftypes.IdentityTypeNode}, fftypes.SystemNamespace, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeFFDXPeerID, + Value: "peer1", + }).Return(node1, nil) + mim.On("CachedIdentityLookup", em.ctx, "signingOrg").Return(org1, false, nil) mdi.On("UpsertBatch", em.ctx, mock.Anything).Return(nil, nil) mdi.On("UpsertMessage", em.ctx, mock.Anything, database.UpsertOptimizationNew).Return(nil, nil) m, err := em.MessageReceived(mdx, "peer1", b) @@ -76,23 +111,22 @@ func TestMessageReceiveOkBadBatchIgnored(t *testing.T) { _, b := sampleBatchTransfer(t, fftypes.TransactionTypeTokenPool) - mdi := em.database.(*databasemocks.Plugin) + org1 := newTestOrg("org1") + node1 := newTestNode("node1", org1) mdx := &dataexchangemocks.Plugin{} - mdi.On("GetNodes", em.ctx, mock.Anything).Return([]*fftypes.Node{ - {Name: "node1", Owner: "parentOrg"}, - }, nil, nil) - 
mdi.On("GetOrganizationByIdentity", em.ctx, "0x12345").Return(&fftypes.Organization{ - Identity: "0x12345", Parent: "parentOrg", - }, nil) - mdi.On("GetOrganizationByIdentity", em.ctx, "parentOrg").Return(&fftypes.Organization{ - Identity: "parentOrg", - }, nil) + mim := em.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", em.ctx, []fftypes.IdentityType{fftypes.IdentityTypeNode}, fftypes.SystemNamespace, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeFFDXPeerID, + Value: "peer1", + }).Return(node1, nil) + mim.On("CachedIdentityLookup", em.ctx, "signingOrg").Return(org1, false, nil) + m, err := em.MessageReceived(mdx, "peer1", b) assert.NoError(t, err) assert.Empty(t, m) - mdi.AssertExpectations(t) mdx.AssertExpectations(t) + mim.AssertExpectations(t) } func TestMessageReceivePersistBatchError(t *testing.T) { @@ -101,17 +135,16 @@ func TestMessageReceivePersistBatchError(t *testing.T) { _, b := sampleBatchTransfer(t, fftypes.TransactionTypeBatchPin) + org1 := newTestOrg("org1") + node1 := newTestNode("node1", org1) mdi := em.database.(*databasemocks.Plugin) mdx := &dataexchangemocks.Plugin{} - mdi.On("GetNodes", em.ctx, mock.Anything).Return([]*fftypes.Node{ - {Name: "node1", Owner: "parentOrg"}, - }, nil, nil) - mdi.On("GetOrganizationByIdentity", em.ctx, "0x12345").Return(&fftypes.Organization{ - Identity: "0x12345", Parent: "parentOrg", - }, nil) - mdi.On("GetOrganizationByIdentity", em.ctx, "parentOrg").Return(&fftypes.Organization{ - Identity: "parentOrg", - }, nil) + mim := em.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", em.ctx, []fftypes.IdentityType{fftypes.IdentityTypeNode}, fftypes.SystemNamespace, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeFFDXPeerID, + Value: "peer1", + }).Return(node1, nil) + mim.On("CachedIdentityLookup", em.ctx, "signingOrg").Return(org1, false, nil) mdi.On("UpsertBatch", em.ctx, mock.Anything).Return(fmt.Errorf("pop")) m, err := em.MessageReceived(mdx, "peer1", b) assert.Regexp(t, "FF10158", err) @@ -119,6 +152,7 @@ func TestMessageReceivePersistBatchError(t *testing.T) { mdi.AssertExpectations(t) mdx.AssertExpectations(t) + mim.AssertExpectations(t) } func TestMessageReceivedBadData(t *testing.T) { @@ -193,86 +227,34 @@ func TestMessageReceiveNodeLookupError(t *testing.T) { Batch: batch, }) - mdi := em.database.(*databasemocks.Plugin) - mdx := &dataexchangemocks.Plugin{} - mdi.On("GetNodes", em.ctx, mock.Anything).Return(nil, nil, fmt.Errorf("pop")) - m, err := em.MessageReceived(mdx, "peer1", b) - assert.Regexp(t, "FF10158", err) - assert.Empty(t, m) -} - -func TestMessageReceiveNodeNotFound(t *testing.T) { - em, cancel := newTestEventManager(t) - defer cancel() - - batch := &fftypes.Batch{} - b, _ := json.Marshal(&fftypes.TransportWrapper{ - Batch: batch, - }) - - mdi := em.database.(*databasemocks.Plugin) - mdx := &dataexchangemocks.Plugin{} - mdi.On("GetNodes", em.ctx, mock.Anything).Return(nil, nil, nil) - m, err := em.MessageReceived(mdx, "peer1", b) - assert.NoError(t, err) - assert.Empty(t, m) -} - -func TestMessageReceiveAuthorLookupError(t *testing.T) { - em, cancel := newTestEventManager(t) - cancel() // to stop retry + mim := em.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", em.ctx, []fftypes.IdentityType{fftypes.IdentityTypeNode}, fftypes.SystemNamespace, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeFFDXPeerID, + Value: "peer1", + }).Return(nil, fmt.Errorf("pop")) - batch := &fftypes.Batch{} - b, _ := json.Marshal(&fftypes.TransportWrapper{ - 
Batch: batch, - }) - - mdi := em.database.(*databasemocks.Plugin) mdx := &dataexchangemocks.Plugin{} - mdi.On("GetNodes", em.ctx, mock.Anything).Return([]*fftypes.Node{ - {Name: "node1", Owner: "org1"}, - }, nil, nil) - mdi.On("GetOrganizationByIdentity", em.ctx, mock.Anything).Return(nil, fmt.Errorf("pop")) m, err := em.MessageReceived(mdx, "peer1", b) assert.Regexp(t, "FF10158", err) assert.Empty(t, m) } -func TestMessageReceiveAuthorNotFound(t *testing.T) { - em, cancel := newTestEventManager(t) - defer cancel() - - batch := &fftypes.Batch{} - b, _ := json.Marshal(&fftypes.TransportWrapper{ - Batch: batch, - }) - - mdi := em.database.(*databasemocks.Plugin) - mdx := &dataexchangemocks.Plugin{} - mdi.On("GetNodes", em.ctx, mock.Anything).Return([]*fftypes.Node{ - {Name: "node1", Owner: "org1"}, - }, nil, nil) - mdi.On("GetOrganizationByIdentity", em.ctx, mock.Anything).Return(nil, nil) - m, err := em.MessageReceived(mdx, "peer1", b) - assert.NoError(t, err) - assert.Empty(t, m) -} - func TestMessageReceiveGetCandidateOrgFail(t *testing.T) { em, cancel := newTestEventManager(t) cancel() // retryable error so we need to break the loop _, b := sampleBatchTransfer(t, fftypes.TransactionTypeBatchPin) + org1 := newTestOrg("org1") + node1 := newTestNode("node1", org1) mdi := em.database.(*databasemocks.Plugin) mdx := &dataexchangemocks.Plugin{} - mdi.On("GetNodes", em.ctx, mock.Anything).Return([]*fftypes.Node{ - {Name: "node1", Owner: "parentOrg"}, - }, nil, nil) - mdi.On("GetOrganizationByIdentity", em.ctx, "0x12345").Return(&fftypes.Organization{ - Identity: "0x12345", Parent: "parentOrg", - }, nil) - mdi.On("GetOrganizationByIdentity", em.ctx, "parentOrg").Return(nil, fmt.Errorf("pop")) + mim := em.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", em.ctx, []fftypes.IdentityType{fftypes.IdentityTypeNode}, fftypes.SystemNamespace, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeFFDXPeerID, + Value: "peer1", + }).Return(node1, nil) + mim.On("CachedIdentityLookup", em.ctx, "signingOrg").Return(nil, true, fmt.Errorf("pop")) m, err := em.MessageReceived(mdx, "peer1", b) assert.Regexp(t, "FF10158", err) assert.Empty(t, m) @@ -287,15 +269,16 @@ func TestMessageReceiveGetCandidateOrgNotFound(t *testing.T) { _, b := sampleBatchTransfer(t, fftypes.TransactionTypeBatchPin) + org1 := newTestOrg("org1") + node1 := newTestNode("node1", org1) mdi := em.database.(*databasemocks.Plugin) mdx := &dataexchangemocks.Plugin{} - mdi.On("GetNodes", em.ctx, mock.Anything).Return([]*fftypes.Node{ - {Name: "node1", Owner: "parentOrg"}, - }, nil, nil) - mdi.On("GetOrganizationByIdentity", em.ctx, "0x12345").Return(&fftypes.Organization{ - Identity: "0x12345", Parent: "parentOrg", - }, nil) - mdi.On("GetOrganizationByIdentity", em.ctx, "parentOrg").Return(nil, nil) + mim := em.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", em.ctx, []fftypes.IdentityType{fftypes.IdentityTypeNode}, fftypes.SystemNamespace, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeFFDXPeerID, + Value: "peer1", + }).Return(node1, nil) + mim.On("CachedIdentityLookup", em.ctx, "signingOrg").Return(nil, false, nil) m, err := em.MessageReceived(mdx, "peer1", b) assert.NoError(t, err) assert.Empty(t, m) @@ -312,15 +295,14 @@ func TestMessageReceiveGetCandidateOrgNotMatch(t *testing.T) { mdi := em.database.(*databasemocks.Plugin) mdx := &dataexchangemocks.Plugin{} - mdi.On("GetNodes", em.ctx, mock.Anything).Return([]*fftypes.Node{ - {Name: "node1", Owner: "another"}, - }, nil, nil) - 
mdi.On("GetOrganizationByIdentity", em.ctx, "0x12345").Return(&fftypes.Organization{ - Identity: "0x12345", Parent: "parentOrg", - }, nil) - mdi.On("GetOrganizationByIdentity", em.ctx, "parentOrg").Return(&fftypes.Organization{ - Identity: "parentOrg", - }, nil) + org1 := newTestOrg("org1") + node1 := newTestNode("node1", org1) + mim := em.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", em.ctx, []fftypes.IdentityType{fftypes.IdentityTypeNode}, fftypes.SystemNamespace, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeFFDXPeerID, + Value: "peer1", + }).Return(node1, nil) + mim.On("CachedIdentityLookup", em.ctx, "signingOrg").Return(newTestOrg("org2"), false, nil) m, err := em.MessageReceived(mdx, "peer1", b) assert.NoError(t, err) assert.Empty(t, m) @@ -350,7 +332,7 @@ func TestBLOBReceivedTriggersRewindOk(t *testing.T) { err := em.BLOBReceived(mdx, "peer1", *hash, 12345, "ns1/path1") assert.NoError(t, err) - bid := <-em.aggregator.offchainBatches + bid := <-em.aggregator.rewindBatches assert.Equal(t, *batchID, *bid) mdi.AssertExpectations(t) @@ -590,44 +572,94 @@ func TestMessageReceiveMessageIdentityFail(t *testing.T) { em, cancel := newTestEventManager(t) cancel() // to avoid infinite retry + org1 := newTestOrg("org1") + org2 := newTestOrg("org2") + org2.Parent = org1.ID + node1 := newTestNode("node1", org1) _, b := sampleBatchTransfer(t, fftypes.TransactionTypeBatchPin) - mdi := em.database.(*databasemocks.Plugin) mdx := &dataexchangemocks.Plugin{} msh := em.definitions.(*definitionsmocks.DefinitionHandlers) msh.On("EnsureLocalGroup", em.ctx, mock.Anything).Return(true, nil) - mdi.On("GetNodes", em.ctx, mock.Anything).Return(nil, nil, fmt.Errorf("pop")) + mim := em.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", em.ctx, []fftypes.IdentityType{fftypes.IdentityTypeNode}, fftypes.SystemNamespace, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeFFDXPeerID, + Value: "peer1", + }).Return(node1, nil) + mim.On("CachedIdentityLookup", em.ctx, "signingOrg").Return(org2, false, nil) + mim.On("CachedIdentityLookupByID", em.ctx, org2.Parent).Return(nil, fmt.Errorf("pop")) m, err := em.MessageReceived(mdx, "peer1", b) assert.Regexp(t, "FF10158", err) assert.Empty(t, m) - mdi.AssertExpectations(t) mdx.AssertExpectations(t) + mim.AssertExpectations(t) +} + +func TestMessageReceiveMessageIdentityParentNotFound(t *testing.T) { + em, cancel := newTestEventManager(t) + cancel() // to avoid infinite retry + + org1 := newTestOrg("org1") + org2 := newTestOrg("org2") + org2.Parent = org1.ID + node1 := newTestNode("node1", org1) + _, b := sampleBatchTransfer(t, fftypes.TransactionTypeBatchPin) + + mdx := &dataexchangemocks.Plugin{} + + msh := em.definitions.(*definitionsmocks.DefinitionHandlers) + msh.On("EnsureLocalGroup", em.ctx, mock.Anything).Return(true, nil) + + mim := em.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", em.ctx, []fftypes.IdentityType{fftypes.IdentityTypeNode}, fftypes.SystemNamespace, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeFFDXPeerID, + Value: "peer1", + }).Return(node1, nil) + mim.On("CachedIdentityLookup", em.ctx, "signingOrg").Return(org2, false, nil) + mim.On("CachedIdentityLookupByID", em.ctx, org2.Parent).Return(nil, nil) + + m, err := em.MessageReceived(mdx, "peer1", b) + assert.NoError(t, err) + assert.Empty(t, m) + + mdx.AssertExpectations(t) + mim.AssertExpectations(t) } func TestMessageReceiveMessageIdentityIncorrect(t *testing.T) { em, cancel := newTestEventManager(t) cancel() // 
to avoid infinite retry - _, b := sampleBatchTransfer(t, fftypes.TransactionTypeUnpinned) + org1 := newTestOrg("org1") + org2 := newTestOrg("org2") + org3 := newTestOrg("org3") + org2.Parent = org1.ID + node1 := newTestNode("node1", org1) + _, b := sampleBatchTransfer(t, fftypes.TransactionTypeBatchPin) - mdi := em.database.(*databasemocks.Plugin) mdx := &dataexchangemocks.Plugin{} msh := em.definitions.(*definitionsmocks.DefinitionHandlers) msh.On("EnsureLocalGroup", em.ctx, mock.Anything).Return(true, nil) - mdi.On("GetNodes", em.ctx, mock.Anything).Return([]*fftypes.Node{}, nil, nil) + mim := em.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", em.ctx, []fftypes.IdentityType{fftypes.IdentityTypeNode}, fftypes.SystemNamespace, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeFFDXPeerID, + Value: "peer1", + }).Return(node1, nil) + mim.On("CachedIdentityLookup", em.ctx, "signingOrg").Return(org2, false, nil) + mim.On("CachedIdentityLookupByID", em.ctx, org2.Parent).Return(org3, nil) m, err := em.MessageReceived(mdx, "peer1", b) assert.NoError(t, err) assert.Empty(t, m) - mdi.AssertExpectations(t) mdx.AssertExpectations(t) + mim.AssertExpectations(t) } func TestMessageReceiveMessagePersistMessageFail(t *testing.T) { @@ -642,12 +674,14 @@ func TestMessageReceiveMessagePersistMessageFail(t *testing.T) { msh := em.definitions.(*definitionsmocks.DefinitionHandlers) msh.On("EnsureLocalGroup", em.ctx, mock.Anything).Return(true, nil) - mdi.On("GetNodes", em.ctx, mock.Anything).Return([]*fftypes.Node{ - {Name: "node1", Owner: "0x12345"}, - }, nil, nil) - mdi.On("GetOrganizationByIdentity", em.ctx, "0x12345").Return(&fftypes.Organization{ - Identity: "0x12345", - }, nil) + org1 := newTestOrg("org1") + node1 := newTestNode("node1", org1) + mim := em.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", em.ctx, []fftypes.IdentityType{fftypes.IdentityTypeNode}, fftypes.SystemNamespace, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeFFDXPeerID, + Value: "peer1", + }).Return(node1, nil) + mim.On("CachedIdentityLookup", em.ctx, "signingOrg").Return(org1, false, nil) mdi.On("UpsertBatch", em.ctx, mock.Anything).Return(nil, nil) mdi.On("UpsertMessage", em.ctx, mock.Anything, database.UpsertOptimizationNew).Return(fmt.Errorf("pop")) @@ -675,12 +709,14 @@ func TestMessageReceiveMessagePersistDataFail(t *testing.T) { msh := em.definitions.(*definitionsmocks.DefinitionHandlers) msh.On("EnsureLocalGroup", em.ctx, mock.Anything).Return(true, nil) - mdi.On("GetNodes", em.ctx, mock.Anything).Return([]*fftypes.Node{ - {Name: "node1", Owner: "0x12345"}, - }, nil, nil) - mdi.On("GetOrganizationByIdentity", em.ctx, "0x12345").Return(&fftypes.Organization{ - Identity: "0x12345", - }, nil) + org1 := newTestOrg("org1") + node1 := newTestNode("node1", org1) + mim := em.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", em.ctx, []fftypes.IdentityType{fftypes.IdentityTypeNode}, fftypes.SystemNamespace, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeFFDXPeerID, + Value: "peer1", + }).Return(node1, nil) + mim.On("CachedIdentityLookup", em.ctx, "signingOrg").Return(org1, false, nil) mdi.On("UpsertBatch", em.ctx, mock.Anything).Return(nil, nil) mdi.On("UpsertData", em.ctx, mock.Anything, database.UpsertOptimizationNew).Return(fmt.Errorf("pop")) @@ -705,15 +741,17 @@ func TestMessageReceiveUnpinnedBatchOk(t *testing.T) { mdi := em.database.(*databasemocks.Plugin) mdx := &dataexchangemocks.Plugin{} + org1 := newTestOrg("org1") + node1 := 
newTestNode("node1", org1) + msh := em.definitions.(*definitionsmocks.DefinitionHandlers) msh.On("EnsureLocalGroup", em.ctx, mock.Anything).Return(true, nil) - - mdi.On("GetNodes", em.ctx, mock.Anything).Return([]*fftypes.Node{ - {Name: "node1", Owner: "0x12345"}, - }, nil, nil) - mdi.On("GetOrganizationByIdentity", em.ctx, "0x12345").Return(&fftypes.Organization{ - Identity: "0x12345", - }, nil) + mim := em.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", em.ctx, []fftypes.IdentityType{fftypes.IdentityTypeNode}, fftypes.SystemNamespace, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeFFDXPeerID, + Value: "peer1", + }).Return(node1, nil) + mim.On("CachedIdentityLookup", em.ctx, "signingOrg").Return(org1, false, nil) mdi.On("UpsertBatch", em.ctx, mock.Anything).Return(nil, nil) mdi.On("UpsertData", em.ctx, mock.Anything, database.UpsertOptimizationNew).Return(nil) mdi.On("UpsertMessage", em.ctx, mock.Anything, database.UpsertOptimizationNew).Return(nil) @@ -740,15 +778,17 @@ func TestMessageReceiveUnpinnedBatchConfirmMessagesFail(t *testing.T) { mdi := em.database.(*databasemocks.Plugin) mdx := &dataexchangemocks.Plugin{} + org1 := newTestOrg("org1") + node1 := newTestNode("node1", org1) + msh := em.definitions.(*definitionsmocks.DefinitionHandlers) msh.On("EnsureLocalGroup", em.ctx, mock.Anything).Return(true, nil) - - mdi.On("GetNodes", em.ctx, mock.Anything).Return([]*fftypes.Node{ - {Name: "node1", Owner: "0x12345"}, - }, nil, nil) - mdi.On("GetOrganizationByIdentity", em.ctx, "0x12345").Return(&fftypes.Organization{ - Identity: "0x12345", - }, nil) + mim := em.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", em.ctx, []fftypes.IdentityType{fftypes.IdentityTypeNode}, fftypes.SystemNamespace, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeFFDXPeerID, + Value: "peer1", + }).Return(node1, nil) + mim.On("CachedIdentityLookup", em.ctx, "signingOrg").Return(org1, false, nil) mdi.On("UpsertBatch", em.ctx, mock.Anything).Return(nil, nil) mdi.On("UpsertData", em.ctx, mock.Anything, database.UpsertOptimizationNew).Return(nil) mdi.On("UpsertMessage", em.ctx, mock.Anything, database.UpsertOptimizationNew).Return(nil) @@ -775,15 +815,17 @@ func TestMessageReceiveUnpinnedBatchPersistEventFail(t *testing.T) { mdi := em.database.(*databasemocks.Plugin) mdx := &dataexchangemocks.Plugin{} + org1 := newTestOrg("org1") + node1 := newTestNode("node1", org1) + msh := em.definitions.(*definitionsmocks.DefinitionHandlers) msh.On("EnsureLocalGroup", em.ctx, mock.Anything).Return(true, nil) - - mdi.On("GetNodes", em.ctx, mock.Anything).Return([]*fftypes.Node{ - {Name: "node1", Owner: "0x12345"}, - }, nil, nil) - mdi.On("GetOrganizationByIdentity", em.ctx, "0x12345").Return(&fftypes.Organization{ - Identity: "0x12345", - }, nil) + mim := em.identity.(*identitymanagermocks.Manager) + mim.On("FindIdentityForVerifier", em.ctx, []fftypes.IdentityType{fftypes.IdentityTypeNode}, fftypes.SystemNamespace, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeFFDXPeerID, + Value: "peer1", + }).Return(node1, nil) + mim.On("CachedIdentityLookup", em.ctx, "signingOrg").Return(org1, false, nil) mdi.On("UpsertBatch", em.ctx, mock.Anything).Return(nil, nil) mdi.On("UpsertData", em.ctx, mock.Anything, database.UpsertOptimizationNew).Return(nil) mdi.On("UpsertMessage", em.ctx, mock.Anything, database.UpsertOptimizationNew).Return(nil) diff --git a/internal/events/event_dispatcher_test.go b/internal/events/event_dispatcher_test.go index d07bdc4936..2b3653ba9a 100644 --- 
a/internal/events/event_dispatcher_test.go +++ b/internal/events/event_dispatcher_test.go @@ -430,7 +430,7 @@ func TestFilterEventsMatch(t *testing.T) { Topics: fftypes.FFStringArray{"topic1"}, Tag: "tag1", Group: nil, - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "signingOrg", Key: "0x12345", }, @@ -447,7 +447,7 @@ func TestFilterEventsMatch(t *testing.T) { Topics: fftypes.FFStringArray{"topic1"}, Tag: "tag2", Group: gid1, - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "org2", Key: "0x23456", }, @@ -464,7 +464,7 @@ func TestFilterEventsMatch(t *testing.T) { Topics: fftypes.FFStringArray{"topic2"}, Tag: "tag1", Group: nil, - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "signingOrg", Key: "0x12345", }, diff --git a/internal/events/event_manager.go b/internal/events/event_manager.go index d559ac4460..aea01bb083 100644 --- a/internal/events/event_manager.go +++ b/internal/events/event_manager.go @@ -59,7 +59,7 @@ type EventManager interface { // Bound blockchain callbacks OperationUpdate(plugin fftypes.Named, operationID *fftypes.UUID, txState blockchain.TransactionStatus, blockchainTXID, errorMessage string, opOutput fftypes.JSONObject) error - BatchPinComplete(bi blockchain.Plugin, batch *blockchain.BatchPin, signingIdentity string) error + BatchPinComplete(bi blockchain.Plugin, batch *blockchain.BatchPin, signingKey *fftypes.VerifierRef) error BlockchainEvent(event *blockchain.EventWithSubscription) error // Bound dataexchange callbacks @@ -99,8 +99,8 @@ type eventManager struct { metrics metrics.Manager } -func NewEventManager(ctx context.Context, ni sysmessaging.LocalNodeInfo, pi publicstorage.Plugin, di database.Plugin, im identity.Manager, dh definitions.DefinitionHandlers, dm data.Manager, bm broadcast.Manager, pm privatemessaging.Manager, am assets.Manager, mm metrics.Manager) (EventManager, error) { - if ni == nil || pi == nil || di == nil || im == nil || dh == nil || dm == nil || bm == nil || pm == nil || am == nil { +func NewEventManager(ctx context.Context, ni sysmessaging.LocalNodeInfo, pi publicstorage.Plugin, di database.Plugin, bi blockchain.Plugin, im identity.Manager, dh definitions.DefinitionHandlers, dm data.Manager, bm broadcast.Manager, pm privatemessaging.Manager, am assets.Manager, mm metrics.Manager) (EventManager, error) { + if ni == nil || pi == nil || di == nil || bi == nil || im == nil || dh == nil || dm == nil || bm == nil || pm == nil || am == nil { return nil, i18n.NewError(ctx, i18n.MsgInitializationNilDepError) } newPinNotifier := newEventNotifier(ctx, "pins") @@ -126,7 +126,7 @@ func NewEventManager(ctx context.Context, ni sysmessaging.LocalNodeInfo, pi publ opCorrelationRetries: config.GetInt(config.EventAggregatorOpCorrelationRetries), newEventNotifier: newEventNotifier, newPinNotifier: newPinNotifier, - aggregator: newAggregator(ctx, di, dh, dm, newPinNotifier, mm), + aggregator: newAggregator(ctx, di, bi, dh, im, dm, newPinNotifier, mm), metrics: mm, } ie, _ := eifactory.GetPlugin(ctx, system.SystemEventsTransport) diff --git a/internal/events/event_manager_test.go b/internal/events/event_manager_test.go index e64a2409e1..a8db16ff5e 100644 --- a/internal/events/event_manager_test.go +++ b/internal/events/event_manager_test.go @@ -24,6 +24,7 @@ import ( "github.com/hyperledger/firefly/internal/config" "github.com/hyperledger/firefly/internal/events/system" "github.com/hyperledger/firefly/mocks/assetmocks" + "github.com/hyperledger/firefly/mocks/blockchainmocks" 
"github.com/hyperledger/firefly/mocks/broadcastmocks" "github.com/hyperledger/firefly/mocks/databasemocks" "github.com/hyperledger/firefly/mocks/datamocks" @@ -43,37 +44,18 @@ import ( var testNodeID = fftypes.NewUUID() func newTestEventManager(t *testing.T) (*eventManager, func()) { - config.Reset() - ctx, cancel := context.WithCancel(context.Background()) - mdi := &databasemocks.Plugin{} - mim := &identitymanagermocks.Manager{} - mpi := &publicstoragemocks.Plugin{} - met := &eventsmocks.Plugin{} - mdm := &datamocks.Manager{} - msh := &definitionsmocks.DefinitionHandlers{} - mbm := &broadcastmocks.Manager{} - mpm := &privatemessagingmocks.Manager{} - mam := &assetmocks.Manager{} - mni := &sysmessagingmocks.LocalNodeInfo{} - mmi := &metricsmocks.Manager{} - mmi.On("IsMetricsEnabled").Return(false) - mni.On("GetNodeUUID", mock.Anything).Return(testNodeID).Maybe() - met.On("Name").Return("ut").Maybe() - emi, err := NewEventManager(ctx, mni, mpi, mdi, mim, msh, mdm, mbm, mpm, mam, mmi) - em := emi.(*eventManager) - em.txHelper = &txcommonmocks.Helper{} - rag := mdi.On("RunAsGroup", em.ctx, mock.Anything).Maybe() - rag.RunFn = func(a mock.Arguments) { - rag.ReturnArguments = mock.Arguments{a[1].(func(context.Context) error)(a[0].(context.Context))} - } - assert.NoError(t, err) - return em, cancel + return newTestEventManagerCommon(t, false) } func newTestEventManagerWithMetrics(t *testing.T) (*eventManager, func()) { + return newTestEventManagerCommon(t, true) +} + +func newTestEventManagerCommon(t *testing.T, metrics bool) (*eventManager, func()) { config.Reset() ctx, cancel := context.WithCancel(context.Background()) mdi := &databasemocks.Plugin{} + mbi := &blockchainmocks.Plugin{} mim := &identitymanagermocks.Manager{} mpi := &publicstoragemocks.Plugin{} met := &eventsmocks.Plugin{} @@ -84,11 +66,14 @@ func newTestEventManagerWithMetrics(t *testing.T) (*eventManager, func()) { mam := &assetmocks.Manager{} mni := &sysmessagingmocks.LocalNodeInfo{} mmi := &metricsmocks.Manager{} - mmi.On("IsMetricsEnabled").Return(true) - mmi.On("TransferConfirmed", mock.Anything) + mmi.On("IsMetricsEnabled").Return(metrics) + if metrics { + mmi.On("TransferConfirmed", mock.Anything) + } mni.On("GetNodeUUID", mock.Anything).Return(testNodeID).Maybe() met.On("Name").Return("ut").Maybe() - emi, err := NewEventManager(ctx, mni, mpi, mdi, mim, msh, mdm, mbm, mpm, mam, mmi) + mbi.On("VerifierType").Return(fftypes.VerifierTypeEthAddress).Maybe() + emi, err := NewEventManager(ctx, mni, mpi, mdi, mbi, mim, msh, mdm, mbm, mpm, mam, mmi) em := emi.(*eventManager) em.txHelper = &txcommonmocks.Helper{} rag := mdi.On("RunAsGroup", em.ctx, mock.Anything).Maybe() @@ -119,7 +104,7 @@ func TestStartStop(t *testing.T) { } func TestStartStopBadDependencies(t *testing.T) { - _, err := NewEventManager(context.Background(), nil, nil, nil, nil, nil, nil, nil, nil, nil, nil) + _, err := NewEventManager(context.Background(), nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil) assert.Regexp(t, "FF10128", err) } @@ -128,6 +113,7 @@ func TestStartStopBadTransports(t *testing.T) { config.Set(config.EventTransportsEnabled, []string{"wrongun"}) defer config.Reset() mdi := &databasemocks.Plugin{} + mbi := &blockchainmocks.Plugin{} mim := &identitymanagermocks.Manager{} mpi := &publicstoragemocks.Plugin{} mdm := &datamocks.Manager{} @@ -137,7 +123,8 @@ func TestStartStopBadTransports(t *testing.T) { mni := &sysmessagingmocks.LocalNodeInfo{} mam := &assetmocks.Manager{} mm := &metricsmocks.Manager{} - _, err := 
NewEventManager(context.Background(), mni, mpi, mdi, mim, msh, mdm, mbm, mpm, mam, mm) + mbi.On("VerifierType").Return(fftypes.VerifierTypeEthAddress) + _, err := NewEventManager(context.Background(), mni, mpi, mdi, mbi, mim, msh, mdm, mbm, mpm, mam, mm) assert.Regexp(t, "FF10172", err) } diff --git a/internal/events/operation_update.go b/internal/events/operation_update.go index cdb4dab55a..a2be9cecfc 100644 --- a/internal/events/operation_update.go +++ b/internal/events/operation_update.go @@ -38,13 +38,15 @@ func (em *eventManager) operationUpdateCtx(ctx context.Context, operationID *fft // Special handling for OpTypeTokenTransfer, which writes an event when it fails if op.Type == fftypes.OpTypeTokenTransfer && txState == fftypes.OpStatusFailed { event := fftypes.NewEvent(fftypes.EventTypeTransferOpFailed, op.Namespace, op.ID, op.Transaction) - if em.metrics.IsMetricsEnabled() { - var tokenTransfer fftypes.TokenTransfer - err = txcommon.RetrieveTokenTransferInputs(ctx, op, &tokenTransfer) - if err != nil { - log.L(em.ctx).Warnf("Could not determine token transfer type: %s", err) + var tokenTransfer fftypes.TokenTransfer + err = txcommon.RetrieveTokenTransferInputs(ctx, op, &tokenTransfer) + if err != nil { + log.L(em.ctx).Warnf("Could not determine token transfer: %s", err) + } else { + event.Correlator = tokenTransfer.LocalID + if em.metrics.IsMetricsEnabled() { + em.metrics.TransferConfirmed(&tokenTransfer) } - em.metrics.TransferConfirmed(&tokenTransfer) } if err := em.database.InsertEvent(ctx, event); err != nil { return err @@ -54,6 +56,13 @@ func (em *eventManager) operationUpdateCtx(ctx context.Context, operationID *fft // Special handling for OpTypeTokenApproval, which writes an event when it fails if op.Type == fftypes.OpTypeTokenApproval && txState == fftypes.OpStatusFailed { event := fftypes.NewEvent(fftypes.EventTypeApprovalOpFailed, op.Namespace, op.ID, op.Transaction) + var tokenApproval fftypes.TokenApproval + err = txcommon.RetrieveTokenApprovalInputs(ctx, op, &tokenApproval) + if err != nil { + log.L(em.ctx).Warnf("Could not determine token approval: %s", err) + } else { + event.Correlator = tokenApproval.LocalID + } if err := em.database.InsertEvent(ctx, event); err != nil { return err } diff --git a/internal/events/operation_update_test.go b/internal/events/operation_update_test.go index f64f6a078d..0451d94892 100644 --- a/internal/events/operation_update_test.go +++ b/internal/events/operation_update_test.go @@ -110,7 +110,7 @@ func TestOperationTXUpdateError(t *testing.T) { mbi.AssertExpectations(t) } -func TestOperationUpdateTransferFail(t *testing.T) { +func TestOperationUpdateTransferFailBadData(t *testing.T) { em, cancel := newTestEventManagerWithMetrics(t) defer cancel() mdi := em.database.(*databasemocks.Plugin) @@ -139,6 +139,39 @@ func TestOperationUpdateTransferFail(t *testing.T) { mbi.AssertExpectations(t) } +func TestOperationUpdateTransferFail(t *testing.T) { + em, cancel := newTestEventManagerWithMetrics(t) + defer cancel() + mdi := em.database.(*databasemocks.Plugin) + mbi := &blockchainmocks.Plugin{} + mth := em.txHelper.(*txcommonmocks.Helper) + + localID := fftypes.NewUUID() + op := &fftypes.Operation{ + ID: fftypes.NewUUID(), + Type: fftypes.OpTypeTokenTransfer, + Namespace: "ns1", + Transaction: fftypes.NewUUID(), + Input: fftypes.JSONObject{ + "localId": localID.String(), + }, + } + info := fftypes.JSONObject{"some": "info"} + + mdi.On("GetOperationByID", em.ctx, op.ID).Return(op, nil) + mdi.On("ResolveOperation", mock.Anything, op.ID,
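Because the failed-operation event now carries the transfer's LocalID as its correlator, a consumer can match the failure back to the request that produced it. A hedged sketch, assuming a hypothetical pendingTransfers map keyed by LocalID:

func matchFailedTransfer(event *fftypes.Event, pendingTransfers map[fftypes.UUID]*fftypes.TokenTransfer) *fftypes.TokenTransfer {
	// Correlator is only set when the transfer inputs could be recovered from the operation
	if event.Type != fftypes.EventTypeTransferOpFailed || event.Correlator == nil {
		return nil
	}
	return pendingTransfers[*event.Correlator]
}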
fftypes.OpStatusFailed, "some error", info).Return(nil) + mdi.On("InsertEvent", em.ctx, mock.MatchedBy(func(e *fftypes.Event) bool { + return e.Type == fftypes.EventTypeTransferOpFailed && e.Namespace == "ns1" && e.Correlator.Equals(localID) + })).Return(nil) + mth.On("AddBlockchainTX", mock.Anything, op.Transaction, "0x12345").Return(nil) + + err := em.operationUpdateCtx(em.ctx, op.ID, fftypes.OpStatusFailed, "0x12345", "some error", info) + assert.NoError(t, err) + + mdi.AssertExpectations(t) + mbi.AssertExpectations(t) +} + func TestOperationUpdateTransferTransactionFail(t *testing.T) { em, cancel := newTestEventManager(t) defer cancel() @@ -192,7 +225,7 @@ func TestOperationUpdateTransferEventFail(t *testing.T) { mbi.AssertExpectations(t) } -func TestOperationUpdateApprovalFail(t *testing.T) { +func TestOperationUpdateApprovalFailBadInput(t *testing.T) { em, cancel := newTestEventManagerWithMetrics(t) defer cancel() mdi := em.database.(*databasemocks.Plugin) @@ -221,6 +254,38 @@ func TestOperationUpdateApprovalFail(t *testing.T) { mbi.AssertExpectations(t) } +func TestOperationUpdateApprovalFail(t *testing.T) { + em, cancel := newTestEventManagerWithMetrics(t) + defer cancel() + mdi := em.database.(*databasemocks.Plugin) + mbi := &blockchainmocks.Plugin{} + mth := em.txHelper.(*txcommonmocks.Helper) + + localID := fftypes.NewUUID() + op := &fftypes.Operation{ + ID: fftypes.NewUUID(), + Type: fftypes.OpTypeTokenApproval, + Namespace: "ns1", + Transaction: fftypes.NewUUID(), + Input: fftypes.JSONObject{ + "localId": localID.String(), + }, + } + info := fftypes.JSONObject{"some": "info"} + + mdi.On("GetOperationByID", em.ctx, op.ID).Return(op, nil) + mdi.On("ResolveOperation", mock.Anything, op.ID, fftypes.OpStatusFailed, "some error", info).Return(nil) + mdi.On("InsertEvent", em.ctx, mock.MatchedBy(func(e *fftypes.Event) bool { + return e.Type == fftypes.EventTypeApprovalOpFailed && e.Namespace == "ns1" && e.Correlator.Equals(localID) + })).Return(nil) + mth.On("AddBlockchainTX", mock.Anything, op.Transaction, "0x12345").Return(nil) + + err := em.operationUpdateCtx(em.ctx, op.ID, fftypes.OpStatusFailed, "0x12345", "some error", info) + assert.NoError(t, err) + + mdi.AssertExpectations(t) + mbi.AssertExpectations(t) +} func TestOperationUpdateApprovalTransactionFail(t *testing.T) { em, cancel := newTestEventManager(t) defer cancel() diff --git a/internal/events/persist_batch.go b/internal/events/persist_batch.go index 174939a0a4..3357e012d1 100644 --- a/internal/events/persist_batch.go +++ b/internal/events/persist_batch.go @@ -18,75 +18,20 @@ package events import ( "context" - "encoding/json" "github.com/hyperledger/firefly/internal/log" "github.com/hyperledger/firefly/pkg/database" "github.com/hyperledger/firefly/pkg/fftypes" ) -func (em *eventManager) persistBatchFromBroadcast(ctx context.Context /* db TX context*/, batch *fftypes.Batch, onchainHash *fftypes.Bytes32, signingKey string) (valid bool, err error) { - l := log.L(ctx) - - // Verify that we can resolve the signing key back to this identity. - // This is a specific rule for broadcasts, so we know the authenticity of the data. - resolvedAuthor, err := em.identity.ResolveSigningKeyIdentity(ctx, signingKey) - if err != nil { - l.Errorf("Invalid batch '%s'. Author '%s' cound not be resolved: %s", batch.ID, batch.Author, err) - return false, nil // This is not retryable. 
skip this batch - } - - // The special case of a root org broadcast is allowed to not have a resolved author, because it's not in the database yet - if (resolvedAuthor == "" || resolvedAuthor != batch.Author) || signingKey != batch.Key { - if resolvedAuthor == "" && signingKey == batch.Key && em.isRootOrgBroadcast(batch) { - - // This is where a future "gatekeeper" plugin should sit, to allow pluggable authorization of new root - // identities joining the network - l.Infof("New root org broadcast: %s", batch.Author) - - } else { - - l.Errorf("Invalid batch '%s'. Key/author in batch '%s' / '%s' does not match resolved key/author '%s' / '%s'", batch.ID, batch.Key, batch.Author, signingKey, resolvedAuthor) - return false, nil // This is not retryable. skip this batch - - } - } +func (em *eventManager) persistBatchFromBroadcast(ctx context.Context /* db TX context*/, batch *fftypes.Batch, onchainHash *fftypes.Bytes32) (valid bool, err error) { if !onchainHash.Equals(batch.Hash) { - l.Errorf("Invalid batch '%s'. Hash in batch '%s' does not match transaction hash '%s'", batch.ID, batch.Hash, onchainHash) + log.L(ctx).Errorf("Invalid batch '%s'. Hash in batch '%s' does not match transaction hash '%s'", batch.ID, batch.Hash, onchainHash) return false, nil // This is not retryable. skip this batch } - valid, err = em.persistBatch(ctx, batch) - return valid, err -} - -func (em *eventManager) isRootOrgBroadcast(batch *fftypes.Batch) bool { - // Look into batch to see if it contains a message that contains a data item that is a root organization definition - if len(batch.Payload.Messages) > 0 { - message := batch.Payload.Messages[0] - if message.Header.Type == fftypes.MessageTypeDefinition { - if len(message.Data) > 0 { - messageDataItem := message.Data[0] - if len(batch.Payload.Data) > 0 { - batchDataItem := batch.Payload.Data[0] - if batchDataItem.ID.Equals(messageDataItem.ID) { - if batchDataItem.Validator == fftypes.MessageTypeDefinition { - var org *fftypes.Organization - err := json.Unmarshal(batchDataItem.Value.Bytes(), &org) - if err != nil { - return false - } - if org != nil && org.Name != "" && org.ID != nil && org.Parent == "" { - return true - } - } - } - } - } - } - } - return false + return em.persistBatch(ctx, batch) } // persistBatch performs very simple validation on each message/data element (hashes) and either persists diff --git a/internal/events/persist_batch_test.go b/internal/events/persist_batch_test.go index e79ea2201a..bb90e00ddb 100644 --- a/internal/events/persist_batch_test.go +++ b/internal/events/persist_batch_test.go @@ -22,28 +22,20 @@ import ( "testing" "github.com/hyperledger/firefly/mocks/databasemocks" - "github.com/hyperledger/firefly/mocks/identitymanagermocks" "github.com/hyperledger/firefly/pkg/fftypes" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" ) -func TestPersistBatchFromBroadcastRootOrg(t *testing.T) { +func TestPersistBatchFromBroadcast(t *testing.T) { em, cancel := newTestEventManager(t) defer cancel() - mim := em.identity.(*identitymanagermocks.Manager) - mim.On("ResolveSigningKeyIdentity", em.ctx, mock.Anything).Return("", nil) - mdi := em.database.(*databasemocks.Plugin) mdi.On("UpsertBatch", em.ctx, mock.Anything).Return(fmt.Errorf(("pop"))) - org := fftypes.Organization{ - ID: fftypes.NewUUID(), - Name: "org1", - Parent: "", // root - } + org := newTestOrg("org1") orgBytes, err := json.Marshal(&org) assert.NoError(t, err) data := &fftypes.Data{ @@ -54,7 +46,7 @@ func TestPersistBatchFromBroadcastRootOrg(t *testing.T) { 
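With the signing-key/author checks removed from this path (the aggregator now receives the identity manager for that purpose), the only non-retryable rejection left here is a hash mismatch. A hedged sketch of how a caller distinguishes that from a retryable error; the helper name handleBroadcastBatch is illustrative only:

func handleBroadcastBatch(ctx context.Context, em *eventManager, batch *fftypes.Batch, onchainHash *fftypes.Bytes32) error {
	valid, err := em.persistBatchFromBroadcast(ctx, batch, onchainHash)
	if err != nil {
		return err // retryable (for example a database error), so the caller retries
	}
	if !valid {
		// Non-retryable: the on-chain hash did not match the batch, so it is skipped
		log.L(ctx).Warnf("Skipping invalid broadcast batch %s", batch.ID)
	}
	return nil
}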
batch := &fftypes.Batch{ ID: fftypes.NewUUID(), - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "did:firefly:org/12345", Key: "0x12345", }, @@ -68,7 +60,7 @@ func TestPersistBatchFromBroadcastRootOrg(t *testing.T) { Header: fftypes.MessageHeader{ ID: fftypes.NewUUID(), Type: fftypes.MessageTypeDefinition, - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "did:firefly:org/12345", Key: "0x12345", }, @@ -88,83 +80,24 @@ func TestPersistBatchFromBroadcastRootOrg(t *testing.T) { } batch.Hash = batch.Payload.Hash() - _, err = em.persistBatchFromBroadcast(em.ctx, batch, batch.Hash, "0x12345") + _, err = em.persistBatchFromBroadcast(em.ctx, batch, batch.Hash) assert.EqualError(t, err, "pop") // Confirms we got to upserting the batch } -func TestPersistBatchFromBroadcastRootOrgBadData(t *testing.T) { +func TestPersistBatchFromBroadcastBadHash(t *testing.T) { em, cancel := newTestEventManager(t) defer cancel() - mim := em.identity.(*identitymanagermocks.Manager) - mim.On("ResolveSigningKeyIdentity", em.ctx, mock.Anything).Return("", nil) - - data := &fftypes.Data{ - ID: fftypes.NewUUID(), - Value: fftypes.JSONAnyPtr("!badness"), - Validator: fftypes.MessageTypeDefinition, - } - - batch := &fftypes.Batch{ - ID: fftypes.NewUUID(), - Identity: fftypes.Identity{ - Key: "0x12345", - }, - Payload: fftypes.BatchPayload{ - TX: fftypes.TransactionRef{ - ID: fftypes.NewUUID(), - Type: fftypes.TransactionTypeBatchPin, - }, - Messages: []*fftypes.Message{ - { - Header: fftypes.MessageHeader{ - ID: fftypes.NewUUID(), - Type: fftypes.MessageTypeDefinition, - Identity: fftypes.Identity{ - Key: "0x12345", - }, - }, - Data: fftypes.DataRefs{ - { - ID: data.ID, - Hash: data.Hash, - }, - }, - }, - }, - Data: []*fftypes.Data{ - data, - }, - }, - } - batch.Hash = batch.Payload.Hash() - - valid, err := em.persistBatchFromBroadcast(em.ctx, batch, batch.Hash, "0x12345") - assert.NoError(t, err) - assert.False(t, valid) - -} - -func TestPersistBatchFromBroadcastNoRootOrgBadIdentity(t *testing.T) { - - em, cancel := newTestEventManager(t) - defer cancel() - - mim := em.identity.(*identitymanagermocks.Manager) - mim.On("ResolveSigningKeyIdentity", em.ctx, mock.Anything).Return("", nil) + mdi := em.database.(*databasemocks.Plugin) + mdi.On("UpsertBatch", em.ctx, mock.Anything).Return(fmt.Errorf(("pop"))) - batch := &fftypes.Batch{ - ID: fftypes.NewUUID(), - Identity: fftypes.Identity{ - Key: "0x12345", - }, - } - batch.Hash = batch.Payload.Hash() + batch := &fftypes.Batch{} + batch.Hash = fftypes.NewRandB32() - valid, err := em.persistBatchFromBroadcast(em.ctx, batch, batch.Hash, "0x12345") + ok, err := em.persistBatchFromBroadcast(em.ctx, batch, fftypes.NewRandB32()) assert.NoError(t, err) - assert.False(t, valid) + assert.False(t, ok) } diff --git a/internal/events/token_pool_created.go b/internal/events/token_pool_created.go index d8feded498..5a04baf905 100644 --- a/internal/events/token_pool_created.go +++ b/internal/events/token_pool_created.go @@ -165,7 +165,7 @@ func (em *eventManager) TokenPoolCreated(ti tokens.Plugin, pool *tokens.TokenPoo // Initiate a rewind if a batch was potentially completed by the arrival of this transaction if batchID != nil { log.L(em.ctx).Infof("Batch '%s' contains reference to received pool '%s'", batchID, pool.ProtocolID) - em.aggregator.offchainBatches <- batchID + em.aggregator.rewindBatches <- batchID } // Announce the details of the new token pool with the blockchain event details diff --git a/internal/events/tokens_transferred.go 
b/internal/events/tokens_transferred.go index 257890214d..453364c05a 100644 --- a/internal/events/tokens_transferred.go +++ b/internal/events/tokens_transferred.go @@ -152,7 +152,7 @@ func (em *eventManager) TokensTransferred(ti tokens.Plugin, transfer *tokens.Tok // Initiate a rewind if a batch was potentially completed by the arrival of this transfer if err == nil && batchID != nil { log.L(em.ctx).Infof("Batch '%s' contains reference to received transfer. Transfer='%s' Message='%s'", batchID, transfer.ProtocolID, transfer.Message) - em.aggregator.offchainBatches <- batchID + em.aggregator.rewindBatches <- batchID } return err diff --git a/internal/i18n/en_translations.go b/internal/i18n/en_translations.go index d747754992..3feb55f12b 100644 --- a/internal/i18n/en_translations.go +++ b/internal/i18n/en_translations.go @@ -131,7 +131,7 @@ var ( MsgOwnerMissing = ffm("FF10211", "Owner missing", 400) MsgUnknownIdentityPlugin = ffm("FF10212", "Unknown Identity plugin '%s'") MsgUnknownDataExchangePlugin = ffm("FF10213", "Unknown Data Exchange plugin '%s'") - MsgParentIdentityNotFound = ffm("FF10214", "Organization with identity '%s' not found in identity chain for %s '%s'") + MsgParentIdentityNotFound = ffm("FF10214", "Identity '%s' not found in identity chain for %s '%s'") MsgInvalidSigningIdentity = ffm("FF10215", "Invalid signing identity") MsgNodeAndOrgIDMustBeSet = ffm("FF10216", "node.name, org.name and org.identity must be configured first", 409) MsgBlobStreamingFailed = ffm("FF10217", "Blob streaming terminated with error", 500) @@ -139,8 +139,7 @@ var ( MsgGroupMustHaveMembers = ffm("FF10219", "Group must have at least one member", 400) MsgEmptyMemberIdentity = ffm("FF10220", "Identity is blank in member %d") MsgEmptyMemberNode = ffm("FF10221", "Node is blank in member %d") - MsgDuplicateMember = ffm("FF10222", "Member %d is a duplicate org+node combination") - MsgOrgNotFound = ffm("FF10223", "Org with name or identity '%s' not found", 400) + MsgDuplicateMember = ffm("FF10222", "Member %d is a duplicate org+node combination: %s", 400) MsgNodeNotFound = ffm("FF10224", "Node with name or identity '%s' not found", 400) MsgLocalNodeResolveFailed = ffm("FF10225", "Unable to find local node to add to group. Check the status API to confirm the node is registered", 500) MsgGroupNotFound = ffm("FF10226", "Group '%s' not found", 404) @@ -194,11 +193,11 @@ var ( MsgTokensRESTErr = ffm("FF10274", "Error from tokens service: %s") MsgTokenPoolDuplicate = ffm("FF10275", "Duplicate token pool") MsgTokenPoolRejected = ffm("FF10276", "Token pool with ID '%s' was rejected. Please check the FireFly logs for more information") - MsgAuthorNotFoundByDID = ffm("FF10277", "Author could not be resolved via DID '%s'") + MsgIdentityNotFoundByString = ffm("FF10277", "Identity could not be resolved via DID '%s'") MsgAuthorOrgNotFoundByName = ffm("FF10278", "Author organization could not be resolved via name '%s'") MsgAuthorOrgSigningKeyMismatch = ffm("FF10279", "Author organization '%s' is not associated with signing key '%s'") MsgCannotTransferToSelf = ffm("FF10280", "From and to addresses must be different", 400) - MsgLocalOrgLookupFailed = ffm("FF10281", "Unable resolve the local org by the configured signing key on the node. Please confirm the org is registered with key '%s'", 500) + MsgLocalOrgLookupFailed = ffm("FF10281", "Unable resolve the local org '%s' by the configured signing key on the node. 
Please confirm the org is registered with key '%s'", 500) MsgBigIntTooLarge = ffm("FF10282", "Byte length of serialized integer is too large %d (max=%d)") MsgBigIntParseFailed = ffm("FF10283", "Failed to parse JSON value '%s' into BigInt") MsgFabconnectRESTErr = ffm("FF10284", "Error from fabconnect: %s") @@ -266,4 +265,25 @@ var ( MsgFFIGenerationFailed = ffm("FF10346", "Error generating smart contract interface: %s", 400) MsgFFIGenerationUnsupported = ffm("FF10347", "Smart contract interface generation is not supported by this blockchain plugin", 400) MsgBlobHashMismatch = ffm("FF10348", "Blob hash mismatch sent=%s received=%s", 400) + MsgDIDResolverUnknown = ffm("FF10349", "DID resolver unknown for DID: %s", 400) + MsgIdentityNotOrg = ffm("FF10350", "Identity '%s' with DID '%s' is not an organization", 400) + MsgIdentityNotNode = ffm("FF10351", "Identity '%s' with DID '%s' is not a node", 400) + MsgBlockchainKeyNotSet = ffm("FF10352", "No blockchain key specified", 400) + MsgNoVerifierForIdentity = ffm("FF10353", "No %s verifier registered for identity %s", 400) + MsgNodeMissingBlockchainKey = ffm("FF10354", "No organization signing key configured on node", 400) + MsgAuthorRegistrationMismatch = ffm("FF10355", "Verifier '%s' cannot be used for signing with author '%s'. Verifier registered to '%s'", 400) + MsgAuthorMissingForKey = ffm("FF10356", "Key '%s' has not been registered by any identity, and a separate 'author' was not supplied", 404) + MsgAuthorIncorrectForRootReg = ffm("FF10357", "Author namespace '%s' and DID '%s' combination invalid for root organization registration", 400) + MsgKeyIdentityMissing = ffm("FF10358", "Identity owner of key '%s' not found", 500) + MsgCustomIdentitySystemNS = ffm("FF10359", "Custom identities cannot be defined in the '%s' namespace", 400) + MsgNilParentIdentity = ffm("FF10360", "Identity of type '%s' must have a valid parent", 400) + MsgSystemIdentityCustomNS = ffm("FF10361", "System identities must be defined in the '%s' namespace", 400) + MsgUnknownIdentityType = ffm("FF10362", "Unknown identity type: %s", 400) + MsgInvalidDIDForType = ffm("FF10363", "Invalid FireFly DID '%s' for type='%s' namespace='%s' name='%s'", 400) + MsgIdentityChainLoop = ffm("FF10364", "Loop detected on identity %s in chain for %s (%s)", 400) + MsgInvalidIdentityParentType = ffm("FF10365", "Parent %s (%s) of type %s is invalid for child %s (%s) of type %s", 400) + MsgParentIdentityMissingClaim = ffm("FF10366", "Parent %s (%s) is invalid (missing claim)", 400) + MsgDXInfoMissingID = ffm("FF10367", "Data exchange endpoint info missing 'id' field", 500) + MsgNilOrNullObject = ffm("FF10368", "Object is null") + MsgTokenApprovalFailed = ffm("FF10369", "Token approval with ID '%s' failed. Please check the FireFly logs for more information") ) diff --git a/internal/i18n/errors.go b/internal/i18n/errors.go index 7a347fc1bb..ef0dc3ff96 100644 --- a/internal/i18n/errors.go +++ b/internal/i18n/errors.go @@ -1,4 +1,4 @@ -// Copyright © 2021 Kaleido, Inc. +// Copyright © 2022 Kaleido, Inc. // // SPDX-License-Identifier: Apache-2.0 // @@ -29,5 +29,8 @@ func NewError(ctx context.Context, msg MessageKey, inserts ...interface{}) error // WrapError wraps an error func WrapError(ctx context.Context, err error, msg MessageKey, inserts ...interface{}) error { + if err == nil { + return NewError(ctx, msg, inserts...)
+ } return errors.Wrap(err, SanitizeLimit(ExpandWithCode(ctx, msg, inserts...), 2048)) } diff --git a/internal/i18n/errors_test.go b/internal/i18n/errors_test.go index c9a4fe474d..c7b1140c98 100644 --- a/internal/i18n/errors_test.go +++ b/internal/i18n/errors_test.go @@ -33,3 +33,8 @@ func TestWrapError(t *testing.T) { err := WrapError(context.Background(), fmt.Errorf("some error"), MsgConfigFailed) assert.Error(t, err) } + +func TestWrapNilError(t *testing.T) { + err := WrapError(context.Background(), nil, MsgConfigFailed) + assert.Error(t, err) +} diff --git a/internal/identity/identitymanager.go b/internal/identity/identitymanager.go index 31ec3eebc7..b0554c7ee5 100644 --- a/internal/identity/identitymanager.go +++ b/internal/identity/identitymanager.go @@ -23,6 +23,7 @@ import ( "time" "github.com/hyperledger/firefly/internal/config" + "github.com/hyperledger/firefly/internal/data" "github.com/hyperledger/firefly/internal/i18n" "github.com/hyperledger/firefly/internal/log" "github.com/hyperledger/firefly/pkg/blockchain" @@ -32,30 +33,40 @@ import ( "github.com/karlseguin/ccache" ) +const ( + KeyNormalizationBlockchainPlugin = iota + KeyNormalizationNone +) + type Manager interface { - ResolveInputIdentity(ctx context.Context, identity *fftypes.Identity) (err error) - ResolveSigningKey(ctx context.Context, inputKey string) (outputKey string, err error) - ResolveSigningKeyIdentity(ctx context.Context, signingKey string) (author string, err error) - ResolveLocalOrgDID(ctx context.Context) (localOrgDID string, err error) - GetLocalOrgKey(ctx context.Context) (string, error) - OrgDID(org *fftypes.Organization) string - GetLocalOrganization(ctx context.Context) (*fftypes.Organization, error) + ResolveInputSigningIdentity(ctx context.Context, namespace string, msgSignerRef *fftypes.SignerRef) (err error) + ResolveNodeOwnerSigningIdentity(ctx context.Context, msgSignerRef *fftypes.SignerRef) (err error) + NormalizeSigningKey(ctx context.Context, namespace string, keyNormalizationMode int) (signingKey string, err error) + FindIdentityForVerifier(ctx context.Context, iTypes []fftypes.IdentityType, namespace string, verifier *fftypes.VerifierRef) (identity *fftypes.Identity, err error) + ResolveIdentitySigner(ctx context.Context, identity *fftypes.Identity) (parentSigner *fftypes.SignerRef, err error) + CachedIdentityLookupByID(ctx context.Context, id *fftypes.UUID) (identity *fftypes.Identity, err error) + CachedIdentityLookup(ctx context.Context, did string) (identity *fftypes.Identity, retryable bool, err error) + CachedVerifierLookup(ctx context.Context, vType fftypes.VerifierType, ns, value string) (verifier *fftypes.Verifier, err error) + GetNodeOwnerBlockchainKey(ctx context.Context) (*fftypes.VerifierRef, error) + GetNodeOwnerOrg(ctx context.Context) (*fftypes.Identity, error) + VerifyIdentityChain(ctx context.Context, identity *fftypes.Identity) (immediateParent *fftypes.Identity, retryable bool, err error) } type identityManager struct { database database.Plugin plugin identity.Plugin blockchain blockchain.Plugin - - localOrgSigningKey string - localOrgDID string - identityCacheTTL time.Duration - identityCache *ccache.Cache - signingKeyCacheTTL time.Duration - signingKeyCache *ccache.Cache + data data.Manager + + nodeOwnerBlockchainKey *fftypes.VerifierRef + nodeOwningOrgIdentity *fftypes.Identity + identityCacheTTL time.Duration + identityCache *ccache.Cache + signingKeyCacheTTL time.Duration + signingKeyCache *ccache.Cache } -func NewIdentityManager(ctx context.Context, di 
database.Plugin, ii identity.Plugin, bi blockchain.Plugin) (Manager, error) { +func NewIdentityManager(ctx context.Context, di database.Plugin, ii identity.Plugin, bi blockchain.Plugin, dm data.Manager) (Manager, error) { if di == nil || ii == nil || bi == nil { return nil, i18n.NewError(ctx, i18n.MsgInitializationNilDepError) } @@ -63,6 +74,7 @@ func NewIdentityManager(ctx context.Context, di database.Plugin, ii identity.Plu database: di, plugin: ii, blockchain: bi, + data: dm, identityCacheTTL: config.GetDuration(config.IdentityManagerCacheTTL), signingKeyCacheTTL: config.GetDuration(config.IdentityManagerCacheTTL), } @@ -77,192 +89,388 @@ func NewIdentityManager(ctx context.Context, di database.Plugin, ii identity.Plu return im, nil } -func (im *identityManager) GetLocalOrganization(ctx context.Context) (*fftypes.Organization, error) { - orgDID, err := im.ResolveLocalOrgDID(ctx) +func ParseKeyNormalizationConfig(strConfigVal string) int { + switch strings.ToLower(strConfigVal) { + case "blockchain_plugin": + return KeyNormalizationBlockchainPlugin + default: + return KeyNormalizationNone + } +} + +// NormalizeSigningKey is for cases where there is no "author" field alongside the "key" in the input (custom contracts, tokens), +// or the author is known by the caller and should not / cannot be confirmed prior to sending (identity claims) +func (im *identityManager) NormalizeSigningKey(ctx context.Context, inputKey string, keyNormalizationMode int) (signingKey string, err error) { + if inputKey == "" { + msgSignerRef := &fftypes.SignerRef{} + err = im.ResolveNodeOwnerSigningIdentity(ctx, msgSignerRef) + if err != nil { + return "", err + } + return msgSignerRef.Key, nil + } + // If the caller is not confident that the blockchain plugin/connector should be used to resolve, + // for example it might be a different blockchain (Eth vs Fabric etc.), or it has its own + // verification/management of keys, it should set `assets.keyNormalization: "none"` in the config. + if keyNormalizationMode != KeyNormalizationBlockchainPlugin { + return inputKey, nil + } + signer, err := im.normalizeKeyViaBlockchainPlugin(ctx, inputKey) if err != nil { - return nil, err + return "", err } - return im.cachedOrgLookupByAuthor(ctx, orgDID) + return signer.Value, nil } -func (im *identityManager) OrgDID(org *fftypes.Organization) string { - return org.GetDID() +// ResolveInputSigningIdentity takes in blockchain signing input information from an API call, +// and resolves the final information that should be written in the message etc.
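A usage sketch of the new resolution entry point, assuming the Manager interface shown above; the namespace and key values are illustrative only. With only a key supplied, the author DID is filled in by reverse lookup of the registered verifier; with only an author, the identity's first registered blockchain key is used; with neither, the node owner's org identity is applied.

func exampleResolveSigner(ctx context.Context, im Manager) error {
	signer := &fftypes.SignerRef{Key: "0x12345"} // author deliberately left blank
	if err := im.ResolveInputSigningIdentity(ctx, "ns1", signer); err != nil {
		return err
	}
	// Both fields are now populated: signer.Author holds the full DID of the
	// registered identity, and signer.Key the normalized verifier value
	log.L(ctx).Infof("resolved author=%s key=%s", signer.Author, signer.Key)
	return nil
}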
+func (im *identityManager) ResolveInputSigningIdentity(ctx context.Context, namespace string, msgSignerRef *fftypes.SignerRef) (err error) { + log.L(ctx).Debugf("Resolving identity input: key='%s' author='%s'", msgSignerRef.Key, msgSignerRef.Author) + + var verifier *fftypes.VerifierRef + switch { + case msgSignerRef.Author == "" && msgSignerRef.Key == "": + err = im.ResolveNodeOwnerSigningIdentity(ctx, msgSignerRef) + if err != nil { + return err + } + case msgSignerRef.Key != "": + if verifier, err = im.normalizeKeyViaBlockchainPlugin(ctx, msgSignerRef.Key); err != nil { + return err + } + msgSignerRef.Key = verifier.Value + // Fill in or verify the author DID based on the verifier, if it's been registered + identity, err := im.FindIdentityForVerifier(ctx, []fftypes.IdentityType{ + fftypes.IdentityTypeOrg, + fftypes.IdentityTypeCustom, + }, namespace, verifier) + if err != nil { + return err + } + switch { + case identity != nil: + if msgSignerRef.Author == identity.Name || msgSignerRef.Author == "" { + // Switch to full DID automatically + msgSignerRef.Author = identity.DID + } + if msgSignerRef.Author != identity.DID { + return i18n.NewError(ctx, i18n.MsgAuthorRegistrationMismatch, verifier.Value, msgSignerRef.Author, identity.DID) + } + case msgSignerRef.Author != "": + identity, _, err := im.CachedIdentityLookup(ctx, msgSignerRef.Author) + if err != nil { + return err + } + msgSignerRef.Author = identity.DID + default: + return i18n.NewError(ctx, i18n.MsgAuthorMissingForKey, msgSignerRef.Key) + } + case msgSignerRef.Author != "": + // Author must be non-empty (see above), so we want to find that identity and then + // use the first blockchain key that's associated with it. + identity, _, err := im.CachedIdentityLookup(ctx, msgSignerRef.Author) + if err != nil { + return err + } + msgSignerRef.Author = identity.DID + verifier, _, err = im.firstVerifierForIdentity(ctx, im.blockchain.VerifierType(), identity) + if err != nil { + return err + } + msgSignerRef.Key = verifier.Value + } + + log.L(ctx).Debugf("Resolved identity: key='%s' author='%s'", msgSignerRef.Key, msgSignerRef.Author) + return nil } -// ResolveInputIdentity takes in identity input information from an API call, or configuration load, and resolves -// the combination -func (im *identityManager) ResolveInputIdentity(ctx context.Context, identity *fftypes.Identity) (err error) { - log.L(ctx).Debugf("Resolving identity input: key='%s' author='%s'", identity.Key, identity.Author) +// firstVerifierForIdentity does a lookup of the first verifier of a given type (such as a blockchain signing key) registered to an identity, +// as a convenience to allow you to only specify the org name/DID when sending a message +func (im *identityManager) firstVerifierForIdentity(ctx context.Context, vType fftypes.VerifierType, identity *fftypes.Identity) (verifier *fftypes.VerifierRef, retryable bool, err error) { + fb := database.VerifierQueryFactory.NewFilterLimit(ctx, 1) + filter := fb.And( + fb.Eq("type", vType), + fb.Eq("identity", identity.ID), + ) + verifiers, _, err := im.database.GetVerifiers(ctx, filter) + if err != nil { + return nil, true /* DB Error */, err + } + if len(verifiers) == 0 { + return nil, false, i18n.NewError(ctx, i18n.MsgNoVerifierForIdentity, vType, identity.DID) + } + return &verifiers[0].VerifierRef, false, nil +} - identity.Key, err = im.ResolveSigningKey(ctx, identity.Key) +// ResolveNodeOwnerSigningIdentity adds the node owner identity into a message +func (im *identityManager) ResolveNodeOwnerSigningIdentity(ctx
context.Context, msgSignerRef *fftypes.SignerRef) (err error) { + verifierRef, err := im.GetNodeOwnerBlockchainKey(ctx) if err != nil { return err } - - // Resolve the identity - if err = im.resolveInputAuthor(ctx, identity); err != nil { + identity, err := im.GetNodeOwnerOrg(ctx) + if err != nil { return err } - - log.L(ctx).Debugf("Resolved identity: key='%s' author='%s'", identity.Key, identity.Author) - return + msgSignerRef.Author = identity.DID + msgSignerRef.Key = verifierRef.Value + return nil } -func (im *identityManager) ResolveSigningKeyIdentity(ctx context.Context, signingKey string) (author string, err error) { +// GetNodeOwnerBlockchainKey gets the blockchain key of the node owner, from the configuration +func (im *identityManager) GetNodeOwnerBlockchainKey(ctx context.Context) (*fftypes.VerifierRef, error) { + if im.nodeOwnerBlockchainKey != nil { + return im.nodeOwnerBlockchainKey, nil + } - signingKey, err = im.ResolveSigningKey(ctx, signingKey) - if err != nil { - return "", err + orgKey := config.GetString(config.OrgKey) + if orgKey == "" { + orgKey = config.GetString(config.OrgIdentityDeprecated) + if orgKey != "" { + log.L(ctx).Warnf("The %s config key has been deprecated. Please use %s instead", config.OrgIdentityDeprecated, config.OrgKey) + } + } + if orgKey == "" { + return nil, i18n.NewError(ctx, i18n.MsgNodeMissingBlockchainKey) } - // TODO: Consider other ways identity could be resolved - org, err := im.cachedOrgLookupBySigningKey(ctx, signingKey) + verifier, err := im.normalizeKeyViaBlockchainPlugin(ctx, orgKey) if err != nil { - return "", err + return nil, err } + im.nodeOwnerBlockchainKey = verifier + return im.nodeOwnerBlockchainKey, nil +} - return im.OrgDID(org), nil - +// normalizeKeyViaBlockchainPlugin does a cached lookup of the fully qualified key, associated with a key reference string +func (im *identityManager) normalizeKeyViaBlockchainPlugin(ctx context.Context, inputKey string) (verifier *fftypes.VerifierRef, err error) { + if inputKey == "" { + return nil, i18n.NewError(ctx, i18n.MsgBlockchainKeyNotSet) + } + if cached := im.signingKeyCache.Get(inputKey); cached != nil { + cached.Extend(im.identityCacheTTL) + return cached.Value().(*fftypes.VerifierRef), nil + } + keyString, err := im.blockchain.NormalizeSigningKey(ctx, inputKey) + if err != nil { + return nil, err + } + verifier = &fftypes.VerifierRef{ + Type: im.blockchain.VerifierType(), + Value: keyString, + } + im.signingKeyCache.Set(inputKey, verifier, im.identityCacheTTL) + return verifier, nil } -func (im *identityManager) getConfigOrgKey() string { - orgKey := config.GetString(config.OrgKey) - if orgKey == "" { - orgKey = config.GetString(config.OrgIdentityDeprecated) +// FindIdentityForVerifier is a reverse lookup function to look up an identity registered as owner of the specified verifier. +// Each of the supplied identity types will be checked in order. 
Returns nil if not found +func (im *identityManager) FindIdentityForVerifier(ctx context.Context, iTypes []fftypes.IdentityType, namespace string, verifier *fftypes.VerifierRef) (identity *fftypes.Identity, err error) { + for _, iType := range iTypes { + verifierNS := namespace + if iType != fftypes.IdentityTypeCustom { + // Non-custom identity types are always in the system namespace + verifierNS = fftypes.SystemNamespace + } + identity, err = im.cachedIdentityLookupByVerifierRef(ctx, verifierNS, verifier) + if err != nil || identity != nil { + return identity, err + } } - return orgKey + return nil, nil } -func (im *identityManager) GetLocalOrgKey(ctx context.Context) (string, error) { - if im.localOrgSigningKey != "" { - return im.localOrgSigningKey, nil +// GetNodeOwnerOrg returns the identity of the organization that owns the node, if fully registered +func (im *identityManager) GetNodeOwnerOrg(ctx context.Context) (*fftypes.Identity, error) { + if im.nodeOwningOrgIdentity != nil { + return im.nodeOwningOrgIdentity, nil } - resolvedSigningKey, err := im.blockchain.ResolveSigningKey(ctx, im.getConfigOrgKey()) + verifierRef, err := im.GetNodeOwnerBlockchainKey(ctx) if err != nil { - return "", err + return nil, err + } + orgName := config.GetString(config.OrgName) + identity, err := im.cachedIdentityLookupByVerifierRef(ctx, fftypes.SystemNamespace, verifierRef) + if err != nil || identity == nil { + return nil, i18n.WrapError(ctx, err, i18n.MsgLocalOrgLookupFailed, orgName, verifierRef.Value) + } + // Confirm that the specified blockchain key is associated with the correct org + if identity.Type != fftypes.IdentityTypeOrg || identity.Name != orgName { + return nil, i18n.NewError(ctx, i18n.MsgLocalOrgLookupFailed, orgName, verifierRef.Value) } - im.localOrgSigningKey = resolvedSigningKey - return im.localOrgSigningKey, nil + im.nodeOwningOrgIdentity = identity + return im.nodeOwningOrgIdentity, nil } -func (im *identityManager) ResolveLocalOrgDID(ctx context.Context) (localOrgDID string, err error) { - if im.localOrgDID != "" { - return im.localOrgDID, nil +func (im *identityManager) VerifyIdentityChain(ctx context.Context, checkIdentity *fftypes.Identity) (immediateParent *fftypes.Identity, retryable bool, err error) { + + err = checkIdentity.Validate(ctx) + if err != nil { + return nil, false, err } - orgKey := im.getConfigOrgKey() - im.localOrgDID, err = im.ResolveSigningKeyIdentity(ctx, orgKey) + loopDetect := make(map[fftypes.UUID]bool) + current := checkIdentity + for { + loopDetect[*current.ID] = true + parentID := current.Parent + if parentID == nil { + return immediateParent, false, nil + } + if _, ok := loopDetect[*parentID]; ok { + return nil, false, i18n.NewError(ctx, i18n.MsgIdentityChainLoop, parentID, current.DID, current.ID) + } + parent, err := im.CachedIdentityLookupByID(ctx, parentID) + if err != nil { + return nil, true /* DB Error */, err + } + if parent == nil { + return nil, false, i18n.NewError(ctx, i18n.MsgParentIdentityNotFound, parentID, current.DID, current.ID) + } + if err := im.validateParentType(ctx, current, parent); err != nil { + return nil, false, err + } + if parent.Messages.Claim == nil { + return nil, false, i18n.NewError(ctx, i18n.MsgParentIdentityMissingClaim, parent.DID, parent.ID) + } + current = parent + if immediateParent == nil { + immediateParent = parent + } + } + +} + +func (im *identityManager) ResolveIdentitySigner(ctx context.Context, identity *fftypes.Identity) (parentSigner *fftypes.SignerRef, err error) { + // Find the message that 
registered the identity + msg, err := im.database.GetMessageByID(ctx, identity.Messages.Claim) if err != nil { - return "", i18n.WrapError(ctx, err, i18n.MsgLocalOrgLookupFailed, orgKey) + return nil, err } - if im.localOrgDID == "" { - return "", i18n.NewError(ctx, i18n.MsgLocalOrgLookupFailed, orgKey) + if msg == nil { + return nil, i18n.NewError(ctx, i18n.MsgParentIdentityMissingClaim, identity.DID, identity.ID) } - return im.localOrgDID, err + // Return the signing identity from that claim + return &msg.Header.SignerRef, nil } -func (im *identityManager) ResolveSigningKey(ctx context.Context, inputKey string) (outputKey string, err error) { - // Resolve the signing key - if inputKey != "" { - if cached := im.signingKeyCache.Get(inputKey); cached != nil { - cached.Extend(im.identityCacheTTL) - outputKey = cached.Value().(string) - } else { - outputKey, err = im.blockchain.ResolveSigningKey(ctx, inputKey) - if err != nil { - return "", err - } - im.signingKeyCache.Set(inputKey, outputKey, im.identityCacheTTL) +func (im *identityManager) validateParentType(ctx context.Context, child *fftypes.Identity, parent *fftypes.Identity) error { + + switch child.Type { + case fftypes.IdentityTypeNode, fftypes.IdentityTypeOrg: + if parent.Type != fftypes.IdentityTypeOrg { + return i18n.NewError(ctx, i18n.MsgInvalidIdentityParentType, parent.DID, parent.ID, parent.Type, child.DID, child.ID, child.Type) } - } else { - return im.localOrgSigningKey, nil + return nil + case fftypes.IdentityTypeCustom: + if parent.Type != fftypes.IdentityTypeOrg && parent.Type != fftypes.IdentityTypeCustom { + return i18n.NewError(ctx, i18n.MsgInvalidIdentityParentType, parent.DID, parent.ID, parent.Type, child.DID, child.ID, child.Type) + } + return nil + default: + return i18n.NewError(ctx, i18n.MsgUnknownIdentityType, child.Type) } - return + } -func (im *identityManager) cachedOrgLookupBySigningKey(ctx context.Context, signingKey string) (org *fftypes.Organization, err error) { - cacheKey := fmt.Sprintf("key:%s", signingKey) +func (im *identityManager) cachedIdentityLookupByVerifierRef(ctx context.Context, namespace string, verifierRef *fftypes.VerifierRef) (*fftypes.Identity, error) { + cacheKey := fmt.Sprintf("key=%s|%s|%s", namespace, verifierRef.Type, verifierRef.Value) if cached := im.identityCache.Get(cacheKey); cached != nil { cached.Extend(im.identityCacheTTL) - org = cached.Value().(*fftypes.Organization) - } else { - if org, err = im.database.GetOrganizationByIdentity(ctx, signingKey); err != nil || org == nil { - return org, err - } - // Cache the result - im.identityCache.Set(cacheKey, org, im.identityCacheTTL) + return cached.Value().(*fftypes.Identity), nil + } + verifier, err := im.database.GetVerifierByValue(ctx, verifierRef.Type, namespace, verifierRef.Value) + if err != nil || verifier == nil { + return nil, err + } + identity, err := im.database.GetIdentityByID(ctx, verifier.Identity) + if err != nil { + return nil, err } - return org, nil + if identity == nil { + return nil, i18n.NewError(ctx, i18n.MsgEmptyMemberIdentity, verifier.Identity) + } + // Cache the result + im.identityCache.Set(cacheKey, identity, im.identityCacheTTL) + return identity, nil } -func (im *identityManager) cachedOrgLookupByAuthor(ctx context.Context, author string) (org *fftypes.Organization, err error) { +func (im *identityManager) CachedIdentityLookup(ctx context.Context, didLookupStr string) (identity *fftypes.Identity, retryable bool, err error) { // Use an LRU cache for the author identity, as it's likely for the same 
identity to be re-used over and over - cacheKey := fmt.Sprintf("author:%s", author) + cacheKey := fmt.Sprintf("did=%s", didLookupStr) + defer func() { + log.L(ctx).Debugf("Resolved DID '%s' to identity: %v (err=%v)", didLookupStr, identity, err) + }() if cached := im.identityCache.Get(cacheKey); cached != nil { cached.Extend(im.identityCacheTTL) - org = cached.Value().(*fftypes.Organization) + identity = cached.Value().(*fftypes.Identity) } else { - // TODO: Per comments in https://github.com/hyperledger/firefly/issues/187 we need to resolve whether "Organization" - // is the right thing to resolve here. We might want to fall-back to that in the case of plain string, but likely - // we need something more sophisticated here where we have an Identity object in the database. - if strings.HasPrefix(author, fftypes.FireflyOrgDIDPrefix) { - orgUUID, err := fftypes.ParseUUID(ctx, strings.TrimPrefix(author, fftypes.FireflyOrgDIDPrefix)) - if err != nil { - return nil, err + if strings.HasPrefix(didLookupStr, fftypes.DIDPrefix) { + if !strings.HasPrefix(didLookupStr, fftypes.FireFlyDIDPrefix) { + return nil, false, i18n.NewError(ctx, i18n.MsgDIDResolverUnknown, didLookupStr) } - if org, err = im.database.GetOrganizationByID(ctx, orgUUID); err != nil { - return nil, err + // Look up by the full DID + if identity, err = im.database.GetIdentityByDID(ctx, didLookupStr); err != nil { + return nil, true /* DB Error */, err } - if org == nil { - return nil, i18n.NewError(ctx, i18n.MsgAuthorNotFoundByDID, author) + if identity == nil && strings.HasPrefix(didLookupStr, fftypes.FireFlyOrgDIDPrefix) { + // We allow the UUID to be used to resolve DIDs as an alias to the name + uuid, err := fftypes.ParseUUID(ctx, strings.TrimPrefix(didLookupStr, fftypes.FireFlyOrgDIDPrefix)) + if err == nil { + if identity, err = im.database.GetIdentityByID(ctx, uuid); err != nil { + return nil, true /* DB Error */, err + } + } + } + if identity == nil { + return nil, false, i18n.NewError(ctx, i18n.MsgIdentityNotFoundByString, didLookupStr) } } else { - if org, err = im.database.GetOrganizationByName(ctx, author); err != nil { - return nil, err + // If there is just a name in there, then it could be an Org type identity (from the very original usage of the field) + if identity, err = im.database.GetIdentityByName(ctx, fftypes.IdentityTypeOrg, fftypes.SystemNamespace, didLookupStr); err != nil { + return nil, true /* DB Error */, err } - if org == nil { - return nil, i18n.NewError(ctx, i18n.MsgAuthorOrgNotFoundByName, author) + if identity == nil { + return nil, false, i18n.NewError(ctx, i18n.MsgAuthorOrgNotFoundByName, didLookupStr) } } // Cache the result - im.identityCache.Set(cacheKey, org, im.identityCacheTTL) + im.identityCache.Set(cacheKey, identity, im.identityCacheTTL) } - return org, nil + return identity, false, nil } -func (im *identityManager) resolveInputAuthor(ctx context.Context, identity *fftypes.Identity) (err error) { - - var org *fftypes.Organization - if identity.Author == "" { - // We allow lookup of an org by signing key (this convenience mechanism is currently not cached) - if identity.Key != "" { - if org, err = im.database.GetOrganizationByIdentity(ctx, identity.Key); err != nil { - return err - } - } - if org == nil { - // Otherwise default to the org identity that owns this node, if no input author specified - identity.Author = config.GetString(config.OrgName) +func (im *identityManager) CachedIdentityLookupByID(ctx context.Context, id *fftypes.UUID) (identity *fftypes.Identity, err error) { + // Use 
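The lookup accepts several reference forms; a hedged usage sketch, assuming the Manager interface shown in this diff (org1 and the generated UUID are illustrative only):

func exampleIdentityLookups(ctx context.Context, im Manager) {
	// 1. A full FireFly DID, for org or custom identities
	byDID, _, _ := im.CachedIdentityLookup(ctx, "did:firefly:org/org1")
	// 2. An org UUID appended to the org DID prefix, accepted as an alias for the name
	byUUID, _, _ := im.CachedIdentityLookup(ctx, fftypes.FireFlyOrgDIDPrefix+fftypes.NewUUID().String())
	// 3. A bare name, resolved as an org identity in the system namespace
	byName, _, _ := im.CachedIdentityLookup(ctx, "org1")
	log.L(ctx).Debugf("resolved: %v %v %v", byDID, byUUID, byName)
}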
an LRU cache for the author identity, as it's likely for the same identity to be re-used over and over + cacheKey := fmt.Sprintf("id=%s", id) + if cached := im.identityCache.Get(cacheKey); cached != nil { + cached.Extend(im.identityCacheTTL) + identity = cached.Value().(*fftypes.Identity) + } else { + identity, err = im.database.GetIdentityByID(ctx, id) + if err != nil || identity == nil { + return identity, err } + // Cache the result + im.identityCache.Set(cacheKey, identity, im.identityCacheTTL) } + return identity, nil +} - if org == nil { - if org, err = im.cachedOrgLookupByAuthor(ctx, identity.Author); err != nil { - return err +func (im *identityManager) CachedVerifierLookup(ctx context.Context, vType fftypes.VerifierType, ns, value string) (verifier *fftypes.Verifier, err error) { + // Use an LRU cache for the author identity, as it's likely for the same identity to be re-used over and over + cacheKey := fmt.Sprintf("v=%s|%s|%s", vType, ns, value) + if cached := im.identityCache.Get(cacheKey); cached != nil { + cached.Extend(im.identityCacheTTL) + verifier = cached.Value().(*fftypes.Verifier) + } else { + verifier, err = im.database.GetVerifierByValue(ctx, vType, ns, value) + if err != nil || verifier == nil { + return verifier, err } + // Cache the result + im.identityCache.Set(cacheKey, verifier, im.identityCacheTTL) } - - // TODO: Organizations should be able to have multiple signing keys. See notes below about whether a level of - // indirection is needed in front of orgs (likely it is). - if identity.Key == "" { - identity.Key = org.Identity - } else if org.Identity != identity.Key { - return i18n.NewError(ctx, i18n.MsgAuthorOrgSigningKeyMismatch, org.ID, identity.Key) - } - - // We normalize the author to the DID - identity.Author = im.OrgDID(org) - return nil - + return verifier, nil } diff --git a/internal/identity/identitymanager_test.go b/internal/identity/identitymanager_test.go index 464851b1cb..fed4b3f041 100644 --- a/internal/identity/identitymanager_test.go +++ b/internal/identity/identitymanager_test.go @@ -24,9 +24,11 @@ import ( "github.com/hyperledger/firefly/internal/config" "github.com/hyperledger/firefly/mocks/blockchainmocks" "github.com/hyperledger/firefly/mocks/databasemocks" + "github.com/hyperledger/firefly/mocks/datamocks" "github.com/hyperledger/firefly/mocks/identitymocks" "github.com/hyperledger/firefly/pkg/fftypes" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" ) func newTestIdentityManager(t *testing.T) (context.Context, *identityManager) { @@ -34,119 +36,262 @@ func newTestIdentityManager(t *testing.T) (context.Context, *identityManager) { mdi := &databasemocks.Plugin{} mii := &identitymocks.Plugin{} mbi := &blockchainmocks.Plugin{} + mdm := &datamocks.Manager{} config.Reset() + mbi.On("VerifierType").Return(fftypes.VerifierTypeEthAddress).Maybe() + ctx := context.Background() - im, err := NewIdentityManager(ctx, mdi, mii, mbi) + im, err := NewIdentityManager(ctx, mdi, mii, mbi, mdm) assert.NoError(t, err) return ctx, im.(*identityManager) } func TestNewIdentityManagerMissingDeps(t *testing.T) { - _, err := NewIdentityManager(context.Background(), nil, nil, nil) + _, err := NewIdentityManager(context.Background(), nil, nil, nil, nil) assert.Regexp(t, "FF10128", err) } -func TestResolveInputIdentityBlankBlank(t *testing.T) { - - identity := &fftypes.Identity{} - org := &fftypes.Organization{ - ID: fftypes.NewUUID(), - Name: "org1", - Identity: "0x12345", - } +func TestResolveInputSigningIdentityNoOrgKey(t *testing.T) { 
ctx, im := newTestIdentityManager(t) - mdi := im.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByName", ctx, "org1").Return(org, nil).Once() + msgIdentity := &fftypes.SignerRef{} + err := im.ResolveInputSigningIdentity(ctx, "ns1", msgIdentity) + assert.Regexp(t, "FF10354", err) + +} + +func TestResolveInputSigningIdentityOrgFallbackOk(t *testing.T) { + + ctx, im := newTestIdentityManager(t) + config.Set(config.OrgKey, "key123") config.Set(config.OrgName, "org1") - err := im.ResolveInputIdentity(ctx, identity) - assert.NoError(t, err) - assert.Equal(t, "0x12345", identity.Key) - assert.Equal(t, fmt.Sprintf("did:firefly:org/%s", org.ID), identity.Author) + mbi := im.blockchain.(*blockchainmocks.Plugin) + mbi.On("NormalizeSigningKey", ctx, "key123").Return("fullkey123", nil) + + orgID := fftypes.NewUUID() - // Cached result (note once above) - err = im.ResolveInputIdentity(ctx, &fftypes.Identity{}) + mdi := im.database.(*databasemocks.Plugin) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, fftypes.SystemNamespace, "fullkey123"). + Return((&fftypes.Verifier{ + Identity: orgID, + Namespace: fftypes.SystemNamespace, + VerifierRef: fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "fullkey123", + }, + }).Seal(), nil) + mdi.On("GetIdentityByID", ctx, orgID). + Return(&fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: orgID, + DID: "did:firefly:org/org1", + Namespace: fftypes.SystemNamespace, + Name: "org1", + Type: fftypes.IdentityTypeOrg, + }, + }, nil) + + msgIdentity := &fftypes.SignerRef{} + err := im.ResolveInputSigningIdentity(ctx, "ns1", msgIdentity) assert.NoError(t, err) + assert.Equal(t, "did:firefly:org/org1", msgIdentity.Author) + assert.Equal(t, "fullkey123", msgIdentity.Key) + + mbi.AssertExpectations(t) mdi.AssertExpectations(t) + } -func TestResolveInputIdentityBlankShortKeyNameResolved(t *testing.T) { +func TestResolveInputSigningIdentityByKeyOk(t *testing.T) { - identity := &fftypes.Identity{ - Key: "org1key", - } - org := &fftypes.Organization{ - ID: fftypes.NewUUID(), - Name: "org1", - Identity: "0x12345", + ctx, im := newTestIdentityManager(t) + + mbi := im.blockchain.(*blockchainmocks.Plugin) + mbi.On("NormalizeSigningKey", ctx, "mykey123").Return("fullkey123", nil) + + idID := fftypes.NewUUID() + + mdi := im.database.(*databasemocks.Plugin) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, fftypes.SystemNamespace, "fullkey123"). + Return((&fftypes.Verifier{ + Identity: idID, + Namespace: "ns1", + VerifierRef: fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "fullkey123", + }, + }).Seal(), nil) + mdi.On("GetIdentityByID", ctx, idID). 
+ Return(&fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: idID, + DID: "did:firefly:ns/ns1/myid", + Namespace: fftypes.SystemNamespace, + Name: "myid", + Type: fftypes.IdentityTypeCustom, + }, + }, nil) + + msgIdentity := &fftypes.SignerRef{ + Key: "mykey123", } + err := im.ResolveInputSigningIdentity(ctx, "ns1", msgIdentity) + assert.NoError(t, err) + assert.Equal(t, "did:firefly:ns/ns1/myid", msgIdentity.Author) + assert.Equal(t, "fullkey123", msgIdentity.Key) + + mbi.AssertExpectations(t) + mdi.AssertExpectations(t) + +} + +func TestResolveInputSigningIdentityAnonymousKeyWithAuthorOk(t *testing.T) { ctx, im := newTestIdentityManager(t) + mbi := im.blockchain.(*blockchainmocks.Plugin) - mbi.On("ResolveSigningKey", ctx, "org1key").Return("0x12345", nil).Once() - mdi := im.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", ctx, "0x12345").Return(org, nil).Once() + mbi.On("NormalizeSigningKey", ctx, "mykey123").Return("fullkey123", nil) - config.Set(config.OrgName, "org1") + idID := fftypes.NewUUID() - err := im.ResolveInputIdentity(ctx, identity) + mdi := im.database.(*databasemocks.Plugin) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, fftypes.SystemNamespace, "fullkey123").Return(nil, nil) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, "ns1", "fullkey123").Return(nil, nil) + mdi.On("GetIdentityByDID", ctx, "did:firefly:ns/ns1/myid"). + Return(&fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: idID, + DID: "did:firefly:ns/ns1/myid", + Namespace: fftypes.SystemNamespace, + Name: "myid", + Type: fftypes.IdentityTypeCustom, + }, + }, nil) + + msgIdentity := &fftypes.SignerRef{ + Key: "mykey123", + Author: "did:firefly:ns/ns1/myid", + } + err := im.ResolveInputSigningIdentity(ctx, "ns1", msgIdentity) assert.NoError(t, err) - assert.Equal(t, "0x12345", identity.Key) - assert.Equal(t, fmt.Sprintf("did:firefly:org/%s", org.ID), identity.Author) + assert.Equal(t, "did:firefly:ns/ns1/myid", msgIdentity.Author) + assert.Equal(t, "fullkey123", msgIdentity.Key) + mbi.AssertExpectations(t) mdi.AssertExpectations(t) } -func TestResolveInputIdentityBlankShortKeyNameUnresolved(t *testing.T) { +func TestResolveInputSigningIdentityKeyWithNoAuthorFail(t *testing.T) { - identity := &fftypes.Identity{ - Key: "org1key", - } - org := &fftypes.Organization{ - ID: fftypes.NewUUID(), - Name: "org1", - Identity: "0x12345", + ctx, im := newTestIdentityManager(t) + + mbi := im.blockchain.(*blockchainmocks.Plugin) + mbi.On("NormalizeSigningKey", ctx, "mykey123").Return("fullkey123", nil) + + mdi := im.database.(*databasemocks.Plugin) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, fftypes.SystemNamespace, "fullkey123").Return(nil, nil) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, "ns1", "fullkey123").Return(nil, nil) + + msgIdentity := &fftypes.SignerRef{ + Key: "mykey123", } + err := im.ResolveInputSigningIdentity(ctx, "ns1", msgIdentity) + assert.Regexp(t, "FF10356", err) + + mbi.AssertExpectations(t) + mdi.AssertExpectations(t) + +} + +func TestResolveInputSigningIdentityByKeyDIDMismatch(t *testing.T) { ctx, im := newTestIdentityManager(t) + mbi := im.blockchain.(*blockchainmocks.Plugin) - mbi.On("ResolveSigningKey", ctx, "org1key").Return("0x12345", nil).Once() - mdi := im.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", ctx, "0x12345").Return(nil, nil).Once() - mdi.On("GetOrganizationByName", ctx, "org1").Return(org, nil).Once() + mbi.On("NormalizeSigningKey", 
ctx, "mykey123").Return("fullkey123", nil) - config.Set(config.OrgName, "org1") + idID := fftypes.NewUUID() - err := im.ResolveInputIdentity(ctx, identity) - assert.NoError(t, err) - assert.Equal(t, "0x12345", identity.Key) - assert.Equal(t, fmt.Sprintf("did:firefly:org/%s", org.ID), identity.Author) + mdi := im.database.(*databasemocks.Plugin) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, fftypes.SystemNamespace, "fullkey123"). + Return((&fftypes.Verifier{ + Identity: idID, + Namespace: "ns1", + VerifierRef: fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "fullkey123", + }, + }).Seal(), nil) + mdi.On("GetIdentityByID", ctx, idID). + Return(&fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: idID, + DID: "did:firefly:ns/ns1/myid", + Namespace: "ns1", + Name: "myid", + Type: fftypes.IdentityTypeCustom, + }, + }, nil) + + msgIdentity := &fftypes.SignerRef{ + Key: "mykey123", + Author: "did:firefly:ns/ns1/notmyid", + } + err := im.ResolveInputSigningIdentity(ctx, "ns1", msgIdentity) + assert.Regexp(t, "FF10355", err) mbi.AssertExpectations(t) mdi.AssertExpectations(t) } -func TestResolveInputIdentityBlankShortKeyNameFail(t *testing.T) { +func TestResolveInputSigningIdentityByKeyNotFound(t *testing.T) { + + ctx, im := newTestIdentityManager(t) - identity := &fftypes.Identity{ - Key: "org1key", + mbi := im.blockchain.(*blockchainmocks.Plugin) + mbi.On("NormalizeSigningKey", ctx, "mykey123").Return("fullkey123", nil) + + mdi := im.database.(*databasemocks.Plugin) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, fftypes.SystemNamespace, "fullkey123"). + Return(nil, nil) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, "ns1", "fullkey123"). + Return(nil, nil) + mdi.On("GetIdentityByDID", ctx, "did:firefly:ns/ns1/unknown"). + Return(nil, nil) + + msgIdentity := &fftypes.SignerRef{ + Key: "mykey123", + Author: "did:firefly:ns/ns1/unknown", } + err := im.ResolveInputSigningIdentity(ctx, "ns1", msgIdentity) + assert.Regexp(t, "FF10277", err) + + mbi.AssertExpectations(t) + mdi.AssertExpectations(t) + +} + +func TestResolveInputSigningIdentityByKeyFail(t *testing.T) { ctx, im := newTestIdentityManager(t) + mbi := im.blockchain.(*blockchainmocks.Plugin) - mbi.On("ResolveSigningKey", ctx, "org1key").Return("0x12345", nil).Once() - mdi := im.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", ctx, "0x12345").Return(nil, fmt.Errorf("pop")).Once() + mbi.On("NormalizeSigningKey", ctx, "mykey123").Return("fullkey123", nil) - config.Set(config.OrgName, "org1") + mdi := im.database.(*databasemocks.Plugin) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, fftypes.SystemNamespace, "fullkey123"). 
+ Return(nil, fmt.Errorf("pop")) - err := im.ResolveInputIdentity(ctx, identity) + msgIdentity := &fftypes.SignerRef{ + Key: "mykey123", + } + err := im.ResolveInputSigningIdentity(ctx, "ns1", msgIdentity) assert.Regexp(t, "pop", err) mbi.AssertExpectations(t) @@ -154,319 +299,981 @@ func TestResolveInputIdentityBlankShortKeyNameFail(t *testing.T) { } -func TestResolveInputIdentityOrgIdShortKeyName(t *testing.T) { +func TestResolveInputSigningIdentityByKeyResolveFail(t *testing.T) { - identity := &fftypes.Identity{ - Key: "org1key", - Author: "org1", - } - org := &fftypes.Organization{ - ID: fftypes.NewUUID(), - Name: "org1", - Identity: "0x12345", + ctx, im := newTestIdentityManager(t) + + mbi := im.blockchain.(*blockchainmocks.Plugin) + mbi.On("NormalizeSigningKey", ctx, "mykey123").Return("", fmt.Errorf("pop")) + + msgIdentity := &fftypes.SignerRef{ + Key: "mykey123", } + err := im.ResolveInputSigningIdentity(ctx, "ns1", msgIdentity) + assert.Regexp(t, "pop", err) + + mbi.AssertExpectations(t) +} + +func TestResolveInputSigningIdentityByOrgNameOk(t *testing.T) { ctx, im := newTestIdentityManager(t) - mbi := im.blockchain.(*blockchainmocks.Plugin) - mbi.On("ResolveSigningKey", ctx, "org1key").Return("0x12345", nil).Once() - mdi := im.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByName", ctx, "org1").Return(org, nil).Once() - err := im.ResolveInputIdentity(ctx, identity) - assert.NoError(t, err) - assert.Equal(t, "0x12345", identity.Key) - assert.Equal(t, fmt.Sprintf("did:firefly:org/%s", org.ID), identity.Author) + idID := fftypes.NewUUID() - // Cached result (note once on mocks above) - err = im.ResolveInputIdentity(ctx, &fftypes.Identity{ - Key: "org1key", + mdi := im.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByName", ctx, fftypes.IdentityTypeOrg, fftypes.SystemNamespace, "org1"). + Return(&fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: idID, + DID: "did:firefly:org/org1", + Namespace: fftypes.SystemNamespace, + Name: "myid", + Type: fftypes.IdentityTypeOrg, + }, + }, nil) + mdi.On("GetVerifiers", ctx, mock.Anything). + Return([]*fftypes.Verifier{ + (&fftypes.Verifier{ + Identity: idID, + Namespace: "ns1", + VerifierRef: fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "fullkey123", + }, + }).Seal(), + }, nil, nil) + + msgIdentity := &fftypes.SignerRef{ Author: "org1", - }) + } + err := im.ResolveInputSigningIdentity(ctx, "ns1", msgIdentity) assert.NoError(t, err) + assert.Equal(t, "did:firefly:org/org1", msgIdentity.Author) + assert.Equal(t, "fullkey123", msgIdentity.Key) + + mdi.AssertExpectations(t) + +} + +func TestResolveInputSigningIdentityByOrgLookkupNotFound(t *testing.T) { + + ctx, im := newTestIdentityManager(t) + + mdi := im.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByName", ctx, fftypes.IdentityTypeOrg, fftypes.SystemNamespace, "org1"). + Return(nil, nil) + + msgIdentity := &fftypes.SignerRef{ + Author: "org1", + } + err := im.ResolveInputSigningIdentity(ctx, "ns1", msgIdentity) + assert.Regexp(t, "FF10278", err) - mbi.AssertExpectations(t) mdi.AssertExpectations(t) + } -func TestResolveInputIdentityOrgKeyMismatch(t *testing.T) { +func TestResolveInputSigningIdentityByOrgLookkupFail(t *testing.T) { + + ctx, im := newTestIdentityManager(t) - identity := &fftypes.Identity{ - Key: "org1key", + mdi := im.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByName", ctx, fftypes.IdentityTypeOrg, fftypes.SystemNamespace, "org1"). 
+ Return(nil, fmt.Errorf("pop")) + + msgIdentity := &fftypes.SignerRef{ Author: "org1", } - org := &fftypes.Organization{ - ID: fftypes.NewUUID(), - Name: "org1", - Identity: "0x222222", + err := im.ResolveInputSigningIdentity(ctx, "ns1", msgIdentity) + assert.Regexp(t, "pop", err) + + mdi.AssertExpectations(t) + +} + +func TestResolveInputSigningIdentityByOrgVerifierFail(t *testing.T) { + + ctx, im := newTestIdentityManager(t) + + idID := fftypes.NewUUID() + + mdi := im.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByName", ctx, fftypes.IdentityTypeOrg, fftypes.SystemNamespace, "org1"). + Return(&fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: idID, + DID: "did:firefly:org/org1", + Namespace: fftypes.SystemNamespace, + Name: "myid", + Type: fftypes.IdentityTypeOrg, + }, + }, nil) + mdi.On("GetVerifiers", ctx, mock.Anything).Return(nil, nil, fmt.Errorf("pop")) + + msgIdentity := &fftypes.SignerRef{ + Author: "org1", } + err := im.ResolveInputSigningIdentity(ctx, "ns1", msgIdentity) + assert.Regexp(t, "pop", err) + + mdi.AssertExpectations(t) + +} + +func TestNormalizeSigningKeyOrgFallbackOk(t *testing.T) { ctx, im := newTestIdentityManager(t) + config.Set(config.OrgKey, "key123") + config.Set(config.OrgName, "org1") + mbi := im.blockchain.(*blockchainmocks.Plugin) - mbi.On("ResolveSigningKey", ctx, "org1key").Return("0x111111", nil).Once() - mdi := im.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByName", ctx, "org1").Return(org, nil).Once() + mbi.On("NormalizeSigningKey", ctx, "key123").Return("fullkey123", nil) - err := im.ResolveInputIdentity(ctx, identity) - assert.Regexp(t, "FF10279", err) + orgID := fftypes.NewUUID() + + mdi := im.database.(*databasemocks.Plugin) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, fftypes.SystemNamespace, "fullkey123"). + Return((&fftypes.Verifier{ + Identity: orgID, + Namespace: fftypes.SystemNamespace, + VerifierRef: fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "fullkey123", + }, + }).Seal(), nil) + mdi.On("GetIdentityByID", ctx, orgID). + Return(&fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: orgID, + DID: "did:firefly:org/org1", + Namespace: fftypes.SystemNamespace, + Name: "org1", + Type: fftypes.IdentityTypeOrg, + }, + }, nil) + + resolvedKey, err := im.NormalizeSigningKey(ctx, "", KeyNormalizationBlockchainPlugin) + assert.NoError(t, err) + assert.Equal(t, "fullkey123", resolvedKey) mbi.AssertExpectations(t) mdi.AssertExpectations(t) + } -func TestResolveInputIdentityResolveKeyFail(t *testing.T) { +func TestNormalizeSigningKeyOrgFallbackErr(t *testing.T) { - identity := &fftypes.Identity{ - Key: "org1key", - } + ctx, im := newTestIdentityManager(t) + config.Set(config.OrgKey, "key123") + config.Set(config.OrgName, "org1") + + mbi := im.blockchain.(*blockchainmocks.Plugin) + mbi.On("NormalizeSigningKey", ctx, "key123").Return("fullkey123", nil) + + mdi := im.database.(*databasemocks.Plugin) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, fftypes.SystemNamespace, "fullkey123"). 
+ Return(nil, fmt.Errorf("pop")) + + _, err := im.NormalizeSigningKey(ctx, "", KeyNormalizationBlockchainPlugin) + assert.Regexp(t, "pop", err) + + mbi.AssertExpectations(t) + mdi.AssertExpectations(t) + +} + +func TestResolveInputSigningKeyOk(t *testing.T) { ctx, im := newTestIdentityManager(t) + config.Set(config.OrgKey, "key123") + config.Set(config.OrgName, "org1") + mbi := im.blockchain.(*blockchainmocks.Plugin) - mbi.On("ResolveSigningKey", ctx, "org1key").Return("", fmt.Errorf("pop")) + mbi.On("NormalizeSigningKey", ctx, "key123").Return("fullkey123", nil) + + resolvedKey, err := im.NormalizeSigningKey(ctx, "key123", KeyNormalizationBlockchainPlugin) + assert.NoError(t, err) + assert.Equal(t, "fullkey123", resolvedKey) - err := im.ResolveInputIdentity(ctx, identity) - assert.Regexp(t, err, "pop") mbi.AssertExpectations(t) } -func TestResolveInputIdentityBadOrgDID(t *testing.T) { +func TestResolveInputSigningKeyFail(t *testing.T) { - identity := &fftypes.Identity{ - Author: "did:firefly:org/!NoUUIDHere!", - } + ctx, im := newTestIdentityManager(t) + config.Set(config.OrgKey, "key123") + config.Set(config.OrgName, "org1") + + mbi := im.blockchain.(*blockchainmocks.Plugin) + mbi.On("NormalizeSigningKey", ctx, "key123").Return("", fmt.Errorf("pop")) + + _, err := im.NormalizeSigningKey(ctx, "key123", KeyNormalizationBlockchainPlugin) + assert.Regexp(t, "pop", err) + + mbi.AssertExpectations(t) +} + +func TestResolveInputSigningKeyBypass(t *testing.T) { ctx, im := newTestIdentityManager(t) + config.Set(config.OrgKey, "key123") + config.Set(config.OrgName, "org1") - err := im.ResolveInputIdentity(ctx, identity) - assert.Regexp(t, "FF10142", err) + key, err := im.NormalizeSigningKey(ctx, "different-type-of-key", KeyNormalizationNone) + assert.NoError(t, err) + assert.Equal(t, "different-type-of-key", key) } -func TestResolveInputIdentityOrgLookupByDIDFail(t *testing.T) { +func TestFirstVerifierForIdentityNotFound(t *testing.T) { - orgId := fftypes.NewUUID() - identity := &fftypes.Identity{ - Author: fmt.Sprintf("did:firefly:org/%s", orgId), + ctx, im := newTestIdentityManager(t) + + id := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + DID: "did:firefly:org/org1", + Namespace: fftypes.SystemNamespace, + Name: "myid", + Type: fftypes.IdentityTypeOrg, + }, } - ctx, im := newTestIdentityManager(t) mdi := im.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByID", ctx, orgId).Return(nil, fmt.Errorf("pop")) + mdi.On("GetVerifiers", ctx, mock.Anything).Return([]*fftypes.Verifier{}, nil, nil) + + _, retryable, err := im.firstVerifierForIdentity(ctx, fftypes.VerifierTypeEthAddress, id) + assert.Regexp(t, "FF10353", err) + assert.False(t, retryable) - err := im.ResolveInputIdentity(ctx, identity) - assert.Regexp(t, "pop", err) mdi.AssertExpectations(t) + } -func TestResolveInputIdentityOrgLookupByDIDNotFound(t *testing.T) { +func TestResolveNodeOwnerSigningIdentityNotFound(t *testing.T) { - orgId := fftypes.NewUUID() - identity := &fftypes.Identity{ - Author: fmt.Sprintf("did:firefly:org/%s", orgId), + ctx, im := newTestIdentityManager(t) + im.nodeOwnerBlockchainKey = &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "key12345", } + config.Set(config.OrgName, "org1") - ctx, im := newTestIdentityManager(t) mdi := im.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByID", ctx, orgId).Return(nil, nil) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, fftypes.SystemNamespace, "key12345").Return(nil, nil) + + err 
:= im.ResolveNodeOwnerSigningIdentity(ctx, &fftypes.SignerRef{}) + assert.Regexp(t, "FF10281", err) - err := im.ResolveInputIdentity(ctx, identity) - assert.Regexp(t, "FF10277", err) mdi.AssertExpectations(t) + } -func TestResolveInputIdentityOrgLookupByNameFail(t *testing.T) { +func TestGetNodeOwnerBlockchainKeyDeprecatedKeyResolveFailed(t *testing.T) { - identity := &fftypes.Identity{ - Author: "org1", + ctx, im := newTestIdentityManager(t) + config.Set(config.OrgIdentityDeprecated, "0x12345") + + mbi := im.blockchain.(*blockchainmocks.Plugin) + mbi.On("NormalizeSigningKey", ctx, "0x12345").Return("", fmt.Errorf("pop")) + + _, err := im.GetNodeOwnerBlockchainKey(ctx) + assert.Regexp(t, "pop", err) + + mbi.AssertExpectations(t) + +} + +func TestNormalizeKeyViaBlockchainPluginEmptyRequest(t *testing.T) { + + ctx, im := newTestIdentityManager(t) + + _, err := im.normalizeKeyViaBlockchainPlugin(ctx, "") + assert.Regexp(t, "FF10352", err) + +} + +func TestNormalizeKeyViaBlockchainPluginCached(t *testing.T) { + + ctx, im := newTestIdentityManager(t) + + mbi := im.blockchain.(*blockchainmocks.Plugin) + mbi.On("NormalizeSigningKey", ctx, "0x12345").Return("resolved12345", nil).Once() + + v, err := im.normalizeKeyViaBlockchainPlugin(ctx, "0x12345") + assert.NoError(t, err) + assert.Equal(t, fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "resolved12345", + }, *v) + + v1, err := im.normalizeKeyViaBlockchainPlugin(ctx, "0x12345") + assert.NoError(t, err) + assert.Equal(t, v, v1) + +} + +func TestGetNodeOwnerOrgCached(t *testing.T) { + + ctx, im := newTestIdentityManager(t) + im.nodeOwningOrgIdentity = &fftypes.Identity{} + + id, err := im.GetNodeOwnerOrg(ctx) + assert.NoError(t, err) + assert.NotNil(t, id) + +} + +func TestGetNodeOwnerOrgKeyNotSet(t *testing.T) { + + ctx, im := newTestIdentityManager(t) + + _, err := im.GetNodeOwnerOrg(ctx) + assert.Regexp(t, "FF10354", err) + +} + +func TestGetNodeOwnerOrgMismatch(t *testing.T) { + + ctx, im := newTestIdentityManager(t) + im.nodeOwnerBlockchainKey = &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "fullkey123", } + config.Set(config.OrgName, "org1") + + orgID := fftypes.NewUUID() + mdi := im.database.(*databasemocks.Plugin) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, fftypes.SystemNamespace, "fullkey123"). + Return((&fftypes.Verifier{ + Identity: orgID, + Namespace: fftypes.SystemNamespace, + VerifierRef: fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "fullkey123", + }, + }).Seal(), nil) + mdi.On("GetIdentityByID", ctx, orgID). + Return(&fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: orgID, + DID: "did:firefly:org/org2", + Namespace: fftypes.SystemNamespace, + Name: "org2", + Type: fftypes.IdentityTypeOrg, + }, + }, nil) + + _, err := im.GetNodeOwnerOrg(ctx) + assert.Regexp(t, "FF10281", err) + +} + +func TestCachedIdentityLookupByVerifierRefCaching(t *testing.T) { ctx, im := newTestIdentityManager(t) + + id := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + DID: "did:firefly:node/peer1", + Namespace: fftypes.SystemNamespace, + Name: "peer1", + Type: fftypes.IdentityTypeOrg, + }, + } mdi := im.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByName", ctx, "org1").Return(nil, fmt.Errorf("pop")) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeFFDXPeerID, fftypes.SystemNamespace, "peer1"). 
+ Return((&fftypes.Verifier{ + Identity: id.ID, + Namespace: fftypes.SystemNamespace, + VerifierRef: fftypes.VerifierRef{ + Type: fftypes.VerifierTypeFFDXPeerID, + Value: "peer1", + }, + }).Seal(), nil) + mdi.On("GetIdentityByID", ctx, id.ID). + Return(id, nil) + + v1, err := im.cachedIdentityLookupByVerifierRef(ctx, fftypes.SystemNamespace, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeFFDXPeerID, + Value: "peer1", + }) + assert.NoError(t, err) + assert.Equal(t, id, v1) + + v2, err := im.cachedIdentityLookupByVerifierRef(ctx, fftypes.SystemNamespace, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeFFDXPeerID, + Value: "peer1", + }) + assert.NoError(t, err) + assert.Equal(t, id, v2) - err := im.ResolveInputIdentity(ctx, identity) +} + +func TestCachedIdentityLookupByVerifierRefError(t *testing.T) { + + ctx, im := newTestIdentityManager(t) + + id := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + DID: "did:firefly:node/peer1", + Namespace: fftypes.SystemNamespace, + Name: "peer1", + Type: fftypes.IdentityTypeOrg, + }, + } + mdi := im.database.(*databasemocks.Plugin) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, fftypes.SystemNamespace, "peer1"). + Return((&fftypes.Verifier{ + Identity: id.ID, + Namespace: fftypes.SystemNamespace, + VerifierRef: fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "peer1", + }, + }).Seal(), nil) + mdi.On("GetIdentityByID", ctx, id.ID).Return(nil, fmt.Errorf("pop")) + + _, err := im.cachedIdentityLookupByVerifierRef(ctx, fftypes.SystemNamespace, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "peer1", + }) assert.Regexp(t, "pop", err) - mdi.AssertExpectations(t) + } -func TestResolveInputIdentityOrgLookupByNameNotFound(t *testing.T) { +func TestCachedIdentityLookupByVerifierRefNotFound(t *testing.T) { - identity := &fftypes.Identity{ - Author: "org1", + ctx, im := newTestIdentityManager(t) + + id := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + DID: "did:firefly:node/peer1", + Namespace: fftypes.SystemNamespace, + Name: "peer1", + Type: fftypes.IdentityTypeOrg, + }, } + mdi := im.database.(*databasemocks.Plugin) + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeEthAddress, fftypes.SystemNamespace, "0x12345"). 
+ Return((&fftypes.Verifier{ + Identity: id.ID, + Namespace: fftypes.SystemNamespace, + VerifierRef: fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "peer1", + }, + }).Seal(), nil) + mdi.On("GetIdentityByID", ctx, id.ID).Return(nil, nil) + + _, err := im.cachedIdentityLookupByVerifierRef(ctx, fftypes.SystemNamespace, &fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "0x12345", + }) + assert.Regexp(t, "FF10220", err) + +} + +func TestCachedIdentityLookupCaching(t *testing.T) { ctx, im := newTestIdentityManager(t) + + id := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + DID: "did:firefly:node/peer1", + Namespace: fftypes.SystemNamespace, + Name: "peer1", + Type: fftypes.IdentityTypeOrg, + }, + } mdi := im.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByName", ctx, "org1").Return(nil, nil) + mdi.On("GetIdentityByDID", ctx, "did:firefly:node/peer1").Return(id, nil).Once() - err := im.ResolveInputIdentity(ctx, identity) - assert.Regexp(t, "FF10278", err) - mdi.AssertExpectations(t) + v1, _, err := im.CachedIdentityLookup(ctx, "did:firefly:node/peer1") + assert.NoError(t, err) + assert.Equal(t, id, v1) + + v2, _, err := im.CachedIdentityLookup(ctx, "did:firefly:node/peer1") + assert.NoError(t, err) + assert.Equal(t, id, v2) } -func TestResolveSigningKeyIdentityBadSigningKey(t *testing.T) { +func TestCachedIdentityLookupUnknownResolver(t *testing.T) { ctx, im := newTestIdentityManager(t) - mbi := im.blockchain.(*blockchainmocks.Plugin) - mbi.On("ResolveSigningKey", ctx, "badness").Return("", fmt.Errorf("pop")) - _, err := im.ResolveSigningKeyIdentity(ctx, "badness") - assert.Regexp(t, "pop", err) - mbi.AssertExpectations(t) + _, retryable, err := im.CachedIdentityLookup(ctx, "did:random:anything") + assert.Regexp(t, "FF10349", err) + assert.False(t, retryable) + } -func TestResolveSigningKeyIdentityOrgLookupFail(t *testing.T) { +func TestCachedIdentityLookupGetIDFail(t *testing.T) { ctx, im := newTestIdentityManager(t) - mbi := im.blockchain.(*blockchainmocks.Plugin) - mbi.On("ResolveSigningKey", ctx, "key1").Return("key1resolved", nil) + mdi := im.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", ctx, "key1resolved").Return(nil, fmt.Errorf("pop")) + mdi.On("GetIdentityByDID", ctx, "did:firefly:node/peer1").Return(nil, fmt.Errorf("pop")) - _, err := im.ResolveSigningKeyIdentity(ctx, "key1") + _, retryable, err := im.CachedIdentityLookup(ctx, "did:firefly:node/peer1") assert.Regexp(t, "pop", err) - mbi.AssertExpectations(t) + assert.True(t, retryable) + } -func TestResolveSigningKeyIdentityOrgLookupOkCached(t *testing.T) { +func TestCachedIdentityLookupByVerifierByOldDIDFail(t *testing.T) { - org := &fftypes.Organization{ - ID: fftypes.NewUUID(), - Identity: "key1resolved", - } + ctx, im := newTestIdentityManager(t) + + orgUUID := fftypes.NewUUID() + did := fftypes.FireFlyOrgDIDPrefix + orgUUID.String() + + mdi := im.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByDID", ctx, did).Return(nil, nil) + mdi.On("GetIdentityByID", ctx, mock.MatchedBy(func(uuid *fftypes.UUID) bool { + return uuid.Equals(orgUUID) + })).Return(nil, fmt.Errorf("pop")) + + _, retryable, err := im.CachedIdentityLookup(ctx, did) + assert.Regexp(t, "pop", err) + assert.True(t, retryable) + +} + +func TestCachedIdentityLookupByIDCaching(t *testing.T) { ctx, im := newTestIdentityManager(t) - mbi := im.blockchain.(*blockchainmocks.Plugin) - mbi.On("ResolveSigningKey", ctx, "key1").Return("key1resolved", nil) + + 
id := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + DID: "did:firefly:node/peer1", + Namespace: fftypes.SystemNamespace, + Name: "peer1", + Type: fftypes.IdentityTypeOrg, + }, + } mdi := im.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", ctx, "key1resolved").Return(org, nil).Once() + mdi.On("GetIdentityByID", ctx, id.ID).Return(id, nil).Once() - author, err := im.ResolveSigningKeyIdentity(ctx, "key1") + v1, err := im.CachedIdentityLookupByID(ctx, id.ID) assert.NoError(t, err) - assert.Equal(t, im.OrgDID(org), author) + assert.Equal(t, id, v1) - // Cached second time, without any DB call (see "Once()" above) - author, err = im.ResolveSigningKeyIdentity(ctx, "key1") + v2, err := im.CachedIdentityLookupByID(ctx, id.ID) assert.NoError(t, err) - assert.Equal(t, im.OrgDID(org), author) + assert.Equal(t, id, v2) - mbi.AssertExpectations(t) + mdi.AssertExpectations(t) } -func TestResolveSigningKeyIdentityOrgLookupUnresolved(t *testing.T) { +func TestVerifyIdentityChainCustomOrgOrgOk(t *testing.T) { ctx, im := newTestIdentityManager(t) - mbi := im.blockchain.(*blockchainmocks.Plugin) - mbi.On("ResolveSigningKey", ctx, "key1").Return("key1resolved", nil) + + idRoot := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + DID: "did:firefly:org/org1", + Namespace: fftypes.SystemNamespace, + Name: "org1", + Type: fftypes.IdentityTypeOrg, + }, + Messages: fftypes.IdentityMessages{ + Claim: fftypes.NewUUID(), + }, + } + idIntermediateOrg := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + Parent: idRoot.ID, + DID: "did:firefly:org/org2", + Namespace: fftypes.SystemNamespace, + Name: "org2", + Type: fftypes.IdentityTypeOrg, + }, + Messages: fftypes.IdentityMessages{ + Claim: fftypes.NewUUID(), + }, + } + idIntermediateCustom := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + Parent: idIntermediateOrg.ID, + DID: "did:firefly:ns/ns1/custom1", + Namespace: "ns1", + Name: "custom1", + Type: fftypes.IdentityTypeCustom, + }, + Messages: fftypes.IdentityMessages{ + Claim: fftypes.NewUUID(), + }, + } + idLeaf := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + Parent: idIntermediateCustom.ID, + DID: "did:firefly:ns/ns1/custom2", + Namespace: "ns1", + Name: "custom2", + Type: fftypes.IdentityTypeCustom, + }, + } mdi := im.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", ctx, "key1resolved").Return(nil, nil) + mdi.On("GetIdentityByID", ctx, idIntermediateOrg.ID).Return(idIntermediateOrg, nil).Once() + mdi.On("GetIdentityByID", ctx, idIntermediateCustom.ID).Return(idIntermediateCustom, nil).Once() + mdi.On("GetIdentityByID", ctx, idRoot.ID).Return(idRoot, nil).Once() - author, err := im.ResolveSigningKeyIdentity(ctx, "key1") + immeidateParent, _, err := im.VerifyIdentityChain(ctx, idLeaf) + assert.Equal(t, idIntermediateCustom, immeidateParent) assert.NoError(t, err) - assert.Equal(t, "", author) - mbi.AssertExpectations(t) + mdi.AssertExpectations(t) } -func TestGetLocalOrgKey(t *testing.T) { +func TestVerifyIdentityInvalid(t *testing.T) { ctx, im := newTestIdentityManager(t) - mbi := im.blockchain.(*blockchainmocks.Plugin) - mbi.On("ResolveSigningKey", ctx, "key1").Return("key1resolved", nil).Once() - config.Set(config.OrgIdentityDeprecated, "key1") + id1 := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{}, + } - localOrgKey, err := im.GetLocalOrgKey(ctx) - assert.NoError(t, err) - 
assert.Equal(t, "key1resolved", localOrgKey) + _, retryable, err := im.VerifyIdentityChain(ctx, id1) + assert.Regexp(t, "FF10203", err) + assert.False(t, retryable) - // Check cache - localOrgKey, err = im.GetLocalOrgKey(ctx) - assert.NoError(t, err) - assert.Equal(t, "key1resolved", localOrgKey) +} - mbi.AssertExpectations(t) +func TestVerifyIdentityChainLoop(t *testing.T) { + + ctx, im := newTestIdentityManager(t) + + idID1 := fftypes.NewUUID() + idID2 := fftypes.NewUUID() + id1 := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: idID1, + Parent: idID2, + DID: "did:firefly:org/org1", + Namespace: fftypes.SystemNamespace, + Name: "org1", + Type: fftypes.IdentityTypeOrg, + }, + Messages: fftypes.IdentityMessages{ + Claim: fftypes.NewUUID(), + }, + } + id2 := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: idID2, + Parent: idID1, + DID: "did:firefly:org/org2", + Namespace: fftypes.SystemNamespace, + Name: "org2", + Type: fftypes.IdentityTypeOrg, + }, + Messages: fftypes.IdentityMessages{ + Claim: fftypes.NewUUID(), + }, + } + + mdi := im.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByID", ctx, idID2).Return(id2, nil).Once() + + _, retryable, err := im.VerifyIdentityChain(ctx, id1) + assert.Regexp(t, "FF10364", err) + assert.False(t, retryable) + + mdi.AssertExpectations(t) } -func TestGetLocalOrgKeyFail(t *testing.T) { +func TestVerifyIdentityChainBadParent(t *testing.T) { ctx, im := newTestIdentityManager(t) - mbi := im.blockchain.(*blockchainmocks.Plugin) - mbi.On("ResolveSigningKey", ctx, "key1").Return("", fmt.Errorf("pop")).Once() - config.Set(config.OrgIdentityDeprecated, "key1") + idID1 := fftypes.NewUUID() + idID2 := fftypes.NewUUID() + id1 := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: idID1, + Parent: idID2, + DID: "did:firefly:org/org1", + Namespace: fftypes.SystemNamespace, + Name: "org1", + Type: fftypes.IdentityTypeOrg, + }, + } + id2 := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: idID2, + DID: "did:firefly:org/org2", + Namespace: fftypes.SystemNamespace, + Name: "org2", + Type: fftypes.IdentityTypeOrg, + }, + } - _, err := im.GetLocalOrgKey(ctx) - assert.EqualError(t, err, "pop") + mdi := im.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByID", ctx, idID2).Return(id2, nil).Once() - mbi.AssertExpectations(t) + _, retryable, err := im.VerifyIdentityChain(ctx, id1) + assert.Regexp(t, "FF10366", err) + assert.False(t, retryable) + + mdi.AssertExpectations(t) } -func TestResolveLocalOrgDIDSuccess(t *testing.T) { +func TestVerifyIdentityChainErr(t *testing.T) { + + ctx, im := newTestIdentityManager(t) - org := &fftypes.Organization{ - ID: fftypes.NewUUID(), - Name: "org1", - Identity: "0x222222", + idID2 := fftypes.NewUUID() + id1 := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + Parent: idID2, + DID: "did:firefly:org/org1", + Namespace: fftypes.SystemNamespace, + Name: "org1", + Type: fftypes.IdentityTypeOrg, + }, } + mdi := im.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByID", ctx, idID2).Return(nil, fmt.Errorf("pop")) + + _, retryable, err := im.VerifyIdentityChain(ctx, id1) + assert.Regexp(t, "pop", err) + assert.True(t, retryable) + + mdi.AssertExpectations(t) +} + +func TestVerifyIdentityChainNotFound(t *testing.T) { + ctx, im := newTestIdentityManager(t) - mbi := im.blockchain.(*blockchainmocks.Plugin) - mbi.On("ResolveSigningKey", ctx, "key1").Return("key1resolved", nil).Once() + + idID2 := fftypes.NewUUID() + id1 := &fftypes.Identity{ + 
IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + Parent: idID2, + DID: "did:firefly:org/org1", + Namespace: fftypes.SystemNamespace, + Name: "org1", + Type: fftypes.IdentityTypeOrg, + }, + } + mdi := im.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", ctx, "key1resolved").Return(org, nil).Once() + mdi.On("GetIdentityByID", ctx, idID2).Return(nil, nil) - config.Set(config.OrgIdentityDeprecated, "key1") + _, retryable, err := im.VerifyIdentityChain(ctx, id1) + assert.Regexp(t, "FF10214", err) + assert.False(t, retryable) - localOrgDID, err := im.ResolveLocalOrgDID(ctx) - assert.NoError(t, err) - assert.Equal(t, im.OrgDID(org), localOrgDID) + mdi.AssertExpectations(t) +} + +func TestVerifyIdentityChainInvalidParent(t *testing.T) { + + ctx, im := newTestIdentityManager(t) + + id1 := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + Parent: nil, + DID: "did:firefly:ns/ns1/custom1", + Namespace: "ns1", + Name: "custom1", + Type: fftypes.IdentityTypeCustom, + }, + } + id2 := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + Parent: id1.ID, + DID: "did:firefly:org/org2", + Namespace: fftypes.SystemNamespace, + Name: "org2", + Type: fftypes.IdentityTypeOrg, + }, + } + + mdi := im.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByID", ctx, id1.ID).Return(id1, nil).Once() + + _, retryable, err := im.VerifyIdentityChain(ctx, id2) + assert.Regexp(t, "FF10365", err) + assert.False(t, retryable) + + mdi.AssertExpectations(t) +} + +func TestValidateParentTypeCustomToNode(t *testing.T) { + + ctx, im := newTestIdentityManager(t) - // Second one cached - localOrgDID, err = im.ResolveLocalOrgDID(ctx) + id1 := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + Type: fftypes.IdentityTypeNode, + }, + } + id2 := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + Type: fftypes.IdentityTypeCustom, + }, + } + + err := im.validateParentType(ctx, id2, id1) + assert.Regexp(t, "FF10365", err) + +} + +func TestValidateParentTypeInvalidType(t *testing.T) { + + ctx, im := newTestIdentityManager(t) + + id1 := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + Type: fftypes.IdentityTypeCustom, + }, + } + id2 := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + Type: fftypes.IdentityType("unknown"), + }, + } + + err := im.validateParentType(ctx, id2, id1) + assert.Regexp(t, "FF10362", err) + +} + +func TestCachedVerifierLookupCaching(t *testing.T) { + + ctx, im := newTestIdentityManager(t) + + verifier := (&fftypes.Verifier{ + Namespace: fftypes.SystemNamespace, + VerifierRef: fftypes.VerifierRef{ + Value: "peer1", + Type: fftypes.VerifierTypeFFDXPeerID, + }, + }).Seal() + mdi := im.database.(*databasemocks.Plugin) + mdi.On("GetVerifierByValue", ctx, verifier.Type, verifier.Namespace, verifier.Value).Return(verifier, nil).Once() + + v1, err := im.CachedVerifierLookup(ctx, fftypes.VerifierTypeFFDXPeerID, fftypes.SystemNamespace, "peer1") assert.NoError(t, err) - assert.Equal(t, im.OrgDID(org), localOrgDID) + assert.Equal(t, verifier, v1) - mdi.On("GetOrganizationByID", ctx, org.ID).Return(org, nil) - localOrg, err := im.GetLocalOrganization(ctx) + v2, err := im.CachedVerifierLookup(ctx, fftypes.VerifierTypeFFDXPeerID, fftypes.SystemNamespace, "peer1") assert.NoError(t, err) - assert.Equal(t, org, localOrg) + assert.Equal(t, verifier, v2) - mbi.AssertExpectations(t) mdi.AssertExpectations(t) } -func TestResolveLocalOrgDIDFail(t *testing.T) { +func TestCachedVerifierLookupError(t 
*testing.T) { ctx, im := newTestIdentityManager(t) - mbi := im.blockchain.(*blockchainmocks.Plugin) - mbi.On("ResolveSigningKey", ctx, "key1").Return("key1resolved", nil).Once() + mdi := im.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", ctx, "key1resolved").Return(nil, fmt.Errorf("pop")).Twice() + mdi.On("GetVerifierByValue", ctx, fftypes.VerifierTypeFFDXPeerID, fftypes.SystemNamespace, "peer1").Return(nil, fmt.Errorf("pop")) - config.Set(config.OrgIdentityDeprecated, "key1") + _, err := im.CachedVerifierLookup(ctx, fftypes.VerifierTypeFFDXPeerID, fftypes.SystemNamespace, "peer1") + assert.Regexp(t, "pop", err) - _, err := im.ResolveLocalOrgDID(ctx) - assert.Regexp(t, "FF10281", err) + mdi.AssertExpectations(t) +} - _, err = im.GetLocalOrganization(ctx) - assert.Regexp(t, "FF10281", err) +func TestResolveIdentitySignerOk(t *testing.T) { + ctx, im := newTestIdentityManager(t) + mdi := im.database.(*databasemocks.Plugin) + + msgID := fftypes.NewUUID() + mdi.On("GetMessageByID", ctx, msgID).Return(&fftypes.Message{ + Header: fftypes.MessageHeader{ + SignerRef: fftypes.SignerRef{ + Key: "0x12345", + }, + }, + }, nil) + + signerRef, err := im.ResolveIdentitySigner(ctx, &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + DID: "did:firefly:org/org1", + Namespace: fftypes.SystemNamespace, + Name: "org1", + Type: fftypes.IdentityTypeOrg, + }, + Messages: fftypes.IdentityMessages{ + Claim: msgID, + }, + }) + assert.NoError(t, err) + assert.Equal(t, "0x12345", signerRef.Key) - mbi.AssertExpectations(t) mdi.AssertExpectations(t) } -func TestResolveLocalOrgDIDNotFound(t *testing.T) { - +func TestResolveIdentitySignerFail(t *testing.T) { ctx, im := newTestIdentityManager(t) - mbi := im.blockchain.(*blockchainmocks.Plugin) - mbi.On("ResolveSigningKey", ctx, "key1").Return("key1resolved", nil).Once() mdi := im.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", ctx, "key1resolved").Return(nil, nil).Once() - config.Set(config.OrgIdentityDeprecated, "key1") + msgID := fftypes.NewUUID() + mdi.On("GetMessageByID", ctx, msgID).Return(nil, fmt.Errorf("pop")) + + _, err := im.ResolveIdentitySigner(ctx, &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + DID: "did:firefly:org/org1", + Namespace: fftypes.SystemNamespace, + Name: "org1", + Type: fftypes.IdentityTypeOrg, + }, + Messages: fftypes.IdentityMessages{ + Claim: msgID, + }, + }) + assert.Regexp(t, "pop", err) + + mdi.AssertExpectations(t) +} - _, err := im.ResolveLocalOrgDID(ctx) - assert.Regexp(t, "FF10281", err) +func TestResolveIdentitySignerNotFound(t *testing.T) { + ctx, im := newTestIdentityManager(t) + mdi := im.database.(*databasemocks.Plugin) - mbi.AssertExpectations(t) + msgID := fftypes.NewUUID() + mdi.On("GetMessageByID", ctx, msgID).Return(nil, nil) + + _, err := im.ResolveIdentitySigner(ctx, &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + DID: "did:firefly:org/org1", + Namespace: fftypes.SystemNamespace, + Name: "org1", + Type: fftypes.IdentityTypeOrg, + }, + Messages: fftypes.IdentityMessages{ + Claim: msgID, + }, + }) + assert.Regexp(t, "FF10366", err) + + mdi.AssertExpectations(t) +} +func TestParseKeyNormalizationConfig(t *testing.T) { + assert.Equal(t, KeyNormalizationBlockchainPlugin, ParseKeyNormalizationConfig("blockchain_Plugin")) + assert.Equal(t, KeyNormalizationNone, ParseKeyNormalizationConfig("none")) + assert.Equal(t, KeyNormalizationNone, ParseKeyNormalizationConfig("")) } diff --git 
a/internal/metrics/metrics_test.go b/internal/metrics/metrics_test.go index 27d3d2f070..57ab703dca 100644 --- a/internal/metrics/metrics_test.go +++ b/internal/metrics/metrics_test.go @@ -28,24 +28,22 @@ import ( var msgID = fftypes.NewUUID() var Message = &fftypes.Message{ - Header: fftypes.MessageHeader{ - ID: msgID, - Identity: fftypes.Identity{ - Author: "did:firefly:org/abcd", - Key: "0x12345", - }, - Type: "", + Header: fftypes.MessageHeader{ + ID: msgID, + SignerRef: fftypes.SignerRef{ + Author: "did:firefly:org/abcd", + Key: "0x12345", }, - } - + Type: "", + }, +} var tokenLocalID = fftypes.NewUUID() var TokenTransfer = &fftypes.TokenTransfer{ - Amount: *fftypes.NewFFBigInt(1), - LocalID: tokenLocalID, - Type: "", - } - + Amount: *fftypes.NewFFBigInt(1), + LocalID: tokenLocalID, + Type: "", +} func newTestMetricsManager(t *testing.T) (*metricsManager, func()) { config.Reset() diff --git a/internal/networkmap/data_query.go b/internal/networkmap/data_query.go index 397238cff0..4acf915895 100644 --- a/internal/networkmap/data_query.go +++ b/internal/networkmap/data_query.go @@ -1,4 +1,4 @@ -// Copyright © 2021 Kaleido, Inc. +// Copyright © 2022 Kaleido, Inc. // // SPDX-License-Identifier: Apache-2.0 // @@ -19,30 +19,115 @@ package networkmap import ( "context" + "github.com/hyperledger/firefly/internal/i18n" + "github.com/hyperledger/firefly/internal/log" "github.com/hyperledger/firefly/pkg/database" "github.com/hyperledger/firefly/pkg/fftypes" ) -func (nm *networkMap) GetOrganizationByID(ctx context.Context, id string) (*fftypes.Organization, error) { +func (nm *networkMap) GetOrganizationByID(ctx context.Context, id string) (*fftypes.Identity, error) { u, err := fftypes.ParseUUID(ctx, id) if err != nil { return nil, err } - return nm.database.GetOrganizationByID(ctx, u) + o, err := nm.database.GetIdentityByID(ctx, u) + if err != nil { + return nil, err + } + if o == nil { + return nil, i18n.NewError(ctx, i18n.Msg404NotFound) + } + if o.Type != fftypes.IdentityTypeOrg { + log.L(ctx).Warnf("Identity '%s' (%s) is not an org identity", o.DID, o.ID) + return nil, nil + } + return o, nil } -func (nm *networkMap) GetOrganizations(ctx context.Context, filter database.AndFilter) ([]*fftypes.Organization, *database.FilterResult, error) { - return nm.database.GetOrganizations(ctx, filter) +func (nm *networkMap) GetOrganizations(ctx context.Context, filter database.AndFilter) ([]*fftypes.Identity, *database.FilterResult, error) { + filter.Condition(filter.Builder().Eq("type", fftypes.IdentityTypeOrg)) + filter.Condition(filter.Builder().Eq("namespace", fftypes.SystemNamespace)) + return nm.database.GetIdentities(ctx, filter) } -func (nm *networkMap) GetNodeByID(ctx context.Context, id string) (*fftypes.Node, error) { +func (nm *networkMap) GetNodeByID(ctx context.Context, id string) (*fftypes.Identity, error) { u, err := fftypes.ParseUUID(ctx, id) if err != nil { return nil, err } - return nm.database.GetNodeByID(ctx, u) + n, err := nm.database.GetIdentityByID(ctx, u) + if err != nil { + return nil, err + } + if n == nil { + return nil, i18n.NewError(ctx, i18n.Msg404NotFound) + } + if n.Type != fftypes.IdentityTypeNode { + log.L(ctx).Warnf("Identity '%s' (%s) is not a node identity", n.DID, n.ID) + return nil, nil + } + return n, nil +} + +func (nm *networkMap) GetNodes(ctx context.Context, filter database.AndFilter) ([]*fftypes.Identity, *database.FilterResult, error) { + filter.Condition(filter.Builder().Eq("type", fftypes.IdentityTypeNode)) + filter.Condition(filter.Builder().Eq("namespace", 
fftypes.SystemNamespace)) + return nm.database.GetIdentities(ctx, filter) +} + +func (nm *networkMap) GetIdentityByID(ctx context.Context, ns, id string) (*fftypes.Identity, error) { + u, err := fftypes.ParseUUID(ctx, id) + if err != nil { + return nil, err + } + identity, err := nm.database.GetIdentityByID(ctx, u) + if err != nil { + return nil, err + } + if identity == nil || identity.Namespace != ns { + return nil, i18n.NewError(ctx, i18n.Msg404NotFound) + } + return identity, nil +} + +func (nm *networkMap) GetIdentities(ctx context.Context, ns string, filter database.AndFilter) ([]*fftypes.Identity, *database.FilterResult, error) { + filter.Condition(filter.Builder().Eq("namespace", ns)) + return nm.database.GetIdentities(ctx, filter) +} + +func (nm *networkMap) GetIdentityVerifiers(ctx context.Context, ns, id string, filter database.AndFilter) ([]*fftypes.Verifier, *database.FilterResult, error) { + identity, err := nm.GetIdentityByID(ctx, ns, id) + if err != nil { + return nil, nil, err + } + filter.Condition(filter.Builder().Eq("identity", identity.ID)) + return nm.database.GetVerifiers(ctx, filter) +} + +func (nm *networkMap) GetDIDDocForIndentityByID(ctx context.Context, ns, id string) (*DIDDocument, error) { + identity, err := nm.GetIdentityByID(ctx, ns, id) + if err != nil { + return nil, err + } + return nm.generateDIDDocument(ctx, identity) +} + +func (nm *networkMap) GetVerifierByHash(ctx context.Context, ns, hash string) (*fftypes.Verifier, error) { + b32, err := fftypes.ParseBytes32(ctx, hash) + if err != nil { + return nil, err + } + verifier, err := nm.database.GetVerifierByHash(ctx, b32) + if err != nil { + return nil, err + } + if verifier == nil || verifier.Namespace != ns { + return nil, i18n.NewError(ctx, i18n.Msg404NotFound) + } + return verifier, nil } -func (nm *networkMap) GetNodes(ctx context.Context, filter database.AndFilter) ([]*fftypes.Node, *database.FilterResult, error) { - return nm.database.GetNodes(ctx, filter) +func (nm *networkMap) GetVerifiers(ctx context.Context, ns string, filter database.AndFilter) ([]*fftypes.Verifier, *database.FilterResult, error) { + filter.Condition(filter.Builder().Eq("namespace", ns)) + return nm.database.GetVerifiers(ctx, filter) } diff --git a/internal/networkmap/data_query_test.go b/internal/networkmap/data_query_test.go index c30f8aa851..27cdb08b35 100644 --- a/internal/networkmap/data_query_test.go +++ b/internal/networkmap/data_query_test.go @@ -17,6 +17,7 @@ package networkmap import ( + "fmt" "testing" "github.com/hyperledger/firefly/mocks/databasemocks" @@ -30,12 +31,42 @@ func TestGetOrganizationByIDOk(t *testing.T) { nm, cancel := newTestNetworkmap(t) defer cancel() id := fftypes.NewUUID() - nm.database.(*databasemocks.Plugin).On("GetOrganizationByID", nm.ctx, id).Return(&fftypes.Organization{ID: id}, nil) + nm.database.(*databasemocks.Plugin).On("GetIdentityByID", nm.ctx, id). + Return(&fftypes.Identity{IdentityBase: fftypes.IdentityBase{ID: id, Type: fftypes.IdentityTypeOrg}}, nil) res, err := nm.GetOrganizationByID(nm.ctx, id.String()) assert.NoError(t, err) assert.Equal(t, *id, *res.ID) } +func TestGetOrganizationByIDNotOrg(t *testing.T) { + nm, cancel := newTestNetworkmap(t) + defer cancel() + id := fftypes.NewUUID() + nm.database.(*databasemocks.Plugin).On("GetIdentityByID", nm.ctx, id). 
+ Return(&fftypes.Identity{IdentityBase: fftypes.IdentityBase{ID: id, Type: fftypes.IdentityTypeNode}}, nil) + res, err := nm.GetOrganizationByID(nm.ctx, id.String()) + assert.NoError(t, err) + assert.Nil(t, res) +} + +func TestGetOrganizationByIDNotFound(t *testing.T) { + nm, cancel := newTestNetworkmap(t) + defer cancel() + id := fftypes.NewUUID() + nm.database.(*databasemocks.Plugin).On("GetIdentityByID", nm.ctx, id).Return(nil, nil) + _, err := nm.GetOrganizationByID(nm.ctx, id.String()) + assert.Regexp(t, "FF10109", err) +} + +func TestGetOrganizationByIDError(t *testing.T) { + nm, cancel := newTestNetworkmap(t) + defer cancel() + id := fftypes.NewUUID() + nm.database.(*databasemocks.Plugin).On("GetIdentityByID", nm.ctx, id).Return(nil, fmt.Errorf("pop")) + _, err := nm.GetOrganizationByID(nm.ctx, id.String()) + assert.Regexp(t, "pop", err) +} + func TestGetOrganizationByIDBadUUID(t *testing.T) { nm, cancel := newTestNetworkmap(t) defer cancel() @@ -47,12 +78,42 @@ func TestGetNodeByIDOk(t *testing.T) { nm, cancel := newTestNetworkmap(t) defer cancel() id := fftypes.NewUUID() - nm.database.(*databasemocks.Plugin).On("GetNodeByID", nm.ctx, id).Return(&fftypes.Node{ID: id}, nil) + nm.database.(*databasemocks.Plugin).On("GetIdentityByID", nm.ctx, id). + Return(&fftypes.Identity{IdentityBase: fftypes.IdentityBase{ID: id, Type: fftypes.IdentityTypeNode}}, nil) res, err := nm.GetNodeByID(nm.ctx, id.String()) assert.NoError(t, err) assert.Equal(t, *id, *res.ID) } +func TestGetNodeByIDWrongType(t *testing.T) { + nm, cancel := newTestNetworkmap(t) + defer cancel() + id := fftypes.NewUUID() + nm.database.(*databasemocks.Plugin).On("GetIdentityByID", nm.ctx, id). + Return(&fftypes.Identity{IdentityBase: fftypes.IdentityBase{ID: id, Type: fftypes.IdentityTypeOrg}}, nil) + res, err := nm.GetNodeByID(nm.ctx, id.String()) + assert.NoError(t, err) + assert.Nil(t, res) +} + +func TestGetNodeByIDNotFound(t *testing.T) { + nm, cancel := newTestNetworkmap(t) + defer cancel() + id := fftypes.NewUUID() + nm.database.(*databasemocks.Plugin).On("GetIdentityByID", nm.ctx, id).Return(nil, nil) + _, err := nm.GetNodeByID(nm.ctx, id.String()) + assert.Regexp(t, "FF10109", err) +} + +func TestGetNodeByIDError(t *testing.T) { + nm, cancel := newTestNetworkmap(t) + defer cancel() + id := fftypes.NewUUID() + nm.database.(*databasemocks.Plugin).On("GetIdentityByID", nm.ctx, id).Return(nil, fmt.Errorf("pop")) + _, err := nm.GetNodeByID(nm.ctx, id.String()) + assert.Regexp(t, "pop", err) +} + func TestGetNodeByIDBadUUID(t *testing.T) { nm, cancel := newTestNetworkmap(t) defer cancel() @@ -63,8 +124,8 @@ func TestGetNodeByIDBadUUID(t *testing.T) { func TestGetOrganizations(t *testing.T) { nm, cancel := newTestNetworkmap(t) defer cancel() - nm.database.(*databasemocks.Plugin).On("GetOrganizations", nm.ctx, mock.Anything).Return([]*fftypes.Organization{}, nil, nil) - res, _, err := nm.GetOrganizations(nm.ctx, database.OrganizationQueryFactory.NewFilter(nm.ctx).And()) + nm.database.(*databasemocks.Plugin).On("GetIdentities", nm.ctx, mock.Anything).Return([]*fftypes.Identity{}, nil, nil) + res, _, err := nm.GetOrganizations(nm.ctx, database.IdentityQueryFactory.NewFilter(nm.ctx).And()) assert.NoError(t, err) assert.Empty(t, res) } @@ -72,8 +133,140 @@ func TestGetOrganizations(t *testing.T) { func TestGetNodes(t *testing.T) { nm, cancel := newTestNetworkmap(t) defer cancel() - nm.database.(*databasemocks.Plugin).On("GetNodes", nm.ctx, mock.Anything).Return([]*fftypes.Node{}, nil, nil) - res, _, err := nm.GetNodes(nm.ctx, 
database.NodeQueryFactory.NewFilter(nm.ctx).And()) + nm.database.(*databasemocks.Plugin).On("GetIdentities", nm.ctx, mock.Anything).Return([]*fftypes.Identity{}, nil, nil) + res, _, err := nm.GetNodes(nm.ctx, database.IdentityQueryFactory.NewFilter(nm.ctx).And()) assert.NoError(t, err) assert.Empty(t, res) } + +func TestGetIdentityByIDOk(t *testing.T) { + nm, cancel := newTestNetworkmap(t) + defer cancel() + id := fftypes.NewUUID() + nm.database.(*databasemocks.Plugin).On("GetIdentityByID", nm.ctx, id). + Return(&fftypes.Identity{IdentityBase: fftypes.IdentityBase{ID: id, Type: fftypes.IdentityTypeOrg, Namespace: "ns1"}}, nil) + res, err := nm.GetIdentityByID(nm.ctx, "ns1", id.String()) + assert.NoError(t, err) + assert.Equal(t, *id, *res.ID) +} + +func TestGetIdentityByIDNotFound(t *testing.T) { + nm, cancel := newTestNetworkmap(t) + defer cancel() + id := fftypes.NewUUID() + nm.database.(*databasemocks.Plugin).On("GetIdentityByID", nm.ctx, id).Return(nil, nil) + _, err := nm.GetIdentityByID(nm.ctx, "ns1", id.String()) + assert.Regexp(t, "FF10109", err) +} + +func TestGetIdentityByIDError(t *testing.T) { + nm, cancel := newTestNetworkmap(t) + defer cancel() + id := fftypes.NewUUID() + nm.database.(*databasemocks.Plugin).On("GetIdentityByID", nm.ctx, id).Return(nil, fmt.Errorf("pop")) + _, err := nm.GetIdentityByID(nm.ctx, "ns1", id.String()) + assert.Regexp(t, "pop", err) +} + +func TestGetIdentityByIDBadNS(t *testing.T) { + nm, cancel := newTestNetworkmap(t) + defer cancel() + id := fftypes.NewUUID() + nm.database.(*databasemocks.Plugin).On("GetIdentityByID", nm.ctx, id). + Return(&fftypes.Identity{IdentityBase: fftypes.IdentityBase{ID: id, Type: fftypes.IdentityTypeOrg, Namespace: "ns1"}}, nil) + _, err := nm.GetIdentityByID(nm.ctx, "ns2", id.String()) + assert.Regexp(t, "FF10109", err) +} + +func TestGetIdentityByIDBadUUID(t *testing.T) { + nm, cancel := newTestNetworkmap(t) + defer cancel() + _, err := nm.GetIdentityByID(nm.ctx, "ns1", "bad") + assert.Regexp(t, "FF10142", err) +} + +func TestGetIdentities(t *testing.T) { + nm, cancel := newTestNetworkmap(t) + defer cancel() + nm.database.(*databasemocks.Plugin).On("GetIdentities", nm.ctx, mock.Anything).Return([]*fftypes.Identity{}, nil, nil) + res, _, err := nm.GetIdentities(nm.ctx, "ns1", database.IdentityQueryFactory.NewFilter(nm.ctx).And()) + assert.NoError(t, err) + assert.Empty(t, res) +} + +func TestGetIdentityVerifiers(t *testing.T) { + nm, cancel := newTestNetworkmap(t) + defer cancel() + id := fftypes.NewUUID() + nm.database.(*databasemocks.Plugin).On("GetIdentityByID", nm.ctx, id). 
+ Return(&fftypes.Identity{IdentityBase: fftypes.IdentityBase{ID: id, Type: fftypes.IdentityTypeOrg, Namespace: "ns1"}}, nil) + nm.database.(*databasemocks.Plugin).On("GetVerifiers", nm.ctx, mock.Anything).Return([]*fftypes.Verifier{}, nil, nil) + res, _, err := nm.GetIdentityVerifiers(nm.ctx, "ns1", id.String(), database.IdentityQueryFactory.NewFilter(nm.ctx).And()) + assert.NoError(t, err) + assert.Empty(t, res) +} + +func TestGetIdentityVerifiersIdentityFail(t *testing.T) { + nm, cancel := newTestNetworkmap(t) + defer cancel() + id := fftypes.NewUUID() + nm.database.(*databasemocks.Plugin).On("GetIdentityByID", nm.ctx, id).Return(nil, fmt.Errorf("pop")) + res, _, err := nm.GetIdentityVerifiers(nm.ctx, "ns1", id.String(), database.IdentityQueryFactory.NewFilter(nm.ctx).And()) + assert.Regexp(t, "pop", err) + assert.Empty(t, res) +} + +func TestGetVerifiers(t *testing.T) { + nm, cancel := newTestNetworkmap(t) + defer cancel() + nm.database.(*databasemocks.Plugin).On("GetVerifiers", nm.ctx, mock.Anything).Return([]*fftypes.Verifier{}, nil, nil) + res, _, err := nm.GetVerifiers(nm.ctx, "ns1", database.VerifierQueryFactory.NewFilter(nm.ctx).And()) + assert.NoError(t, err) + assert.Empty(t, res) +} + +func TestGetVerifierByHashOk(t *testing.T) { + nm, cancel := newTestNetworkmap(t) + defer cancel() + hash := fftypes.NewRandB32() + nm.database.(*databasemocks.Plugin).On("GetVerifierByHash", nm.ctx, hash). + Return(&fftypes.Verifier{Hash: hash, Namespace: "ns1"}, nil) + res, err := nm.GetVerifierByHash(nm.ctx, "ns1", hash.String()) + assert.NoError(t, err) + assert.Equal(t, *hash, *res.Hash) +} + +func TestGetVerifierByHashNotFound(t *testing.T) { + nm, cancel := newTestNetworkmap(t) + defer cancel() + hash := fftypes.NewRandB32() + nm.database.(*databasemocks.Plugin).On("GetVerifierByHash", nm.ctx, hash).Return(nil, nil) + _, err := nm.GetVerifierByHash(nm.ctx, "ns1", hash.String()) + assert.Regexp(t, "FF10109", err) +} + +func TestGetVerifierByHashError(t *testing.T) { + nm, cancel := newTestNetworkmap(t) + defer cancel() + hash := fftypes.NewRandB32() + nm.database.(*databasemocks.Plugin).On("GetVerifierByHash", nm.ctx, hash).Return(nil, fmt.Errorf("pop")) + _, err := nm.GetVerifierByHash(nm.ctx, "ns1", hash.String()) + assert.Regexp(t, "pop", err) +} + +func TestGetVerifierByHashBadNS(t *testing.T) { + nm, cancel := newTestNetworkmap(t) + defer cancel() + hash := fftypes.NewRandB32() + nm.database.(*databasemocks.Plugin).On("GetVerifierByHash", nm.ctx, hash). + Return(&fftypes.Verifier{Hash: hash, Namespace: "ns1"}, nil) + _, err := nm.GetVerifierByHash(nm.ctx, "ns2", hash.String()) + assert.Regexp(t, "FF10109", err) +} + +func TestGetVerifierByHashBadUUID(t *testing.T) { + nm, cancel := newTestNetworkmap(t) + defer cancel() + _, err := nm.GetVerifierByHash(nm.ctx, "ns1", "bad") + assert.Regexp(t, "FF10232", err) +} diff --git a/internal/networkmap/did.go b/internal/networkmap/did.go new file mode 100644 index 0000000000..eb85f117f9 --- /dev/null +++ b/internal/networkmap/did.go @@ -0,0 +1,116 @@ +// Copyright © 2022 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package networkmap
+
+import (
+    "context"
+    "fmt"
+
+    "github.com/hyperledger/firefly/internal/log"
+    "github.com/hyperledger/firefly/pkg/database"
+    "github.com/hyperledger/firefly/pkg/fftypes"
+)
+
+// DIDDocument - see https://www.w3.org/TR/did-core/#core-properties
+type DIDDocument struct {
+    Context             []string              `json:"@context"`
+    ID                  string                `json:"id"`
+    Authentication      []string              `json:"authentication"`
+    VerificationMethods []*VerificationMethod `json:"verificationMethod"`
+}
+
+type VerificationMethod struct {
+    ID         string `json:"id"`
+    Type       string `json:"type"`
+    Controller string `json:"controller"`
+    // Controller specific fields
+    BlockchainAccountID string `json:"blockchainAcountId,omitempty"`
+    MSPIdentityString   string `json:"mspIdentityString,omitempty"`
+    DataExchangePeerID  string `json:"dataExchangePeerID,omitempty"`
+}
+
+func (nm *networkMap) generateDIDDocument(ctx context.Context, identity *fftypes.Identity) (doc *DIDDocument, err error) {
+
+    fb := database.VerifierQueryFactory.NewFilter(ctx)
+    filter := fb.And(
+        fb.Eq("identity", identity.ID),
+        fb.Eq("namespace", identity.Namespace),
+    )
+    verifiers, _, err := nm.database.GetVerifiers(ctx, filter)
+    if err != nil {
+        return nil, err
+    }
+
+    doc = &DIDDocument{
+        Context: []string{
+            "https://www.w3.org/ns/did/v1",
+            "https://w3id.org/security/suites/ed25519-2020/v1",
+        },
+        ID: identity.DID,
+    }
+    doc.VerificationMethods = make([]*VerificationMethod, 0, len(verifiers))
+    doc.Authentication = make([]string, 0, len(verifiers))
+    for _, verifier := range verifiers {
+        vm := nm.generateDIDAuthentication(ctx, identity, verifier)
+        if vm != nil {
+            doc.VerificationMethods = append(doc.VerificationMethods, vm)
+            doc.Authentication = append(doc.Authentication, fmt.Sprintf("#%s", verifier.Hash.String()))
+        }
+    }
+    return doc, nil
+}
+
+func (nm *networkMap) generateDIDAuthentication(ctx context.Context, identity *fftypes.Identity, verifier *fftypes.Verifier) *VerificationMethod {
+    switch verifier.Type {
+    case fftypes.VerifierTypeEthAddress:
+        return nm.generateEthAddressVerifier(identity, verifier)
+    case fftypes.VerifierTypeMSPIdentity:
+        return nm.generateMSPVerifier(identity, verifier)
+    case fftypes.VerifierTypeFFDXPeerID:
+        return nm.generateDXPeerIDVerifier(identity, verifier)
+    default:
+        log.L(ctx).Warnf("Unknown verifier type '%s' on verifier '%s' of DID '%s' (%s) - cannot add to DID document", verifier.Type, verifier.Value, identity.DID, identity.ID)
+        return nil
+    }
+}
+
+func (nm *networkMap) generateEthAddressVerifier(identity *fftypes.Identity, verifier *fftypes.Verifier) *VerificationMethod {
+    return &VerificationMethod{
+        ID:                  verifier.Hash.String(),
+        Type:                "EcdsaSecp256k1VerificationKey2019",
+        Controller:          identity.DID,
+        BlockchainAccountID: verifier.Value,
+    }
+}
+
+func (nm *networkMap) generateMSPVerifier(identity *fftypes.Identity, verifier *fftypes.Verifier) *VerificationMethod {
+    return &VerificationMethod{
+        ID:                verifier.Hash.String(),
+        Type:              "HyperledgerFabricMSPIdentity",
+        Controller:        identity.DID,
+        MSPIdentityString: verifier.Value,
+    }
+}
+
+func (nm *networkMap)
generateDXPeerIDVerifier(identity *fftypes.Identity, verifier *fftypes.Verifier) *VerificationMethod { + return &VerificationMethod{ + ID: verifier.Hash.String(), + Type: "FireFlyDataExchangePeerIdentity", + Controller: identity.DID, + DataExchangePeerID: verifier.Value, + } +} diff --git a/internal/networkmap/did_test.go b/internal/networkmap/did_test.go new file mode 100644 index 0000000000..7e96aa9bec --- /dev/null +++ b/internal/networkmap/did_test.go @@ -0,0 +1,144 @@ +// Copyright © 2021 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package networkmap + +import ( + "fmt" + "testing" + + "github.com/hyperledger/firefly/mocks/databasemocks" + "github.com/hyperledger/firefly/pkg/fftypes" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +func TestDIDGenerationOK(t *testing.T) { + nm, cancel := newTestNetworkmap(t) + defer cancel() + + org1 := testOrg("org1") + + verifierEth := (&fftypes.Verifier{ + Identity: org1.ID, + Namespace: org1.Namespace, + VerifierRef: fftypes.VerifierRef{ + Type: fftypes.VerifierTypeEthAddress, + Value: "0xc90d94dE1021fD17fAA2F1FC4F4D36Dff176120d", + }, + Created: fftypes.Now(), + }).Seal() + verifierMSP := (&fftypes.Verifier{ + Identity: org1.ID, + Namespace: org1.Namespace, + VerifierRef: fftypes.VerifierRef{ + Type: fftypes.VerifierTypeMSPIdentity, + Value: "mspIdForAcme::x509::CN=fabric-ca::CN=user1", + }, + Created: fftypes.Now(), + }).Seal() + verifierDX := (&fftypes.Verifier{ + Identity: org1.ID, + Namespace: org1.Namespace, + VerifierRef: fftypes.VerifierRef{ + Type: fftypes.VerifierTypeFFDXPeerID, + Value: "peer1", + }, + Created: fftypes.Now(), + }).Seal() + verifierUnknown := (&fftypes.Verifier{ + Identity: org1.ID, + Namespace: org1.Namespace, + VerifierRef: fftypes.VerifierRef{ + Type: fftypes.VerifierType("unknown"), + Value: "ignore me", + }, + Created: fftypes.Now(), + }).Seal() + + mdi := nm.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByID", nm.ctx, mock.Anything).Return(org1, nil) + mdi.On("GetVerifiers", nm.ctx, mock.Anything).Return([]*fftypes.Verifier{ + verifierEth, + verifierMSP, + verifierDX, + verifierUnknown, + }, nil, nil) + + doc, err := nm.GetDIDDocForIndentityByID(nm.ctx, org1.Namespace, org1.ID.String()) + assert.NoError(t, err) + assert.Equal(t, &DIDDocument{ + Context: []string{ + "https://www.w3.org/ns/did/v1", + "https://w3id.org/security/suites/ed25519-2020/v1", + }, + ID: org1.DID, + VerificationMethods: []*VerificationMethod{ + { + ID: verifierEth.Hash.String(), + Type: "EcdsaSecp256k1VerificationKey2019", + Controller: org1.DID, + BlockchainAccountID: verifierEth.Value, + }, + { + ID: verifierMSP.Hash.String(), + Type: "HyperledgerFabricMSPIdentity", + Controller: org1.DID, + MSPIdentityString: verifierMSP.Value, + }, + { + ID: verifierDX.Hash.String(), + Type: "FireFlyDataExchangePeerIdentity", + Controller: org1.DID, + DataExchangePeerID: verifierDX.Value, + }, + }, + Authentication: []string{ + 
fmt.Sprintf("#%s", verifierEth.Hash.String()), + fmt.Sprintf("#%s", verifierMSP.Hash.String()), + fmt.Sprintf("#%s", verifierDX.Hash.String()), + }, + }, doc) + + mdi.AssertExpectations(t) +} + +func TestDIDGenerationGetVerifiersFail(t *testing.T) { + nm, cancel := newTestNetworkmap(t) + defer cancel() + + org1 := testOrg("org1") + + mdi := nm.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByID", nm.ctx, mock.Anything).Return(org1, nil) + mdi.On("GetVerifiers", nm.ctx, mock.Anything).Return(nil, nil, fmt.Errorf("pop")) + + _, err := nm.GetDIDDocForIndentityByID(nm.ctx, org1.Namespace, org1.ID.String()) + assert.Regexp(t, "pop", err) +} + +func TestDIDGenerationGetIdentityFail(t *testing.T) { + nm, cancel := newTestNetworkmap(t) + defer cancel() + + org1 := testOrg("org1") + + mdi := nm.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByID", nm.ctx, mock.Anything).Return(nil, fmt.Errorf("pop")) + + _, err := nm.GetDIDDocForIndentityByID(nm.ctx, org1.Namespace, org1.ID.String()) + assert.Regexp(t, "pop", err) +} diff --git a/internal/networkmap/manager.go b/internal/networkmap/manager.go index aedb597663..6570fd7736 100644 --- a/internal/networkmap/manager.go +++ b/internal/networkmap/manager.go @@ -1,4 +1,4 @@ -// Copyright © 2021 Kaleido, Inc. +// Copyright © 2022 Kaleido, Inc. // // SPDX-License-Identifier: Apache-2.0 // @@ -22,20 +22,29 @@ import ( "github.com/hyperledger/firefly/internal/broadcast" "github.com/hyperledger/firefly/internal/i18n" "github.com/hyperledger/firefly/internal/identity" + "github.com/hyperledger/firefly/internal/syncasync" "github.com/hyperledger/firefly/pkg/database" "github.com/hyperledger/firefly/pkg/dataexchange" "github.com/hyperledger/firefly/pkg/fftypes" ) type Manager interface { - RegisterOrganization(ctx context.Context, org *fftypes.Organization, waitConfirm bool) (msg *fftypes.Message, err error) - RegisterNode(ctx context.Context, waitConfirm bool) (node *fftypes.Node, msg *fftypes.Message, err error) - RegisterNodeOrganization(ctx context.Context, waitConfirm bool) (org *fftypes.Organization, msg *fftypes.Message, err error) - - GetOrganizationByID(ctx context.Context, id string) (*fftypes.Organization, error) - GetOrganizations(ctx context.Context, filter database.AndFilter) ([]*fftypes.Organization, *database.FilterResult, error) - GetNodeByID(ctx context.Context, id string) (*fftypes.Node, error) - GetNodes(ctx context.Context, filter database.AndFilter) ([]*fftypes.Node, *database.FilterResult, error) + RegisterOrganization(ctx context.Context, org *fftypes.IdentityCreateDTO, waitConfirm bool) (identity *fftypes.Identity, err error) + RegisterNode(ctx context.Context, waitConfirm bool) (node *fftypes.Identity, err error) + RegisterNodeOrganization(ctx context.Context, waitConfirm bool) (org *fftypes.Identity, err error) + RegisterIdentity(ctx context.Context, ns string, dto *fftypes.IdentityCreateDTO, waitConfirm bool) (identity *fftypes.Identity, err error) + UpdateIdentity(ctx context.Context, ns string, id string, dto *fftypes.IdentityUpdateDTO, waitConfirm bool) (identity *fftypes.Identity, err error) + + GetOrganizationByID(ctx context.Context, id string) (*fftypes.Identity, error) + GetOrganizations(ctx context.Context, filter database.AndFilter) ([]*fftypes.Identity, *database.FilterResult, error) + GetNodeByID(ctx context.Context, id string) (*fftypes.Identity, error) + GetNodes(ctx context.Context, filter database.AndFilter) ([]*fftypes.Identity, *database.FilterResult, error) + GetIdentityByID(ctx context.Context, ns 
string, id string) (*fftypes.Identity, error) + GetIdentities(ctx context.Context, ns string, filter database.AndFilter) ([]*fftypes.Identity, *database.FilterResult, error) + GetIdentityVerifiers(ctx context.Context, ns, id string, filter database.AndFilter) ([]*fftypes.Verifier, *database.FilterResult, error) + GetVerifiers(ctx context.Context, ns string, filter database.AndFilter) ([]*fftypes.Verifier, *database.FilterResult, error) + GetVerifierByHash(ctx context.Context, ns, hash string) (*fftypes.Verifier, error) + GetDIDDocForIndentityByID(ctx context.Context, ns, id string) (*DIDDocument, error) } type networkMap struct { @@ -44,9 +53,10 @@ type networkMap struct { broadcast broadcast.Manager exchange dataexchange.Plugin identity identity.Manager + syncasync syncasync.Bridge } -func NewNetworkMap(ctx context.Context, di database.Plugin, bm broadcast.Manager, dx dataexchange.Plugin, im identity.Manager) (Manager, error) { +func NewNetworkMap(ctx context.Context, di database.Plugin, bm broadcast.Manager, dx dataexchange.Plugin, im identity.Manager, sa syncasync.Bridge) (Manager, error) { if di == nil || bm == nil || dx == nil || im == nil { return nil, i18n.NewError(ctx, i18n.MsgInitializationNilDepError) } @@ -57,6 +67,7 @@ func NewNetworkMap(ctx context.Context, di database.Plugin, bm broadcast.Manager broadcast: bm, exchange: dx, identity: im, + syncasync: sa, } return nm, nil } diff --git a/internal/networkmap/manager_test.go b/internal/networkmap/manager_test.go index 6988ed749a..03c078b400 100644 --- a/internal/networkmap/manager_test.go +++ b/internal/networkmap/manager_test.go @@ -25,6 +25,7 @@ import ( "github.com/hyperledger/firefly/mocks/databasemocks" "github.com/hyperledger/firefly/mocks/dataexchangemocks" "github.com/hyperledger/firefly/mocks/identitymanagermocks" + "github.com/hyperledger/firefly/mocks/syncasyncmocks" "github.com/stretchr/testify/assert" ) @@ -35,13 +36,14 @@ func newTestNetworkmap(t *testing.T) (*networkMap, func()) { mbm := &broadcastmocks.Manager{} mdx := &dataexchangemocks.Plugin{} mim := &identitymanagermocks.Manager{} - nm, err := NewNetworkMap(ctx, mdi, mbm, mdx, mim) + msa := &syncasyncmocks.Bridge{} + nm, err := NewNetworkMap(ctx, mdi, mbm, mdx, mim, msa) assert.NoError(t, err) return nm.(*networkMap), cancel } func TestNewNetworkMapMissingDep(t *testing.T) { - _, err := NewNetworkMap(context.Background(), nil, nil, nil, nil) + _, err := NewNetworkMap(context.Background(), nil, nil, nil, nil, nil) assert.Regexp(t, "FF10128", err) } diff --git a/internal/networkmap/register_identity.go b/internal/networkmap/register_identity.go new file mode 100644 index 0000000000..a2956abfe4 --- /dev/null +++ b/internal/networkmap/register_identity.go @@ -0,0 +1,124 @@ +// Copyright © 2022 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package networkmap + +import ( + "context" + + "github.com/hyperledger/firefly/internal/i18n" + "github.com/hyperledger/firefly/pkg/fftypes" +) + +func (nm *networkMap) RegisterIdentity(ctx context.Context, ns string, dto *fftypes.IdentityCreateDTO, waitConfirm bool) (identity *fftypes.Identity, err error) { + + // Parse the input DTO + identity = &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + Namespace: ns, + Name: dto.Name, + Type: dto.Type, + Parent: dto.Parent, + }, + IdentityProfile: fftypes.IdentityProfile{ + Description: dto.Description, + Profile: dto.Profile, + }, + } + + // Set defaults + if identity.Namespace == fftypes.SystemNamespace || identity.Namespace == "" { + identity.Namespace = fftypes.SystemNamespace + if identity.Type == "" { + identity.Type = fftypes.IdentityTypeOrg + } + } else if identity.Type == "" { + identity.Type = fftypes.IdentityTypeCustom + } + + identity.DID, _ = identity.GenerateDID(ctx) + + // Verify the chain + immediateParent, _, err := nm.identity.VerifyIdentityChain(ctx, identity) + if err != nil { + return nil, err + } + + // Resolve if we need to perform a validation + var parentSigner *fftypes.SignerRef + if immediateParent != nil { + parentSigner, err = nm.identity.ResolveIdentitySigner(ctx, immediateParent) + if err != nil { + return nil, err + } + } + + // Determine claim signer + var claimSigner *fftypes.SignerRef + if dto.Type == fftypes.IdentityTypeNode { + // Nodes are special - as they need the claim to be signed directly by the parent + claimSigner = parentSigner + parentSigner = nil + } else { + if dto.Key == "" { + return nil, i18n.NewError(ctx, i18n.MsgBlockchainKeyNotSet) + } + claimSigner = &fftypes.SignerRef{ + Key: dto.Key, + } + claimSigner.Author = identity.DID + } + + if waitConfirm { + return nm.syncasync.WaitForIdentity(ctx, identity.Namespace, identity.ID, func(ctx context.Context) error { + return nm.sendIdentityRequest(ctx, identity, claimSigner, parentSigner) + }) + } + err = nm.sendIdentityRequest(ctx, identity, claimSigner, parentSigner) + if err != nil { + return nil, err + } + return identity, nil +} + +func (nm *networkMap) sendIdentityRequest(ctx context.Context, identity *fftypes.Identity, claimSigner *fftypes.SignerRef, parentSigner *fftypes.SignerRef) error { + + // Send the claim - we disable the check on the DID author here, as we are registering the identity so it will not exist + claimMsg, err := nm.broadcast.BroadcastIdentityClaim(ctx, identity.Namespace, &fftypes.IdentityClaim{ + Identity: identity, + }, claimSigner, fftypes.SystemTagIdentityClaim, false) + if err != nil { + return err + } + identity.Messages.Claim = claimMsg.Header.ID + + // Send the verification if one is required. + if parentSigner != nil { + verifyMsg, err := nm.broadcast.BroadcastDefinition(ctx, identity.Namespace, &fftypes.IdentityVerification{ + Claim: fftypes.MessageRef{ + ID: claimMsg.Header.ID, + Hash: claimMsg.Hash, + }, + Identity: identity.IdentityBase, + }, parentSigner, fftypes.SystemTagIdentityVerification, false) + if err != nil { + return err + } + identity.Messages.Verification = verifyMsg.Header.ID + } + return nil +} diff --git a/internal/networkmap/register_identity_test.go b/internal/networkmap/register_identity_test.go new file mode 100644 index 0000000000..1ae41c53a7 --- /dev/null +++ b/internal/networkmap/register_identity_test.go @@ -0,0 +1,257 @@ +// Copyright © 2021 Kaleido, Inc. 
+// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package networkmap + +import ( + "context" + "fmt" + "testing" + + "github.com/hyperledger/firefly/internal/syncasync" + "github.com/hyperledger/firefly/mocks/broadcastmocks" + "github.com/hyperledger/firefly/mocks/identitymanagermocks" + "github.com/hyperledger/firefly/mocks/syncasyncmocks" + "github.com/hyperledger/firefly/pkg/fftypes" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +func TestRegisterIdentityOrgWithParentOk(t *testing.T) { + + nm, cancel := newTestNetworkmap(t) + defer cancel() + + parentIdentity := testOrg("parent1") + + mim := nm.identity.(*identitymanagermocks.Manager) + mim.On("VerifyIdentityChain", nm.ctx, mock.AnythingOfType("*fftypes.Identity")).Return(parentIdentity, false, nil) + mim.On("ResolveIdentitySigner", nm.ctx, parentIdentity).Return(&fftypes.SignerRef{ + Key: "0x23456", + }, nil) + + mockMsg1 := &fftypes.Message{Header: fftypes.MessageHeader{ID: fftypes.NewUUID()}} + mockMsg2 := &fftypes.Message{Header: fftypes.MessageHeader{ID: fftypes.NewUUID()}} + mbm := nm.broadcast.(*broadcastmocks.Manager) + + mbm.On("BroadcastIdentityClaim", nm.ctx, + fftypes.SystemNamespace, + mock.AnythingOfType("*fftypes.IdentityClaim"), + mock.MatchedBy(func(sr *fftypes.SignerRef) bool { + return sr.Key == "0x12345" + }), + fftypes.SystemTagIdentityClaim, false).Return(mockMsg1, nil) + + mbm.On("BroadcastDefinition", nm.ctx, + fftypes.SystemNamespace, + mock.AnythingOfType("*fftypes.IdentityVerification"), + mock.MatchedBy(func(sr *fftypes.SignerRef) bool { + return sr.Key == "0x23456" + }), + fftypes.SystemTagIdentityVerification, false).Return(mockMsg2, nil) + + org, err := nm.RegisterIdentity(nm.ctx, fftypes.SystemNamespace, &fftypes.IdentityCreateDTO{ + Name: "child1", + Key: "0x12345", + Parent: fftypes.NewUUID(), + }, false) + assert.NoError(t, err) + assert.Equal(t, *mockMsg1.Header.ID, *org.Messages.Claim) + assert.Equal(t, *mockMsg2.Header.ID, *org.Messages.Verification) + + mim.AssertExpectations(t) + mbm.AssertExpectations(t) +} + +func TestRegisterIdentityOrgWithParentWaitConfirmOk(t *testing.T) { + + nm, cancel := newTestNetworkmap(t) + defer cancel() + + parentIdentity := testOrg("parent1") + + mim := nm.identity.(*identitymanagermocks.Manager) + mim.On("VerifyIdentityChain", nm.ctx, mock.AnythingOfType("*fftypes.Identity")).Return(parentIdentity, false, nil) + mim.On("ResolveIdentitySigner", nm.ctx, parentIdentity).Return(&fftypes.SignerRef{ + Key: "0x23456", + }, nil) + + msa := nm.syncasync.(*syncasyncmocks.Bridge) + msa.On("WaitForIdentity", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Run(func(args mock.Arguments) { + ctx := args[0].(context.Context) + ns := args[1].(string) + id := args[2].(*fftypes.UUID) + assert.Equal(t, parentIdentity.Namespace, ns) + assert.NotNil(t, id) + cb := args[3].(syncasync.RequestSender) + err := cb(ctx) + assert.NoError(t, err) + }).Return(nil, nil) + + mockMsg1 := 
&fftypes.Message{Header: fftypes.MessageHeader{ID: fftypes.NewUUID()}} + mockMsg2 := &fftypes.Message{Header: fftypes.MessageHeader{ID: fftypes.NewUUID()}} + mbm := nm.broadcast.(*broadcastmocks.Manager) + + mbm.On("BroadcastIdentityClaim", nm.ctx, + fftypes.SystemNamespace, + mock.AnythingOfType("*fftypes.IdentityClaim"), + mock.MatchedBy(func(sr *fftypes.SignerRef) bool { + return sr.Key == "0x12345" + }), + fftypes.SystemTagIdentityClaim, false).Return(mockMsg1, nil) + + mbm.On("BroadcastDefinition", nm.ctx, + fftypes.SystemNamespace, + mock.AnythingOfType("*fftypes.IdentityVerification"), + mock.MatchedBy(func(sr *fftypes.SignerRef) bool { + return sr.Key == "0x23456" + }), + fftypes.SystemTagIdentityVerification, false).Return(mockMsg2, nil) + + _, err := nm.RegisterIdentity(nm.ctx, fftypes.SystemNamespace, &fftypes.IdentityCreateDTO{ + Name: "child1", + Key: "0x12345", + Parent: fftypes.NewUUID(), + }, true) + assert.NoError(t, err) + + mim.AssertExpectations(t) + mbm.AssertExpectations(t) + msa.AssertExpectations(t) +} + +func TestRegisterIdentityCustomWithParentFail(t *testing.T) { + + nm, cancel := newTestNetworkmap(t) + defer cancel() + + parentIdentity := testOrg("parent1") + + mim := nm.identity.(*identitymanagermocks.Manager) + mim.On("VerifyIdentityChain", nm.ctx, mock.AnythingOfType("*fftypes.Identity")).Return(parentIdentity, false, nil) + mim.On("ResolveIdentitySigner", nm.ctx, parentIdentity).Return(&fftypes.SignerRef{ + Key: "0x23456", + }, nil) + + mockMsg := &fftypes.Message{Header: fftypes.MessageHeader{ID: fftypes.NewUUID()}} + mbm := nm.broadcast.(*broadcastmocks.Manager) + + mbm.On("BroadcastIdentityClaim", nm.ctx, + "ns1", + mock.AnythingOfType("*fftypes.IdentityClaim"), + mock.MatchedBy(func(sr *fftypes.SignerRef) bool { + return sr.Key == "0x12345" + }), + fftypes.SystemTagIdentityClaim, false).Return(mockMsg, nil) + + mbm.On("BroadcastDefinition", nm.ctx, + "ns1", + mock.AnythingOfType("*fftypes.IdentityVerification"), + mock.MatchedBy(func(sr *fftypes.SignerRef) bool { + return sr.Key == "0x23456" + }), + fftypes.SystemTagIdentityVerification, false).Return(nil, fmt.Errorf("pop")) + + _, err := nm.RegisterIdentity(nm.ctx, "ns1", &fftypes.IdentityCreateDTO{ + Name: "custom1", + Key: "0x12345", + Parent: fftypes.NewUUID(), + }, false) + assert.Regexp(t, "pop", err) + + mim.AssertExpectations(t) + mbm.AssertExpectations(t) +} + +func TestRegisterIdentityGetParentMsgFail(t *testing.T) { + + nm, cancel := newTestNetworkmap(t) + defer cancel() + + parentIdentity := testOrg("parent1") + + mim := nm.identity.(*identitymanagermocks.Manager) + mim.On("VerifyIdentityChain", nm.ctx, mock.AnythingOfType("*fftypes.Identity")).Return(parentIdentity, false, nil) + mim.On("ResolveIdentitySigner", nm.ctx, parentIdentity).Return(nil, fmt.Errorf("pop")) + + _, err := nm.RegisterIdentity(nm.ctx, "ns1", &fftypes.IdentityCreateDTO{ + Name: "custom1", + Key: "0x12345", + Parent: fftypes.NewUUID(), + }, false) + assert.Regexp(t, "pop", err) + + mim.AssertExpectations(t) +} + +func TestRegisterIdentityRootBroadcastFail(t *testing.T) { + + nm, cancel := newTestNetworkmap(t) + defer cancel() + + mim := nm.identity.(*identitymanagermocks.Manager) + mim.On("VerifyIdentityChain", nm.ctx, mock.AnythingOfType("*fftypes.Identity")).Return(nil, false, nil) + + mbm := nm.broadcast.(*broadcastmocks.Manager) + mbm.On("BroadcastIdentityClaim", nm.ctx, + "ns1", + mock.AnythingOfType("*fftypes.IdentityClaim"), + mock.MatchedBy(func(sr *fftypes.SignerRef) bool { + return sr.Key == "0x12345" + }), + 
fftypes.SystemTagIdentityClaim, false).Return(nil, fmt.Errorf("pop")) + + _, err := nm.RegisterIdentity(nm.ctx, "ns1", &fftypes.IdentityCreateDTO{ + Name: "custom1", + Key: "0x12345", + }, false) + assert.Regexp(t, "pop", err) + + mim.AssertExpectations(t) + mbm.AssertExpectations(t) +} + +func TestRegisterIdentityMissingKey(t *testing.T) { + + nm, cancel := newTestNetworkmap(t) + defer cancel() + + mim := nm.identity.(*identitymanagermocks.Manager) + mim.On("VerifyIdentityChain", nm.ctx, mock.AnythingOfType("*fftypes.Identity")).Return(nil, false, nil) + + _, err := nm.RegisterIdentity(nm.ctx, "ns1", &fftypes.IdentityCreateDTO{ + Name: "custom1", + }, false) + assert.Regexp(t, "FF10352", err) + + mim.AssertExpectations(t) +} + +func TestRegisterIdentityVerifyFail(t *testing.T) { + + nm, cancel := newTestNetworkmap(t) + defer cancel() + + mim := nm.identity.(*identitymanagermocks.Manager) + mim.On("VerifyIdentityChain", nm.ctx, mock.AnythingOfType("*fftypes.Identity")).Return(nil, false, fmt.Errorf("pop")) + + _, err := nm.RegisterIdentity(nm.ctx, "ns1", &fftypes.IdentityCreateDTO{ + Name: "custom1", + }, false) + assert.Regexp(t, "pop", err) + + mim.AssertExpectations(t) +} diff --git a/internal/networkmap/register_node.go b/internal/networkmap/register_node.go index 64e02be401..f860f86b01 100644 --- a/internal/networkmap/register_node.go +++ b/internal/networkmap/register_node.go @@ -21,51 +21,35 @@ import ( "fmt" "github.com/hyperledger/firefly/internal/config" - "github.com/hyperledger/firefly/internal/i18n" "github.com/hyperledger/firefly/pkg/fftypes" ) -func (nm *networkMap) RegisterNode(ctx context.Context, waitConfirm bool) (node *fftypes.Node, msg *fftypes.Message, err error) { +func (nm *networkMap) RegisterNode(ctx context.Context, waitConfirm bool) (identity *fftypes.Identity, err error) { - localOrgSigningKey, err := nm.getLocalOrgSigningKey(ctx) + nodeOwningOrg, err := nm.identity.GetNodeOwnerOrg(ctx) if err != nil { - return nil, nil, err + return nil, err } - node = &fftypes.Node{ - ID: fftypes.NewUUID(), - Created: fftypes.Now(), - Owner: localOrgSigningKey, // TODO: Switch hierarchy to DID based, not signing key. 
Introducing an intermediate identity object - Name: config.GetString(config.NodeName), - Description: config.GetString(config.NodeDescription), + nodeRequest := &fftypes.IdentityCreateDTO{ + Parent: nodeOwningOrg.ID, + Name: config.GetString(config.NodeName), + Type: fftypes.IdentityTypeNode, + IdentityProfile: fftypes.IdentityProfile{ + Description: config.GetString(config.NodeDescription), + }, } - if node.Name == "" { - orgName := config.GetString(config.OrgName) - if orgName != "" { - node.Name = fmt.Sprintf("%s.node", orgName) + if nodeRequest.Name == "" { + if nodeOwningOrg.Name != "" { + nodeRequest.Name = fmt.Sprintf("%s.node", nodeOwningOrg.Name) } } - if node.Owner == "" || node.Name == "" { - return nil, nil, i18n.NewError(ctx, i18n.MsgNodeAndOrgIDMustBeSet) - } - - node.DX, err = nm.exchange.GetEndpointInfo(ctx) - if err != nil { - return nil, nil, err - } - err = node.Validate(ctx, false) + dxInfo, err := nm.exchange.GetEndpointInfo(ctx) if err != nil { - return nil, nil, err + return nil, err } + nodeRequest.Profile = dxInfo - if err = nm.findOrgsToRoot(ctx, "node", node.Name, node.Owner); err != nil { - return nil, nil, err - } - - msg, err = nm.broadcast.BroadcastDefinitionAsNode(ctx, fftypes.SystemNamespace, node, fftypes.SystemTagDefineNode, waitConfirm) - if msg != nil { - node.Message = msg.Header.ID - } - return node, msg, err + return nm.RegisterIdentity(ctx, fftypes.SystemNamespace, nodeRequest, waitConfirm) } diff --git a/internal/networkmap/register_node_test.go b/internal/networkmap/register_node_test.go index fa1af98eab..5c24f5e210 100644 --- a/internal/networkmap/register_node_test.go +++ b/internal/networkmap/register_node_test.go @@ -22,7 +22,6 @@ import ( "github.com/hyperledger/firefly/internal/config" "github.com/hyperledger/firefly/mocks/broadcastmocks" - "github.com/hyperledger/firefly/mocks/databasemocks" "github.com/hyperledger/firefly/mocks/dataexchangemocks" "github.com/hyperledger/firefly/mocks/identitymanagermocks" "github.com/hyperledger/firefly/pkg/fftypes" @@ -39,169 +38,68 @@ func TestRegisterNodeOk(t *testing.T) { config.Set(config.OrgName, "org1") config.Set(config.NodeDescription, "Node 1") - mdi := nm.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", nm.ctx, "0x23456").Return(&fftypes.Organization{ - Identity: "0x23456", - Description: "owning organization", - }, nil) + parentOrg := testOrg("org1") mim := nm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveSigningKey", nm.ctx, "0x23456").Return("0x23456", nil) + mim.On("GetNodeOwnerOrg", nm.ctx).Return(parentOrg, nil) + mim.On("VerifyIdentityChain", nm.ctx, mock.AnythingOfType("*fftypes.Identity")).Return(parentOrg, false, nil) + signerRef := &fftypes.SignerRef{Key: "0x23456"} + mim.On("ResolveIdentitySigner", nm.ctx, parentOrg).Return(signerRef, nil) mdx := nm.exchange.(*dataexchangemocks.Plugin) - mdx.On("GetEndpointInfo", nm.ctx).Return(fftypes.DXInfo{ - Peer: "peer1", - Endpoint: fftypes.JSONObject{"endpoint": "details"}, + mdx.On("GetEndpointInfo", nm.ctx).Return(fftypes.JSONObject{ + "id": "peer1", + "endpoint": "details", }, nil) mockMsg := &fftypes.Message{Header: fftypes.MessageHeader{ID: fftypes.NewUUID()}} mbm := nm.broadcast.(*broadcastmocks.Manager) - mbm.On("BroadcastDefinitionAsNode", nm.ctx, fftypes.SystemNamespace, mock.Anything, fftypes.SystemTagDefineNode, true).Return(mockMsg, nil) + mbm.On("BroadcastIdentityClaim", nm.ctx, + fftypes.SystemNamespace, + mock.AnythingOfType("*fftypes.IdentityClaim"), + signerRef, + 
fftypes.SystemTagIdentityClaim, false).Return(mockMsg, nil) - node, msg, err := nm.RegisterNode(nm.ctx, true) + node, err := nm.RegisterNode(nm.ctx, false) assert.NoError(t, err) - assert.Equal(t, mockMsg, msg) - assert.Equal(t, *mockMsg.Header.ID, *node.Message) + assert.Equal(t, *mockMsg.Header.ID, *node.Messages.Claim) } -func TestRegisterNodeBadParentID(t *testing.T) { +func TestRegisterNodePeerInfoFail(t *testing.T) { nm, cancel := newTestNetworkmap(t) defer cancel() config.Set(config.OrgKey, "0x23456") + config.Set(config.OrgName, "org1") config.Set(config.NodeDescription, "Node 1") - config.Set(config.NodeName, "node1") - mdi := nm.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", nm.ctx, "0x23456").Return(&fftypes.Organization{ - Identity: "0x23456", - Description: "owning organization", - }, nil) + parentOrg := testOrg("org1") mim := nm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveSigningKey", nm.ctx, "0x23456").Return("", fmt.Errorf("pop")) + mim.On("GetNodeOwnerOrg", nm.ctx).Return(parentOrg, nil) + mim.On("VerifyIdentityChain", nm.ctx, mock.AnythingOfType("*fftypes.Identity")).Return(parentOrg, false, nil) + signerRef := &fftypes.SignerRef{Key: "0x23456"} + mim.On("ResolveIdentitySigner", nm.ctx, parentOrg).Return(signerRef, nil) mdx := nm.exchange.(*dataexchangemocks.Plugin) - mdx.On("GetEndpointInfo", nm.ctx).Return("peer1", fftypes.JSONObject{"endpoint": "details"}, nil) + mdx.On("GetEndpointInfo", nm.ctx).Return(fftypes.JSONObject{}, fmt.Errorf("pop")) - _, _, err := nm.RegisterNode(nm.ctx, false) + _, err := nm.RegisterNode(nm.ctx, false) assert.Regexp(t, "pop", err) } -func TestRegisterNodeMissingNodeName(t *testing.T) { - - nm, cancel := newTestNetworkmap(t) - defer cancel() - - config.Set(config.OrgKey, "0x23456") - config.Set(config.NodeDescription, "Node 1") - - mdi := nm.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", nm.ctx, "0x23456").Return(&fftypes.Organization{ - Identity: "0x23456", - Description: "owning organization", - }, nil) - - mim := nm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveSigningKey", nm.ctx, "0x23456").Return("0x23456", nil) - - _, _, err := nm.RegisterNode(nm.ctx, false) - assert.Regexp(t, "FF10216", err) - -} -func TestRegisterNodeBadNodeID(t *testing.T) { - - nm, cancel := newTestNetworkmap(t) - defer cancel() - - config.Set(config.NodeDescription, "Node 1") - config.Set(config.NodeName, "node1") - - mdi := nm.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", nm.ctx, "0x23456").Return(&fftypes.Organization{ - Identity: "0x23456", - Description: "owning organization", - }, nil) - - mim := nm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveSigningKey", nm.ctx, "").Return("", nil) - - _, _, err := nm.RegisterNode(nm.ctx, false) - assert.Regexp(t, "FF10216", err) - -} - -func TestRegisterNodeParentNotFound(t *testing.T) { +func TestRegisterNodeGetOwnerFail(t *testing.T) { nm, cancel := newTestNetworkmap(t) defer cancel() - config.Set(config.OrgKey, "0x23456") - config.Set(config.NodeDescription, "Node 1") - config.Set(config.NodeName, "node1") - - mdi := nm.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", nm.ctx, "0x23456").Return(nil, nil) - - mim := nm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveSigningKey", nm.ctx, "0x23456").Return("0x23456", nil) - - mdx := nm.exchange.(*dataexchangemocks.Plugin) - mdx.On("GetEndpointInfo", nm.ctx).Return(fftypes.DXInfo{ - Peer: "peer1", - Endpoint: 
fftypes.JSONObject{"endpoint": "details"}, - }, nil) - - _, _, err := nm.RegisterNode(nm.ctx, false) - assert.Regexp(t, "FF10214", err) - -} - -func TestRegisterNodeParentBadNode(t *testing.T) { - - nm, cancel := newTestNetworkmap(t) - defer cancel() - - config.Set(config.OrgKey, "0x23456") - config.Set(config.NodeDescription, string(make([]byte, 4097))) - config.Set(config.NodeName, "node1") - - mim := nm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveSigningKey", nm.ctx, "0x23456").Return("0x23456", nil) - - mdx := nm.exchange.(*dataexchangemocks.Plugin) - mdx.On("GetEndpointInfo", nm.ctx).Return(fftypes.DXInfo{ - Peer: "peer1", - Endpoint: fftypes.JSONObject{"endpoint": "details"}, - }, nil) - - _, _, err := nm.RegisterNode(nm.ctx, false) - assert.Regexp(t, "FF10188", err) - -} - -func TestRegisterNodeParentDXEndpointFail(t *testing.T) { - - nm, cancel := newTestNetworkmap(t) - defer cancel() - - config.Set(config.OrgKey, "0x23456") - config.Set(config.NodeDescription, string(make([]byte, 4097))) - config.Set(config.NodeName, "node1") - - mdx := nm.exchange.(*dataexchangemocks.Plugin) - mdx.On("GetEndpointInfo", nm.ctx).Return(fftypes.DXInfo{ - Peer: "", - Endpoint: nil, - }, fmt.Errorf("pop")) - mim := nm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveSigningKey", nm.ctx, "0x23456").Return("0x23456", nil) + mim.On("GetNodeOwnerOrg", nm.ctx).Return(nil, fmt.Errorf("pop")) - _, _, err := nm.RegisterNode(nm.ctx, false) + _, err := nm.RegisterNode(nm.ctx, false) assert.Regexp(t, "pop", err) } diff --git a/internal/networkmap/register_org.go b/internal/networkmap/register_org.go index d3fa61f787..de56824bd9 100644 --- a/internal/networkmap/register_org.go +++ b/internal/networkmap/register_org.go @@ -1,4 +1,4 @@ -// Copyright © 2021 Kaleido, Inc. +// Copyright © 2022 Kaleido, Inc. // // SPDX-License-Identifier: Apache-2.0 // @@ -21,93 +21,31 @@ import ( "github.com/hyperledger/firefly/internal/config" "github.com/hyperledger/firefly/internal/i18n" - "github.com/hyperledger/firefly/internal/log" "github.com/hyperledger/firefly/pkg/fftypes" ) -func (nm *networkMap) findOrgsToRoot(ctx context.Context, idType, identity, parent string) (err error) { - - var root *fftypes.Organization - for parent != "" { - root, err = nm.database.GetOrganizationByIdentity(ctx, parent) - if err != nil { - return err - } - if root == nil { - return i18n.NewError(ctx, i18n.MsgParentIdentityNotFound, parent, idType, identity) - } - parent = root.Parent - } - return err -} - -func (nm *networkMap) getLocalOrgSigningKey(ctx context.Context) (localOrgSigningKey string, err error) { - localOrgSigningKey = config.GetString(config.OrgKey) - if localOrgSigningKey == "" { - log.L(ctx).Warnf("The %s config key has been deprecated. 
Use %s instead.", config.OrgIdentityDeprecated, config.OrgKey) - localOrgSigningKey = config.GetString(config.OrgIdentityDeprecated) - } - localOrgSigningKey, err = nm.identity.ResolveSigningKey(ctx, localOrgSigningKey) - if err != nil { - return "", err - } - if localOrgSigningKey == "" { - return "", i18n.NewError(ctx, i18n.MsgNodeAndOrgIDMustBeSet) - } - return localOrgSigningKey, nil -} - // RegisterNodeOrganization is a convenience helper to register the org configured on the node, without any extra info -func (nm *networkMap) RegisterNodeOrganization(ctx context.Context, waitConfirm bool) (org *fftypes.Organization, msg *fftypes.Message, err error) { +func (nm *networkMap) RegisterNodeOrganization(ctx context.Context, waitConfirm bool) (*fftypes.Identity, error) { - localOrgSigningKey, err := nm.getLocalOrgSigningKey(ctx) + key, err := nm.identity.GetNodeOwnerBlockchainKey(ctx) if err != nil { - return nil, nil, err + return nil, err } - org = &fftypes.Organization{ - Name: config.GetString(config.OrgName), - Identity: localOrgSigningKey, // TODO: Switch hierarchy to DID based, not signing key. Introducing an intermediate identity object - Description: config.GetString(config.OrgDescription), - } - if org.Identity == "" || org.Name == "" { - return nil, nil, i18n.NewError(ctx, i18n.MsgNodeAndOrgIDMustBeSet) + orgRequest := &fftypes.IdentityCreateDTO{ + Name: config.GetString(config.OrgName), + IdentityProfile: fftypes.IdentityProfile{ + Description: config.GetString(config.OrgDescription), + }, + Key: key.Value, } - msg, err = nm.RegisterOrganization(ctx, org, waitConfirm) - if msg != nil { - org.Message = msg.Header.ID + if orgRequest.Name == "" { + return nil, i18n.NewError(ctx, i18n.MsgNodeAndOrgIDMustBeSet) } - return org, msg, err + return nm.RegisterOrganization(ctx, orgRequest, waitConfirm) } -func (nm *networkMap) RegisterOrganization(ctx context.Context, org *fftypes.Organization, waitConfirm bool) (*fftypes.Message, error) { - - err := org.Validate(ctx, false) - if err != nil { - return nil, err - } - org.ID = fftypes.NewUUID() - org.Created = fftypes.Now() - - // If we're a root identity, we self-sign - signingIdentityString := org.Identity - if org.Parent != "" { - // Check the identity itself is ok - if err = nm.identity.ResolveInputIdentity(ctx, &fftypes.Identity{ - Key: signingIdentityString, - }); err != nil { - return nil, err - } - - // Otherwise we must have access to the signing key of the parent, and the parents - // must already have been broadcast to the network - signingIdentityString = org.Parent - if err = nm.findOrgsToRoot(ctx, "organization", org.Identity, signingIdentityString); err != nil { - return nil, err - } - } - - return nm.broadcast.BroadcastRootOrgDefinition(ctx, org, &fftypes.Identity{ - Key: signingIdentityString, - }, fftypes.SystemTagDefineOrganization, waitConfirm) +func (nm *networkMap) RegisterOrganization(ctx context.Context, orgRequest *fftypes.IdentityCreateDTO, waitConfirm bool) (*fftypes.Identity, error) { + orgRequest.Type = fftypes.IdentityTypeOrg + return nm.RegisterIdentity(ctx, fftypes.SystemNamespace, orgRequest, waitConfirm) } diff --git a/internal/networkmap/register_org_test.go b/internal/networkmap/register_org_test.go index 066616fc45..7cd584d249 100644 --- a/internal/networkmap/register_org_test.go +++ b/internal/networkmap/register_org_test.go @@ -17,167 +17,101 @@ package networkmap import ( + "context" "fmt" "testing" "github.com/hyperledger/firefly/internal/config" "github.com/hyperledger/firefly/mocks/broadcastmocks" 
- "github.com/hyperledger/firefly/mocks/databasemocks" "github.com/hyperledger/firefly/mocks/identitymanagermocks" "github.com/hyperledger/firefly/pkg/fftypes" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" ) -func TestRegisterOrganizationChildOk(t *testing.T) { - - nm, cancel := newTestNetworkmap(t) - defer cancel() - - mdi := nm.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", nm.ctx, "0x23456").Return(&fftypes.Organization{ - Identity: "0x23456", - Description: "parent organization", - }, nil) - - mim := nm.identity.(*identitymanagermocks.Manager) - parentID := &fftypes.Identity{Key: "0x23456"} - mim.On("ResolveInputIdentity", nm.ctx, mock.MatchedBy(func(i *fftypes.Identity) bool { return i.Key == "0x12345" })).Return(nil) - - mockMsg := &fftypes.Message{Header: fftypes.MessageHeader{ID: fftypes.NewUUID()}} - mbm := nm.broadcast.(*broadcastmocks.Manager) - mbm.On("BroadcastRootOrgDefinition", nm.ctx, mock.Anything, parentID, fftypes.SystemTagDefineOrganization, false).Return(mockMsg, nil) - - msg, err := nm.RegisterOrganization(nm.ctx, &fftypes.Organization{ - Name: "org1", - Identity: "0x12345", - Parent: "0x23456", - Description: "my organization", - }, false) - assert.NoError(t, err) - assert.Equal(t, mockMsg, msg) - - mim.AssertExpectations(t) +func testOrg(name string) *fftypes.Identity { + i := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + Type: fftypes.IdentityTypeOrg, + Namespace: fftypes.SystemNamespace, + Name: name, + }, + IdentityProfile: fftypes.IdentityProfile{ + Description: "desc", + Profile: fftypes.JSONObject{ + "some": "profiledata", + }, + }, + Messages: fftypes.IdentityMessages{ + Claim: fftypes.NewUUID(), + }, + } + i.DID, _ = i.GenerateDID(context.Background()) + return i } -func TestRegisterNodeOrganizationRootOk(t *testing.T) { +func TestRegisterNodeOrgOk(t *testing.T) { nm, cancel := newTestNetworkmap(t) defer cancel() - config.Set(config.OrgIdentityDeprecated, "0x12345") config.Set(config.OrgName, "org1") - config.Set(config.OrgDescription, "my organization") + config.Set(config.NodeDescription, "Node 1") mim := nm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveSigningKey", nm.ctx, "0x12345").Return("0x12345", nil) - mim.On("ResolveInputIdentity", nm.ctx, mock.MatchedBy(func(i *fftypes.Identity) bool { return i.Key == "0x12345" })).Return(nil) + mim.On("GetNodeOwnerBlockchainKey", nm.ctx).Return(&fftypes.VerifierRef{ + Value: "0x12345", + }, nil) + mim.On("VerifyIdentityChain", nm.ctx, mock.AnythingOfType("*fftypes.Identity")).Return(nil, false, nil) mockMsg := &fftypes.Message{Header: fftypes.MessageHeader{ID: fftypes.NewUUID()}} mbm := nm.broadcast.(*broadcastmocks.Manager) - mbm.On("BroadcastRootOrgDefinition", nm.ctx, mock.Anything, mock.MatchedBy(func(i *fftypes.Identity) bool { return i.Key == "0x12345" }), fftypes.SystemTagDefineOrganization, true).Return(mockMsg, nil) - - org, msg, err := nm.RegisterNodeOrganization(nm.ctx, true) + mbm.On("BroadcastIdentityClaim", nm.ctx, + fftypes.SystemNamespace, + mock.AnythingOfType("*fftypes.IdentityClaim"), + mock.MatchedBy(func(sr *fftypes.SignerRef) bool { + return sr.Key == "0x12345" + }), + fftypes.SystemTagIdentityClaim, false).Return(mockMsg, nil) + + org, err := nm.RegisterNodeOrganization(nm.ctx, false) assert.NoError(t, err) - assert.Equal(t, mockMsg, msg) - assert.Equal(t, *mockMsg.Header.ID, *org.Message) - -} - -func TestRegisterNodeOrganizationMissingOrgKey(t *testing.T) { - - nm, cancel := 
newTestNetworkmap(t) - defer cancel() - - mim := nm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveSigningKey", nm.ctx, "").Return("", nil) - - _, _, err := nm.RegisterNodeOrganization(nm.ctx, true) - assert.Regexp(t, "FF10216", err) + assert.Equal(t, *mockMsg.Header.ID, *org.Messages.Claim) } -func TestRegisterNodeOrganizationMissingName(t *testing.T) { +func TestRegisterNodeOrgNoName(t *testing.T) { nm, cancel := newTestNetworkmap(t) defer cancel() - config.Set(config.OrgKey, "0x2345") + config.Set(config.OrgName, "") + config.Set(config.NodeDescription, "") mim := nm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveSigningKey", nm.ctx, "0x2345").Return("0x2345", nil) + mim.On("GetNodeOwnerBlockchainKey", nm.ctx).Return(&fftypes.VerifierRef{ + Value: "0x12345", + }, nil) + mim.On("VerifyIdentityChain", nm.ctx, mock.AnythingOfType("*fftypes.Identity")).Return(nil, false, nil) - _, _, err := nm.RegisterNodeOrganization(nm.ctx, true) + _, err := nm.RegisterNodeOrganization(nm.ctx, false) assert.Regexp(t, "FF10216", err) } -func TestRegisterOrganizationBadObject(t *testing.T) { - - nm, cancel := newTestNetworkmap(t) - defer cancel() - - _, err := nm.RegisterOrganization(nm.ctx, &fftypes.Organization{ - Name: "org1", - Description: string(make([]byte, 4097)), - }, false) - assert.Regexp(t, "FF10188", err) - -} - -func TestRegisterOrganizationBadIdentity(t *testing.T) { +func TestRegisterNodeGetOwnerBlockchainKeyFail(t *testing.T) { nm, cancel := newTestNetworkmap(t) defer cancel() - mim := nm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", nm.ctx, mock.MatchedBy(func(i *fftypes.Identity) bool { return i.Key == "wrongun" })).Return(fmt.Errorf("pop")) - mdi := nm.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", nm.ctx, "wrongun").Return(nil, nil) - - _, err := nm.RegisterOrganization(nm.ctx, &fftypes.Organization{ - Name: "org1", - Identity: "wrongun", - Parent: "ok", - }, false) - assert.Regexp(t, "pop", err) - -} - -func TestRegisterOrganizationBadParent(t *testing.T) { - - nm, cancel := newTestNetworkmap(t) - defer cancel() + config.Set(config.OrgName, "") + config.Set(config.NodeDescription, "") mim := nm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", nm.ctx, mock.MatchedBy(func(i *fftypes.Identity) bool { return i.Key == "0x12345" })).Return(nil) - mdi := nm.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", nm.ctx, "wrongun").Return(nil, nil) - - _, err := nm.RegisterOrganization(nm.ctx, &fftypes.Organization{ - Name: "org1", - Identity: "0x12345", - Parent: "wrongun", - }, false) - assert.Regexp(t, "FF10214", err) - -} - -func TestRegisterOrganizationParentLookupFail(t *testing.T) { - - nm, cancel := newTestNetworkmap(t) - defer cancel() + mim.On("GetNodeOwnerBlockchainKey", nm.ctx).Return(nil, fmt.Errorf("pop")) - mim := nm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", nm.ctx, mock.MatchedBy(func(i *fftypes.Identity) bool { return i.Key == "0x12345" })).Return(nil) - mdi := nm.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByIdentity", nm.ctx, "0x23456").Return(nil, fmt.Errorf("pop")) - - _, err := nm.RegisterOrganization(nm.ctx, &fftypes.Organization{ - Name: "org1", - Identity: "0x12345", - Parent: "0x23456", - }, false) + _, err := nm.RegisterNodeOrganization(nm.ctx, false) assert.Regexp(t, "pop", err) } diff --git a/internal/networkmap/update_identity.go b/internal/networkmap/update_identity.go new file mode 100644 index 
0000000000..9751e23a41 --- /dev/null +++ b/internal/networkmap/update_identity.go @@ -0,0 +1,67 @@ +// Copyright © 2022 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package networkmap + +import ( + "context" + + "github.com/hyperledger/firefly/internal/i18n" + "github.com/hyperledger/firefly/pkg/fftypes" +) + +func (nm *networkMap) UpdateIdentity(ctx context.Context, ns, uuidStr string, dto *fftypes.IdentityUpdateDTO, waitConfirm bool) (identity *fftypes.Identity, err error) { + id, err := fftypes.ParseUUID(ctx, uuidStr) + if err != nil { + return nil, err + } + return nm.updateIdentityID(ctx, ns, id, dto, waitConfirm) +} + +func (nm *networkMap) updateIdentityID(ctx context.Context, ns string, id *fftypes.UUID, dto *fftypes.IdentityUpdateDTO, waitConfirm bool) (identity *fftypes.Identity, err error) { + + // Get the original identity + identity, err = nm.identity.CachedIdentityLookupByID(ctx, id) + if err != nil { + return nil, err + } + if identity == nil || identity.Namespace != ns { + return nil, i18n.NewError(ctx, i18n.Msg404NoResult) + } + + // Resolve the signer of the original claim + updateSigner, err := nm.identity.ResolveIdentitySigner(ctx, identity) + if err != nil { + return nil, err + } + + identity.IdentityProfile = dto.IdentityProfile + if err := identity.Validate(ctx); err != nil { + return nil, err + } + + // Send the update + updateMsg, err := nm.broadcast.BroadcastDefinition(ctx, identity.Namespace, &fftypes.IdentityUpdate{ + Identity: identity.IdentityBase, + Updates: dto.IdentityProfile, + }, updateSigner, fftypes.SystemTagIdentityUpdate, waitConfirm) + if err != nil { + return nil, err + } + identity.Messages.Update = updateMsg.Header.ID + + return identity, err +} diff --git a/internal/networkmap/update_identity_test.go b/internal/networkmap/update_identity_test.go new file mode 100644 index 0000000000..4310756bb2 --- /dev/null +++ b/internal/networkmap/update_identity_test.go @@ -0,0 +1,194 @@ +// Copyright © 2021 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package networkmap + +import ( + "fmt" + "testing" + + "github.com/hyperledger/firefly/mocks/broadcastmocks" + "github.com/hyperledger/firefly/mocks/identitymanagermocks" + "github.com/hyperledger/firefly/pkg/fftypes" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +func TestUpdateIdentityProfileOk(t *testing.T) { + + nm, cancel := newTestNetworkmap(t) + defer cancel() + + identity := testOrg("org1") + + mim := nm.identity.(*identitymanagermocks.Manager) + mim.On("CachedIdentityLookupByID", nm.ctx, identity.ID).Return(identity, nil) + signerRef := &fftypes.SignerRef{Key: "0x12345"} + mim.On("ResolveIdentitySigner", nm.ctx, identity).Return(signerRef, nil) + + mockMsg1 := &fftypes.Message{Header: fftypes.MessageHeader{ID: fftypes.NewUUID()}} + mbm := nm.broadcast.(*broadcastmocks.Manager) + + mbm.On("BroadcastDefinition", nm.ctx, + fftypes.SystemNamespace, + mock.AnythingOfType("*fftypes.IdentityUpdate"), + mock.MatchedBy(func(sr *fftypes.SignerRef) bool { + return sr.Key == "0x12345" + }), + fftypes.SystemTagIdentityUpdate, true).Return(mockMsg1, nil) + + org, err := nm.UpdateIdentity(nm.ctx, identity.Namespace, identity.ID.String(), &fftypes.IdentityUpdateDTO{ + IdentityProfile: fftypes.IdentityProfile{ + Description: "new desc", + Profile: fftypes.JSONObject{"new": "profile"}, + }, + }, true) + assert.NoError(t, err) + assert.Equal(t, *mockMsg1.Header.ID, *org.Messages.Update) + + mim.AssertExpectations(t) + mbm.AssertExpectations(t) +} + +func TestUpdateIdentityProfileBroadcastFail(t *testing.T) { + + nm, cancel := newTestNetworkmap(t) + defer cancel() + + identity := testOrg("org1") + + mim := nm.identity.(*identitymanagermocks.Manager) + mim.On("CachedIdentityLookupByID", nm.ctx, identity.ID).Return(identity, nil) + signerRef := &fftypes.SignerRef{Key: "0x12345"} + mim.On("ResolveIdentitySigner", nm.ctx, identity).Return(signerRef, nil) + + mbm := nm.broadcast.(*broadcastmocks.Manager) + mbm.On("BroadcastDefinition", nm.ctx, + fftypes.SystemNamespace, + mock.AnythingOfType("*fftypes.IdentityUpdate"), + mock.MatchedBy(func(sr *fftypes.SignerRef) bool { + return sr.Key == "0x12345" + }), + fftypes.SystemTagIdentityUpdate, true).Return(nil, fmt.Errorf("pop")) + + _, err := nm.UpdateIdentity(nm.ctx, identity.Namespace, identity.ID.String(), &fftypes.IdentityUpdateDTO{ + IdentityProfile: fftypes.IdentityProfile{ + Description: "new desc", + Profile: fftypes.JSONObject{"new": "profile"}, + }, + }, true) + assert.Regexp(t, "pop", err) + + mim.AssertExpectations(t) + mbm.AssertExpectations(t) +} + +func TestUpdateIdentityProfileBadProfile(t *testing.T) { + + nm, cancel := newTestNetworkmap(t) + defer cancel() + + identity := testOrg("org1") + + mim := nm.identity.(*identitymanagermocks.Manager) + mim.On("CachedIdentityLookupByID", nm.ctx, identity.ID).Return(identity, nil) + signerRef := &fftypes.SignerRef{Key: "0x12345"} + mim.On("ResolveIdentitySigner", nm.ctx, identity).Return(signerRef, nil) + + _, err := nm.UpdateIdentity(nm.ctx, identity.Namespace, identity.ID.String(), &fftypes.IdentityUpdateDTO{ + IdentityProfile: fftypes.IdentityProfile{ + Description: string(make([]byte, 4097)), + Profile: fftypes.JSONObject{"new": "profile"}, + }, + }, true) + assert.Regexp(t, "FF10188", err) + + mim.AssertExpectations(t) +} + +func TestUpdateIdentityProfileNotFound(t *testing.T) { + + nm, cancel := newTestNetworkmap(t) + defer cancel() + + identity := testOrg("org1") + + mim := nm.identity.(*identitymanagermocks.Manager) + mim.On("CachedIdentityLookupByID", 
nm.ctx, identity.ID).Return(nil, nil) + + _, err := nm.UpdateIdentity(nm.ctx, identity.Namespace, identity.ID.String(), &fftypes.IdentityUpdateDTO{ + IdentityProfile: fftypes.IdentityProfile{ + Description: string(make([]byte, 4097)), + Profile: fftypes.JSONObject{"new": "profile"}, + }, + }, true) + assert.Regexp(t, "FF10143", err) + + mim.AssertExpectations(t) +} + +func TestUpdateIdentityProfileLookupFail(t *testing.T) { + + nm, cancel := newTestNetworkmap(t) + defer cancel() + + identity := testOrg("org1") + + mim := nm.identity.(*identitymanagermocks.Manager) + mim.On("CachedIdentityLookupByID", nm.ctx, identity.ID).Return(nil, fmt.Errorf("pop")) + + _, err := nm.UpdateIdentity(nm.ctx, identity.Namespace, identity.ID.String(), &fftypes.IdentityUpdateDTO{ + IdentityProfile: fftypes.IdentityProfile{ + Description: string(make([]byte, 4097)), + Profile: fftypes.JSONObject{"new": "profile"}, + }, + }, true) + assert.Regexp(t, "pop", err) + + mim.AssertExpectations(t) +} + +func TestUpdateIdentityProfileClaimLookupFail(t *testing.T) { + + nm, cancel := newTestNetworkmap(t) + defer cancel() + + identity := testOrg("org1") + + mim := nm.identity.(*identitymanagermocks.Manager) + mim.On("CachedIdentityLookupByID", nm.ctx, identity.ID).Return(identity, nil) + signerRef := &fftypes.SignerRef{Key: "0x12345"} + mim.On("ResolveIdentitySigner", nm.ctx, identity).Return(signerRef, fmt.Errorf("pop")) + + _, err := nm.UpdateIdentity(nm.ctx, identity.Namespace, identity.ID.String(), &fftypes.IdentityUpdateDTO{ + IdentityProfile: fftypes.IdentityProfile{ + Description: "Desc1", + Profile: fftypes.JSONObject{"new": "profile"}, + }, + }, true) + assert.Regexp(t, "pop", err) + + mim.AssertExpectations(t) +} + +func TestUpdateIdentityProfileBadID(t *testing.T) { + + nm, cancel := newTestNetworkmap(t) + defer cancel() + + _, err := nm.UpdateIdentity(nm.ctx, "ns1", "badness", &fftypes.IdentityUpdateDTO{}, true) + assert.Regexp(t, "FF10142", err) +} diff --git a/internal/orchestrator/bound_callbacks.go b/internal/orchestrator/bound_callbacks.go index 3b277d98d6..217d62eb0e 100644 --- a/internal/orchestrator/bound_callbacks.go +++ b/internal/orchestrator/bound_callbacks.go @@ -38,8 +38,8 @@ func (bc *boundCallbacks) TokenOpUpdate(plugin tokens.Plugin, operationID *fftyp return bc.ei.OperationUpdate(plugin, operationID, txState, blockchainTXID, errorMessage, opOutput) } -func (bc *boundCallbacks) BatchPinComplete(batch *blockchain.BatchPin, signingIdentity string) error { - return bc.ei.BatchPinComplete(bc.bi, batch, signingIdentity) +func (bc *boundCallbacks) BatchPinComplete(batch *blockchain.BatchPin, signingKey *fftypes.VerifierRef) error { + return bc.ei.BatchPinComplete(bc.bi, batch, signingKey) } func (bc *boundCallbacks) TransferResult(trackingID string, status fftypes.OpStatus, update fftypes.TransportStatusUpdate) error { diff --git a/internal/orchestrator/bound_callbacks_test.go b/internal/orchestrator/bound_callbacks_test.go index db4754033b..94e1dfa375 100644 --- a/internal/orchestrator/bound_callbacks_test.go +++ b/internal/orchestrator/bound_callbacks_test.go @@ -46,8 +46,8 @@ func TestBoundCallbacks(t *testing.T) { hash := fftypes.NewRandB32() opID := fftypes.NewUUID() - mei.On("BatchPinComplete", mbi, batch, "0x12345").Return(fmt.Errorf("pop")) - err := bc.BatchPinComplete(batch, "0x12345") + mei.On("BatchPinComplete", mbi, batch, &fftypes.VerifierRef{Value: "0x12345", Type: fftypes.VerifierTypeEthAddress}).Return(fmt.Errorf("pop")) + err := bc.BatchPinComplete(batch, &fftypes.VerifierRef{Value: 
"0x12345", Type: fftypes.VerifierTypeEthAddress}) assert.EqualError(t, err, "pop") mei.On("OperationUpdate", mbi, opID, fftypes.OpStatusFailed, "0xffffeeee", "error info", info).Return(fmt.Errorf("pop")) diff --git a/internal/orchestrator/orchestrator.go b/internal/orchestrator/orchestrator.go index b7fae5f0a6..05b07458c1 100644 --- a/internal/orchestrator/orchestrator.go +++ b/internal/orchestrator/orchestrator.go @@ -319,13 +319,17 @@ func (or *orchestrator) initDataExchange(ctx context.Context) (err error) { } } - nodes, _, err := or.database.GetNodes(ctx, database.NodeQueryFactory.NewFilter(ctx).And()) + fb := database.IdentityQueryFactory.NewFilter(ctx) + nodes, _, err := or.database.GetIdentities(ctx, fb.And( + fb.Eq("type", fftypes.IdentityTypeNode), + fb.Eq("namespace", fftypes.SystemNamespace), + )) if err != nil { return err } - nodeInfo := make([]fftypes.DXInfo, len(nodes)) + nodeInfo := make([]fftypes.JSONObject, len(nodes)) for i, node := range nodes { - nodeInfo[i] = node.DX + nodeInfo[i] = node.Profile } return or.dataexchange.Init(ctx, dataexchangeConfig.SubPrefix(dxPlugin), nodeInfo, &or.bc) @@ -422,15 +426,15 @@ func (or *orchestrator) initComponents(ctx context.Context) (err error) { or.metrics = metrics.NewMetricsManager(ctx) } - if or.identity == nil { - or.identity, err = identity.NewIdentityManager(ctx, or.database, or.identityPlugin, or.blockchain) + if or.data == nil { + or.data, err = data.NewDataManager(ctx, or.database, or.publicstorage, or.dataexchange) if err != nil { return err } } - if or.data == nil { - or.data, err = data.NewDataManager(ctx, or.database, or.publicstorage, or.dataexchange) + if or.identity == nil { + or.identity, err = identity.NewIdentityManager(ctx, or.database, or.identityPlugin, or.blockchain, or.data) if err != nil { return err } @@ -472,17 +476,17 @@ func (or *orchestrator) initComponents(ctx context.Context) (err error) { } } - or.definitions = definitions.NewDefinitionHandlers(or.database, or.dataexchange, or.data, or.broadcast, or.messaging, or.assets, or.contracts) + or.definitions = definitions.NewDefinitionHandlers(or.database, or.blockchain, or.dataexchange, or.data, or.identity, or.broadcast, or.messaging, or.assets, or.contracts) if or.events == nil { - or.events, err = events.NewEventManager(ctx, or, or.publicstorage, or.database, or.identity, or.definitions, or.data, or.broadcast, or.messaging, or.assets, or.metrics) + or.events, err = events.NewEventManager(ctx, or, or.publicstorage, or.database, or.blockchain, or.identity, or.definitions, or.data, or.broadcast, or.messaging, or.assets, or.metrics) if err != nil { return err } } if or.networkmap == nil { - or.networkmap, err = networkmap.NewNetworkMap(ctx, or.database, or.broadcast, or.dataexchange, or.identity) + or.networkmap, err = networkmap.NewNetworkMap(ctx, or.database, or.broadcast, or.dataexchange, or.identity, or.syncasync) if err != nil { return err } diff --git a/internal/orchestrator/orchestrator_test.go b/internal/orchestrator/orchestrator_test.go index 74f2f4456a..29c960e8e6 100644 --- a/internal/orchestrator/orchestrator_test.go +++ b/internal/orchestrator/orchestrator_test.go @@ -277,7 +277,7 @@ func TestBadDataExchangeInitFail(t *testing.T) { or.mbi.On("Init", mock.Anything, mock.Anything, mock.Anything).Return(nil) or.mii.On("Init", mock.Anything, mock.Anything, mock.Anything).Return(nil) or.mps.On("Init", mock.Anything, mock.Anything, mock.Anything).Return(nil) - or.mdi.On("GetNodes", mock.Anything, mock.Anything).Return([]*fftypes.Node{}, nil, nil) + 
or.mdi.On("GetIdentities", mock.Anything, mock.Anything).Return([]*fftypes.Identity{}, nil, nil) or.mdx.On("Init", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(fmt.Errorf("pop")) ctx, cancelCtx := context.WithCancel(context.Background()) err := or.Init(ctx, cancelCtx) @@ -295,7 +295,7 @@ func TestDataExchangePluginOldName(t *testing.T) { or.mbi.On("Init", mock.Anything, mock.Anything, mock.Anything).Return(nil) or.mii.On("Init", mock.Anything, mock.Anything, mock.Anything).Return(nil) or.mps.On("Init", mock.Anything, mock.Anything, mock.Anything).Return(nil) - or.mdi.On("GetNodes", mock.Anything, mock.Anything).Return([]*fftypes.Node{}, nil, nil) + or.mdi.On("GetIdentities", mock.Anything, mock.Anything).Return([]*fftypes.Identity{}, nil, nil) or.mdi.On("GetNamespace", mock.Anything, mock.Anything).Return(nil, fmt.Errorf("pop")) ctx, cancelCtx := context.WithCancel(context.Background()) err := or.Init(ctx, cancelCtx) @@ -314,7 +314,7 @@ func TestBadTokensPlugin(t *testing.T) { or.mbi.On("Init", mock.Anything, mock.Anything, mock.Anything).Return(nil) or.mii.On("Init", mock.Anything, mock.Anything, mock.Anything).Return(nil) or.mps.On("Init", mock.Anything, mock.Anything, mock.Anything).Return(nil) - or.mdi.On("GetNodes", mock.Anything, mock.Anything).Return([]*fftypes.Node{}, nil, nil) + or.mdi.On("GetIdentities", mock.Anything, mock.Anything).Return([]*fftypes.Identity{}, nil, nil) or.mdx.On("Init", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(nil) or.mdi.On("GetNamespace", mock.Anything, mock.Anything).Return(nil, nil) or.mdi.On("UpsertNamespace", mock.Anything, mock.Anything, true).Return(nil) @@ -336,7 +336,7 @@ func TestBadTokensPluginNoConnector(t *testing.T) { or.mii.On("Init", mock.Anything, mock.Anything, mock.Anything).Return(nil) or.mbi.On("VerifyIdentitySyntax", mock.Anything, mock.Anything, mock.Anything).Return("", nil) or.mps.On("Init", mock.Anything, mock.Anything, mock.Anything).Return(nil) - or.mdi.On("GetNodes", mock.Anything, mock.Anything).Return([]*fftypes.Node{}, nil, nil) + or.mdi.On("GetIdentities", mock.Anything, mock.Anything).Return([]*fftypes.Identity{}, nil, nil) or.mdx.On("Init", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(nil) or.mdi.On("GetNamespace", mock.Anything, mock.Anything).Return(nil, nil) or.mdi.On("UpsertNamespace", mock.Anything, mock.Anything, true).Return(nil) @@ -357,7 +357,7 @@ func TestBadTokensPluginNoName(t *testing.T) { or.mbi.On("Init", mock.Anything, mock.Anything, mock.Anything).Return(nil) or.mii.On("Init", mock.Anything, mock.Anything, mock.Anything).Return(nil) or.mps.On("Init", mock.Anything, mock.Anything, mock.Anything).Return(nil) - or.mdi.On("GetNodes", mock.Anything, mock.Anything).Return([]*fftypes.Node{}, nil, nil) + or.mdi.On("GetIdentities", mock.Anything, mock.Anything).Return([]*fftypes.Identity{}, nil, nil) or.mdx.On("Init", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(nil) or.mdi.On("GetNamespace", mock.Anything, mock.Anything).Return(nil, nil) or.mdi.On("UpsertNamespace", mock.Anything, mock.Anything, true).Return(nil) @@ -378,7 +378,7 @@ func TestBadTokensPluginInvalidName(t *testing.T) { or.mbi.On("Init", mock.Anything, mock.Anything, mock.Anything).Return(nil) or.mii.On("Init", mock.Anything, mock.Anything, mock.Anything).Return(nil) or.mps.On("Init", mock.Anything, mock.Anything, mock.Anything).Return(nil) - or.mdi.On("GetNodes", mock.Anything, mock.Anything).Return([]*fftypes.Node{}, nil, nil) + or.mdi.On("GetIdentities", 
mock.Anything, mock.Anything).Return([]*fftypes.Identity{}, nil, nil)
 	or.mdx.On("Init", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(nil)
 	or.mdi.On("GetNamespace", mock.Anything, mock.Anything).Return(nil, nil)
 	or.mdi.On("UpsertNamespace", mock.Anything, mock.Anything, true).Return(nil)
@@ -400,7 +400,7 @@ func TestBadTokensPluginNoType(t *testing.T) {
 	or.mii.On("Init", mock.Anything, mock.Anything, mock.Anything).Return(nil)
 	or.mbi.On("VerifyIdentitySyntax", mock.Anything, mock.Anything, mock.Anything).Return("", nil)
 	or.mps.On("Init", mock.Anything, mock.Anything, mock.Anything).Return(nil)
-	or.mdi.On("GetNodes", mock.Anything, mock.Anything).Return([]*fftypes.Node{}, nil, nil)
+	or.mdi.On("GetIdentities", mock.Anything, mock.Anything).Return([]*fftypes.Identity{}, nil, nil)
 	or.mdx.On("Init", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(nil)
 	or.mdi.On("GetNamespace", mock.Anything, mock.Anything).Return(nil, nil)
 	or.mdi.On("UpsertNamespace", mock.Anything, mock.Anything, true).Return(nil)
@@ -423,7 +423,7 @@ func TestGoodTokensPlugin(t *testing.T) {
 	or.mbi.On("Init", mock.Anything, mock.Anything, mock.Anything).Return(nil)
 	or.mii.On("Init", mock.Anything, mock.Anything, mock.Anything).Return(nil)
 	or.mps.On("Init", mock.Anything, mock.Anything, mock.Anything).Return(nil)
-	or.mdi.On("GetNodes", mock.Anything, mock.Anything).Return([]*fftypes.Node{}, nil, nil)
+	or.mdi.On("GetIdentities", mock.Anything, mock.Anything).Return([]*fftypes.Identity{}, nil, nil)
 	or.mdx.On("Init", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(nil)
 	or.mdi.On("GetNamespace", mock.Anything, mock.Anything).Return(nil, nil)
 	or.mdi.On("UpsertNamespace", mock.Anything, mock.Anything, true).Return(nil)
@@ -621,7 +621,7 @@ func TestInitOK(t *testing.T) {
 	or.mii.On("Init", mock.Anything, mock.Anything, mock.Anything).Return(nil)
 	or.mbi.On("Init", mock.Anything, mock.Anything, mock.Anything).Return(nil)
 	or.mps.On("Init", mock.Anything, mock.Anything, mock.Anything).Return(nil)
-	or.mdi.On("GetNodes", mock.Anything, mock.Anything).Return([]*fftypes.Node{}, nil, nil)
+	or.mdi.On("GetIdentities", mock.Anything, mock.Anything).Return([]*fftypes.Identity{}, nil, nil)
 	or.mdx.On("Init", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(nil)
 	or.mdi.On("GetNamespace", mock.Anything, mock.Anything).Return(nil, nil)
 	or.mdi.On("UpsertNamespace", mock.Anything, mock.Anything, true).Return(nil)
@@ -648,7 +648,7 @@ func TestInitOK(t *testing.T) {

 func TestInitDataExchangeGetNodesFail(t *testing.T) {
 	or := newTestOrchestrator()
-	or.mdi.On("GetNodes", mock.Anything, mock.Anything).Return(nil, nil, fmt.Errorf("pop"))
+	or.mdi.On("GetIdentities", mock.Anything, mock.Anything).Return(nil, nil, fmt.Errorf("pop"))
 	err := or.initDataExchange(or.ctx)
 	assert.EqualError(t, err, "pop")
@@ -657,7 +657,7 @@ func TestInitDataExchangeGetNodesFail(t *testing.T) {

 func TestInitDataExchangeWithNodes(t *testing.T) {
 	or := newTestOrchestrator()
-	or.mdi.On("GetNodes", mock.Anything, mock.Anything).Return([]*fftypes.Node{{}}, nil, nil)
+	or.mdi.On("GetIdentities", mock.Anything, mock.Anything).Return([]*fftypes.Identity{{}}, nil, nil)
 	or.mdx.On("Init", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(nil)
 	err := or.initDataExchange(or.ctx)
diff --git a/internal/orchestrator/status.go b/internal/orchestrator/status.go
index a504caf4eb..35e3293f32 100644
--- a/internal/orchestrator/status.go
+++ b/internal/orchestrator/status.go
@@ -1,4 +1,4 @@
-// Copyright © 2021 Kaleido, Inc.
+// Copyright © 2022 Kaleido, Inc.
 //
 // SPDX-License-Identifier: Apache-2.0
 //
@@ -18,6 +18,7 @@ package orchestrator
 import (
 	"context"
+	"fmt"

 	"github.com/hyperledger/firefly/internal/config"
 	"github.com/hyperledger/firefly/internal/log"
@@ -43,34 +44,35 @@ func (or *orchestrator) GetNodeUUID(ctx context.Context) (node *fftypes.UUID) {

 func (or *orchestrator) GetStatus(ctx context.Context) (status *fftypes.NodeStatus, err error) {
-	orgKey, _ := or.identity.GetLocalOrgKey(ctx)
+	org, err := or.identity.GetNodeOwnerOrg(ctx)
+	if err != nil {
+		log.L(ctx).Warnf("Failed to query local org for status: %s", err)
+	}
 	status = &fftypes.NodeStatus{
 		Node: fftypes.NodeStatusNode{
 			Name: config.GetString(config.NodeName),
 		},
 		Org: fftypes.NodeStatusOrg{
-			Name: config.GetString(config.OrgName),
-			Identity: orgKey,
+			Name: config.GetString(config.OrgName),
 		},
 		Defaults: fftypes.NodeStatusDefaults{
 			Namespace: config.GetString(config.NamespacesDefault),
 		},
 	}
-	org, err := or.database.GetOrganizationByName(ctx, status.Org.Name)
-	if err != nil {
-		return nil, err
-	}
 	if org != nil {
 		status.Org.Registered = true
 		status.Org.ID = org.ID
-		status.Org.Identity = org.Identity
+		status.Org.DID = org.DID

-		node, err := or.database.GetNode(ctx, org.Identity, status.Node.Name)
+		node, _, err := or.identity.CachedIdentityLookup(ctx, fmt.Sprintf("%s%s", fftypes.FireFlyNodeDIDPrefix, status.Node.Name))
 		if err != nil {
 			return nil, err
 		}
-
+		if node != nil && !node.Parent.Equals(org.ID) {
+			log.L(ctx).Errorf("Specified node name is in use by another org: %s", err)
+			node = nil
+		}
 		if node != nil {
 			status.Node.Registered = true
 			status.Node.ID = node.ID
diff --git a/internal/orchestrator/status_test.go b/internal/orchestrator/status_test.go
index 97522ff2bc..43880a76a0 100644
--- a/internal/orchestrator/status_test.go
+++ b/internal/orchestrator/status_test.go
@@ -21,11 +21,9 @@ import (
 	"testing"

 	"github.com/hyperledger/firefly/internal/config"
-	"github.com/hyperledger/firefly/mocks/databasemocks"
 	"github.com/hyperledger/firefly/mocks/identitymanagermocks"
 	"github.com/hyperledger/firefly/pkg/fftypes"
 	"github.com/stretchr/testify/assert"
-	"github.com/stretchr/testify/mock"
 )

 func TestGetStatusRegistered(t *testing.T) {
@@ -39,19 +37,21 @@ func TestGetStatusRegistered(t *testing.T) {
 	orgID := fftypes.NewUUID()
 	nodeID := fftypes.NewUUID()

-	mdi := or.database.(*databasemocks.Plugin)
-	mdi.On("GetOrganizationByName", or.ctx, "org1").Return(&fftypes.Organization{
-		ID: orgID,
-		Identity: "0x1111111",
-		Name: "org1",
-	}, nil)
-	mdi.On("GetNode", or.ctx, "0x1111111", "node1").Return(&fftypes.Node{
-		ID: nodeID,
-		Name: "node1",
-		Owner: "0x1111111",
-	}, nil)
 	mim := or.identity.(*identitymanagermocks.Manager)
-	mim.On("GetLocalOrgKey", mock.Anything).Return("0x1111111", nil)
+	mim.On("GetNodeOwnerOrg", or.ctx).Return(&fftypes.Identity{
+		IdentityBase: fftypes.IdentityBase{
+			ID: orgID,
+			Name: "org1",
+			DID: "did:firefly:org/org1",
+		},
+	}, nil)
+	mim.On("CachedIdentityLookup", or.ctx, "did:firefly:node/node1").Return(&fftypes.Identity{
+		IdentityBase: fftypes.IdentityBase{
+			ID: nodeID,
+			Name: "node1",
+			Parent: orgID,
+		},
+	}, false, nil)

 	status, err := or.GetStatus(or.ctx)
 	assert.NoError(t, err)
@@ -60,7 +60,7 @@ func TestGetStatusRegistered(t *testing.T) {
 	assert.Equal(t, "org1", status.Org.Name)
 	assert.True(t, status.Org.Registered)
-	assert.Equal(t, "0x1111111", status.Org.Identity)
+	assert.Equal(t, "did:firefly:org/org1", status.Org.DID)
 	assert.Equal(t, *orgID, *status.Org.ID)

 	assert.Equal(t, "node1", status.Node.Name)
@@ -72,7 +72,7 @@ func TestGetStatusRegistered(t *testing.T) {

 }

-func TestGetStatusUnregistered(t *testing.T) {
+func TestGetStatusWrongNodeOwner(t *testing.T) {
 	or := newTestOrchestrator()

 	config.Reset()
@@ -80,10 +80,24 @@ func TestGetStatusUnregistered(t *testing.T) {
 	config.Set(config.OrgName, "org1")
 	config.Set(config.NodeName, "node1")

-	mdi := or.database.(*databasemocks.Plugin)
-	mdi.On("GetOrganizationByName", or.ctx, "org1").Return(nil, nil)
+	orgID := fftypes.NewUUID()
+	nodeID := fftypes.NewUUID()
+
 	mim := or.identity.(*identitymanagermocks.Manager)
-	mim.On("GetLocalOrgKey", mock.Anything).Return("0x1111111", nil)
+	mim.On("GetNodeOwnerOrg", or.ctx).Return(&fftypes.Identity{
+		IdentityBase: fftypes.IdentityBase{
+			ID: orgID,
+			Name: "org1",
+			DID: "did:firefly:org/org1",
+		},
+	}, nil)
+	mim.On("CachedIdentityLookup", or.ctx, "did:firefly:node/node1").Return(&fftypes.Identity{
+		IdentityBase: fftypes.IdentityBase{
+			ID: nodeID,
+			Name: "node1",
+			Parent: fftypes.NewUUID(),
+		},
+	}, false, nil)

 	status, err := or.GetStatus(or.ctx)
 	assert.NoError(t, err)
@@ -91,16 +105,17 @@

 	assert.Equal(t, "default", status.Defaults.Namespace)

 	assert.Equal(t, "org1", status.Org.Name)
-	assert.False(t, status.Org.Registered)
+	assert.True(t, status.Org.Registered)
+	assert.Equal(t, "did:firefly:org/org1", status.Org.DID)
+	assert.Equal(t, *orgID, *status.Org.ID)

 	assert.Equal(t, "node1", status.Node.Name)
 	assert.False(t, status.Node.Registered)
-
-	assert.Nil(t, or.GetNodeUUID(or.ctx))
+	assert.Nil(t, status.Node.ID)
 }

-func TestGetStatusOrgOnlyRegistered(t *testing.T) {
+func TestGetStatusUnregistered(t *testing.T) {
 	or := newTestOrchestrator()

 	config.Reset()
@@ -108,17 +123,8 @@ func TestGetStatusOrgOnlyRegistered(t *testing.T) {
 	config.Set(config.OrgName, "org1")
 	config.Set(config.NodeName, "node1")

-	orgID := fftypes.NewUUID()
-
-	mdi := or.database.(*databasemocks.Plugin)
-	mdi.On("GetOrganizationByName", or.ctx, "org1").Return(&fftypes.Organization{
-		ID: orgID,
-		Identity: "0x1111111",
-		Name: "org1",
-	}, nil)
-	mdi.On("GetNode", or.ctx, "0x1111111", "node1").Return(nil, nil)
 	mim := or.identity.(*identitymanagermocks.Manager)
-	mim.On("GetLocalOrgKey", mock.Anything).Return("0x1111111", nil)
+	mim.On("GetNodeOwnerOrg", or.ctx).Return(nil, fmt.Errorf("pop"))

 	status, err := or.GetStatus(or.ctx)
 	assert.NoError(t, err)
@@ -126,17 +132,16 @@
 	assert.Equal(t, "default", status.Defaults.Namespace)

 	assert.Equal(t, "org1", status.Org.Name)
-	assert.True(t, status.Org.Registered)
-	assert.Equal(t, "0x1111111", status.Org.Identity)
-	assert.Equal(t, *orgID, *status.Org.ID)
+	assert.False(t, status.Org.Registered)

 	assert.Equal(t, "node1", status.Node.Name)
 	assert.False(t, status.Node.Registered)

 	assert.Nil(t, or.GetNodeUUID(or.ctx))
+
 }

-func TestGetStatuOrgError(t *testing.T) {
+func TestGetStatusOrgOnlyRegistered(t *testing.T) {
 	or := newTestOrchestrator()

 	config.Reset()
@@ -144,13 +149,32 @@ func TestGetStatuOrgError(t *testing.T) {
 	config.Set(config.OrgName, "org1")
 	config.Set(config.NodeName, "node1")

-	mdi := or.database.(*databasemocks.Plugin)
-	mdi.On("GetOrganizationByName", or.ctx, "org1").Return(nil, fmt.Errorf("pop"))
+	orgID := fftypes.NewUUID()
+
 	mim := or.identity.(*identitymanagermocks.Manager)
-	mim.On("GetLocalOrgKey", mock.Anything).Return("0x1111111", nil)
+	mim.On("GetNodeOwnerOrg", or.ctx).Return(&fftypes.Identity{
+		IdentityBase: fftypes.IdentityBase{
+			ID: orgID,
+			Name: "org1",
+			DID: "did:firefly:org/org1",
+		},
+	}, nil)
+	mim.On("CachedIdentityLookup", or.ctx, "did:firefly:node/node1").Return(nil, false, nil)

-	_, err := or.GetStatus(or.ctx)
-	assert.EqualError(t, err, "pop")
+	status, err := or.GetStatus(or.ctx)
+	assert.NoError(t, err)
+
+	assert.Equal(t, "default", status.Defaults.Namespace)
+
+	assert.Equal(t, "org1", status.Org.Name)
+	assert.True(t, status.Org.Registered)
+	assert.Equal(t, "did:firefly:org/org1", status.Org.DID)
+	assert.Equal(t, *orgID, *status.Org.ID)
+
+	assert.Equal(t, "node1", status.Node.Name)
+	assert.False(t, status.Node.Registered)
+
+	assert.Nil(t, or.GetNodeUUID(or.ctx))
 }

 func TestGetStatusNodeError(t *testing.T) {
@@ -163,15 +187,15 @@ func TestGetStatusNodeError(t *testing.T) {

 	orgID := fftypes.NewUUID()

-	mdi := or.database.(*databasemocks.Plugin)
-	mdi.On("GetOrganizationByName", or.ctx, "org1").Return(&fftypes.Organization{
-		ID: orgID,
-		Identity: "0x1111111",
-		Name: "org1",
-	}, nil)
-	mdi.On("GetNode", or.ctx, "0x1111111", "node1").Return(nil, fmt.Errorf("pop"))
 	mim := or.identity.(*identitymanagermocks.Manager)
-	mim.On("GetLocalOrgKey", mock.Anything).Return("0x1111111", nil)
+	mim.On("GetNodeOwnerOrg", or.ctx).Return(&fftypes.Identity{
+		IdentityBase: fftypes.IdentityBase{
+			ID: orgID,
+			Name: "org1",
+			DID: "did:firefly:org/org1",
+		},
+	}, nil)
+	mim.On("CachedIdentityLookup", or.ctx, "did:firefly:node/node1").Return(nil, false, fmt.Errorf("pop"))

 	_, err := or.GetStatus(or.ctx)
 	assert.EqualError(t, err, "pop")
diff --git a/internal/privatemessaging/groupmanager.go b/internal/privatemessaging/groupmanager.go
index b8bdbd7c1d..4b46e2fa9e 100644
--- a/internal/privatemessaging/groupmanager.go
+++ b/internal/privatemessaging/groupmanager.go
@@ -45,7 +45,7 @@ type groupManager struct {

 type groupHashEntry struct {
 	group *fftypes.Group
-	nodes []*fftypes.Node
+	nodes []*fftypes.Identity
 }

 func (gm *groupManager) EnsureLocalGroup(ctx context.Context, group *fftypes.Group) (ok bool, err error) {
@@ -77,7 +77,7 @@ func (gm *groupManager) EnsureLocalGroup(ctx context.Context, group *fftypes.Gro
 	return true, nil
 }

-func (gm *groupManager) groupInit(ctx context.Context, signer *fftypes.Identity, group *fftypes.Group) (err error) {
+func (gm *groupManager) groupInit(ctx context.Context, signer *fftypes.SignerRef, group *fftypes.Group) (err error) {

 	// Serialize it into a data object, as a piece of data we can write to a message
 	data := &fftypes.Data{
@@ -118,8 +118,8 @@ func (gm *groupManager) groupInit(ctx context.Context, signer *fftypes.Identity,
 			Group: group.Hash,
 			Namespace: group.Namespace, // Must go into the same ordering context as the message itself
 			Type: fftypes.MessageTypeGroupInit,
-			Identity: *signer,
-			Tag: string(fftypes.SystemTagDefineGroup),
+			SignerRef: *signer,
+			Tag: fftypes.SystemTagDefineGroup,
 			Topics: fftypes.FFStringArray{group.Topic()},
 			TxType: fftypes.TransactionTypeBatchPin,
 		},
@@ -157,7 +157,7 @@ func (gm *groupManager) GetGroups(ctx context.Context, filter database.AndFilter
 	return gm.database.GetGroups(ctx, filter)
 }

-func (gm *groupManager) getGroupNodes(ctx context.Context, groupHash *fftypes.Bytes32) (*fftypes.Group, []*fftypes.Node, error) {
+func (gm *groupManager) getGroupNodes(ctx context.Context, groupHash *fftypes.Bytes32) (*fftypes.Group, []*fftypes.Identity, error) {

 	if cached := gm.groupCache.Get(groupHash.String()); cached != nil {
 		cached.Extend(gm.groupCacheTTL)
@@ -175,14 +175,14 @@ func (gm *groupManager) getGroupNodes(ctx context.Context, groupHash
*fftypes.By // We de-duplicate nodes in the case that the payload needs to be received by multiple org identities // that share a single node. - nodes := make([]*fftypes.Node, 0, len(group.Members)) + nodes := make([]*fftypes.Identity, 0, len(group.Members)) knownIDs := make(map[fftypes.UUID]bool) for _, r := range group.Members { - node, err := gm.database.GetNodeByID(ctx, r.Node) + node, err := gm.database.GetIdentityByID(ctx, r.Node) if err != nil { return nil, nil, err } - if node == nil { + if node == nil || node.Type != fftypes.IdentityTypeNode { return nil, nil, i18n.NewError(ctx, i18n.MsgNodeNotFound, r.Node) } if !knownIDs[*node.ID] { @@ -204,7 +204,7 @@ func (gm *groupManager) getGroupNodes(ctx context.Context, groupHash *fftypes.By // // Errors are only returned for database issues. For validation issues, a nil group is returned without an error. func (gm *groupManager) ResolveInitGroup(ctx context.Context, msg *fftypes.Message) (*fftypes.Group, error) { - if msg.Header.Tag == string(fftypes.SystemTagDefineGroup) { + if msg.Header.Tag == fftypes.SystemTagDefineGroup { // Store the new group data, foundAll, err := gm.data.GetMessageData(ctx, msg, true) if err != nil || !foundAll || len(data) == 0 { diff --git a/internal/privatemessaging/groupmanager_test.go b/internal/privatemessaging/groupmanager_test.go index b321f4b337..7f2dc26ccf 100644 --- a/internal/privatemessaging/groupmanager_test.go +++ b/internal/privatemessaging/groupmanager_test.go @@ -34,7 +34,7 @@ func TestGroupInitSealFail(t *testing.T) { pm, cancel := newTestPrivateMessaging(t) defer cancel() - err := pm.groupInit(pm.ctx, &fftypes.Identity{}, &fftypes.Group{}) + err := pm.groupInit(pm.ctx, &fftypes.SignerRef{}, &fftypes.Group{}) assert.Regexp(t, "FF10137", err) } @@ -55,7 +55,7 @@ func TestGroupInitWriteGroupFail(t *testing.T) { }, } group.Seal() - err := pm.groupInit(pm.ctx, &fftypes.Identity{}, group) + err := pm.groupInit(pm.ctx, &fftypes.SignerRef{}, group) assert.Regexp(t, "pop", err) } @@ -77,7 +77,7 @@ func TestGroupInitWriteDataFail(t *testing.T) { }, } group.Seal() - err := pm.groupInit(pm.ctx, &fftypes.Identity{}, group) + err := pm.groupInit(pm.ctx, &fftypes.SignerRef{}, group) assert.Regexp(t, "pop", err) } @@ -92,9 +92,9 @@ func TestResolveInitGroupMissingData(t *testing.T) { Header: fftypes.MessageHeader{ ID: fftypes.NewUUID(), Namespace: fftypes.SystemNamespace, - Tag: string(fftypes.SystemTagDefineGroup), + Tag: fftypes.SystemTagDefineGroup, Group: fftypes.NewRandB32(), - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -117,9 +117,9 @@ func TestResolveInitGroupBadData(t *testing.T) { Header: fftypes.MessageHeader{ ID: fftypes.NewUUID(), Namespace: fftypes.SystemNamespace, - Tag: string(fftypes.SystemTagDefineGroup), + Tag: fftypes.SystemTagDefineGroup, Group: fftypes.NewRandB32(), - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -142,9 +142,9 @@ func TestResolveInitGroupBadValidation(t *testing.T) { Header: fftypes.MessageHeader{ ID: fftypes.NewUUID(), Namespace: fftypes.SystemNamespace, - Tag: string(fftypes.SystemTagDefineGroup), + Tag: fftypes.SystemTagDefineGroup, Group: fftypes.NewRandB32(), - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -180,9 +180,9 @@ func TestResolveInitGroupBadGroupID(t *testing.T) { Header: fftypes.MessageHeader{ ID: fftypes.NewUUID(), Namespace: fftypes.SystemNamespace, - Tag: 
string(fftypes.SystemTagDefineGroup), + Tag: fftypes.SystemTagDefineGroup, Group: fftypes.NewRandB32(), - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -220,9 +220,9 @@ func TestResolveInitGroupUpsertFail(t *testing.T) { Header: fftypes.MessageHeader{ ID: fftypes.NewUUID(), Namespace: fftypes.SystemNamespace, - Tag: string(fftypes.SystemTagDefineGroup), + Tag: fftypes.SystemTagDefineGroup, Group: group.Hash, - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -261,9 +261,9 @@ func TestResolveInitGroupNewOk(t *testing.T) { Header: fftypes.MessageHeader{ ID: fftypes.NewUUID(), Namespace: fftypes.SystemNamespace, - Tag: string(fftypes.SystemTagDefineGroup), + Tag: fftypes.SystemTagDefineGroup, Group: group.Hash, - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -287,7 +287,7 @@ func TestResolveInitGroupExistingOK(t *testing.T) { Namespace: "ns1", Tag: "mytag", Group: fftypes.NewRandB32(), - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -309,7 +309,7 @@ func TestResolveInitGroupExistingFail(t *testing.T) { Namespace: "ns1", Tag: "mytag", Group: fftypes.NewRandB32(), - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -331,7 +331,7 @@ func TestResolveInitGroupExistingNotFound(t *testing.T) { Namespace: "ns1", Tag: "mytag", Group: fftypes.NewRandB32(), - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "author1", Key: "0x12345", }, @@ -409,8 +409,11 @@ func TestGetGroupNodesCache(t *testing.T) { mdi := pm.database.(*databasemocks.Plugin) mdi.On("GetGroupByHash", pm.ctx, mock.Anything).Return(group, nil).Once() - mdi.On("GetNodeByID", pm.ctx, mock.Anything).Return(&fftypes.Node{ - ID: node1, + mdi.On("GetIdentityByID", pm.ctx, mock.Anything).Return(&fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: node1, + Type: fftypes.IdentityTypeNode, + }, }, nil).Once() g, nodes, err := pm.getGroupNodes(pm.ctx, group.Hash) @@ -465,7 +468,7 @@ func TestGetGroupNodesNodeLookupFail(t *testing.T) { mdi := pm.database.(*databasemocks.Plugin) mdi.On("GetGroupByHash", pm.ctx, mock.Anything).Return(group, nil).Once() - mdi.On("GetNodeByID", pm.ctx, node1).Return(nil, fmt.Errorf("pop")).Once() + mdi.On("GetIdentityByID", pm.ctx, node1).Return(nil, fmt.Errorf("pop")).Once() _, _, err := pm.getGroupNodes(pm.ctx, group.Hash) assert.EqualError(t, err, "pop") @@ -486,7 +489,7 @@ func TestGetGroupNodesNodeLookupNotFound(t *testing.T) { mdi := pm.database.(*databasemocks.Plugin) mdi.On("GetGroupByHash", pm.ctx, mock.Anything).Return(group, nil).Once() - mdi.On("GetNodeByID", pm.ctx, node1).Return(nil, nil).Once() + mdi.On("GetIdentityByID", pm.ctx, node1).Return(nil, nil).Once() _, _, err := pm.getGroupNodes(pm.ctx, group.Hash) assert.Regexp(t, "FF10224", err) diff --git a/internal/privatemessaging/message.go b/internal/privatemessaging/message.go index 7f7d15372a..b7a82ed685 100644 --- a/internal/privatemessaging/message.go +++ b/internal/privatemessaging/message.go @@ -142,7 +142,7 @@ func (s *messageSender) resolveAndSend(ctx context.Context, method sendMethod) e func (s *messageSender) resolve(ctx context.Context) error { // Resolve the sending identity - if err := s.mgr.identity.ResolveInputIdentity(ctx, &s.msg.Header.Identity); err != nil { + if err := s.mgr.identity.ResolveInputSigningIdentity(ctx, s.msg.Header.Namespace, 
&s.msg.Header.SignerRef); err != nil { return i18n.WrapError(ctx, err, i18n.MsgAuthorInvalid) } diff --git a/internal/privatemessaging/message_test.go b/internal/privatemessaging/message_test.go index ddc0396636..5901e14bf8 100644 --- a/internal/privatemessaging/message_test.go +++ b/internal/privatemessaging/message_test.go @@ -17,6 +17,7 @@ package privatemessaging import ( + "context" "fmt" "testing" @@ -32,15 +33,54 @@ import ( "github.com/stretchr/testify/mock" ) +func newTestOrg(name string) *fftypes.Identity { + identity := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + Type: fftypes.IdentityTypeOrg, + Namespace: fftypes.SystemNamespace, + Name: name, + Parent: nil, + }, + } + identity.DID, _ = identity.GenerateDID(context.Background()) + return identity +} + +func newTestNode(name string, owner *fftypes.Identity) *fftypes.Identity { + identity := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: fftypes.NewUUID(), + Type: fftypes.IdentityTypeNode, + Namespace: fftypes.SystemNamespace, + Name: name, + Parent: owner.ID, + }, + IdentityProfile: fftypes.IdentityProfile{ + Profile: fftypes.JSONObject{ + "id": fmt.Sprintf("%s-peer", name), + "url": fmt.Sprintf("https://%s.example.com", name), + }, + }, + } + identity.DID, _ = identity.GenerateDID(context.Background()) + return identity +} + func TestSendConfirmMessageE2EOk(t *testing.T) { pm, cancel := newTestPrivateMessagingWithMetrics(t) defer cancel() mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveLocalOrgDID", pm.ctx).Return("localorg", nil) - mim.On("ResolveInputIdentity", pm.ctx, mock.Anything).Return(nil) - mim.On("GetLocalOrganization", pm.ctx).Return(&fftypes.Organization{Identity: "localorg"}, nil) + rootOrg := newTestOrg("rootorg") + intermediateOrg := newTestOrg("localorg") + intermediateOrg.Parent = rootOrg.ID + localNode := newTestNode("node1", intermediateOrg) + mim.On("ResolveInputSigningIdentity", pm.ctx, "ns1", mock.Anything).Return(nil) + mim.On("GetNodeOwnerOrg", pm.ctx).Return(intermediateOrg, nil) + mim.On("CachedIdentityLookup", pm.ctx, "org1").Return(intermediateOrg, false, nil) + mim.On("CachedIdentityLookupByID", pm.ctx, rootOrg.ID).Return(rootOrg, nil) dataID := fftypes.NewUUID() mdm := pm.data.(*datamocks.Manager) @@ -49,18 +89,8 @@ func TestSendConfirmMessageE2EOk(t *testing.T) { }, nil) mdi := pm.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByName", pm.ctx, "localorg").Return(&fftypes.Organization{ - ID: fftypes.NewUUID(), - }, nil) - mdi.On("GetNodes", pm.ctx, mock.Anything).Return([]*fftypes.Node{ - {ID: fftypes.NewUUID(), Name: "node1", Owner: "localorg"}, - }, nil, nil).Once() - mdi.On("GetOrganizationByName", pm.ctx, "org1").Return(&fftypes.Organization{ - ID: fftypes.NewUUID(), - }, nil) - mdi.On("GetNodes", pm.ctx, mock.Anything).Return([]*fftypes.Node{ - {ID: fftypes.NewUUID(), Name: "node1", Owner: "org1"}, - }, nil, nil).Once() + mdi.On("GetIdentities", pm.ctx, mock.Anything).Return([]*fftypes.Identity{}, nil, nil).Once() + mdi.On("GetIdentities", pm.ctx, mock.Anything).Return([]*fftypes.Identity{localNode}, nil, nil).Once() mdi.On("GetGroups", pm.ctx, mock.Anything).Return([]*fftypes.Group{ {Hash: fftypes.NewRandB32()}, }, nil, nil).Once() @@ -92,6 +122,9 @@ func TestSendConfirmMessageE2EOk(t *testing.T) { assert.NoError(t, err) assert.Equal(t, retMsg, msg) + mim.AssertExpectations(t) + mdm.AssertExpectations(t) + mdi.AssertExpectations(t) } func TestSendUnpinnedMessageE2EOk(t *testing.T) { @@ -100,8 +133,8 @@ func 
TestSendUnpinnedMessageE2EOk(t *testing.T) { defer cancel() mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", pm.ctx, mock.Anything).Run(func(args mock.Arguments) { - identity := args[1].(*fftypes.Identity) + mim.On("ResolveInputSigningIdentity", pm.ctx, "ns1", mock.Anything).Run(func(args mock.Arguments) { + identity := args[2].(*fftypes.SignerRef) identity.Author = "localorg" identity.Key = "localkey" }).Return(nil) @@ -149,7 +182,7 @@ func TestSendMessageBadGroup(t *testing.T) { defer cancel() mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", pm.ctx, mock.Anything).Return(nil) + mim.On("ResolveInputSigningIdentity", pm.ctx, "ns1", mock.Anything).Return(nil) _, err := pm.SendMessage(pm.ctx, "ns1", &fftypes.MessageInOut{ InlineData: fftypes.InlineData{ @@ -169,7 +202,7 @@ func TestSendMessageBadIdentity(t *testing.T) { defer cancel() mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", pm.ctx, mock.Anything).Return(fmt.Errorf("pop")) + mim.On("ResolveInputSigningIdentity", pm.ctx, "ns1", mock.Anything).Return(fmt.Errorf("pop")) _, err := pm.SendMessage(pm.ctx, "ns1", &fftypes.MessageInOut{ InlineData: fftypes.InlineData{ @@ -193,21 +226,19 @@ func TestSendMessageFail(t *testing.T) { defer cancel() mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveLocalOrgDID", pm.ctx).Return("localorg", nil) - mim.On("GetLocalOrganization", pm.ctx).Return(&fftypes.Organization{Identity: "localorg"}, nil) - mim.On("ResolveInputIdentity", pm.ctx, mock.Anything).Run(func(args mock.Arguments) { - identity := args[1].(*fftypes.Identity) + localOrg := newTestOrg("localorg") + localNode := newTestNode("node1", localOrg) + mim.On("ResolveInputSigningIdentity", pm.ctx, "ns1", mock.Anything).Return(nil) + mim.On("GetNodeOwnerOrg", pm.ctx).Return(localOrg, nil) + mim.On("ResolveInputSigningIdentity", pm.ctx, "ns1", mock.Anything).Run(func(args mock.Arguments) { + identity := args[2].(*fftypes.SignerRef) identity.Author = "localorg" identity.Key = "localkey" }).Return(nil) + mim.On("CachedIdentityLookup", pm.ctx, "localorg").Return(localOrg, false, nil) mdi := pm.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByName", pm.ctx, "localorg").Return(&fftypes.Organization{ - ID: fftypes.NewUUID(), - }, nil) - mdi.On("GetNodes", pm.ctx, mock.Anything).Return([]*fftypes.Node{ - {ID: fftypes.NewUUID(), Name: "node1", Owner: "localorg"}, - }, nil, nil) + mdi.On("GetIdentities", pm.ctx, mock.Anything).Return([]*fftypes.Identity{localNode}, nil, nil) mdi.On("GetGroups", pm.ctx, mock.Anything).Return([]*fftypes.Group{ {Hash: fftypes.NewRandB32()}, }, nil, nil) @@ -243,22 +274,19 @@ func TestResolveAndSendBadInlineData(t *testing.T) { defer cancel() mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveLocalOrgDID", pm.ctx).Return("localorg", nil) - mim.On("GetLocalOrganization", pm.ctx).Return(&fftypes.Organization{Identity: "localorg"}, nil) - mim.On("ResolveInputIdentity", pm.ctx, mock.Anything).Run(func(args mock.Arguments) { - identity := args[1].(*fftypes.Identity) + localOrg := newTestOrg("localorg") + localNode := newTestNode("node1", localOrg) + mim.On("ResolveInputSigningIdentity", pm.ctx, "ns1", mock.Anything).Return(nil) + mim.On("GetNodeOwnerOrg", pm.ctx).Return(localOrg, nil) + mim.On("ResolveInputSigningIdentity", pm.ctx, "ns1", mock.Anything).Run(func(args mock.Arguments) { + identity := args[2].(*fftypes.SignerRef) identity.Author = "localorg" identity.Key = "localkey" 
}).Return(nil) + mim.On("CachedIdentityLookup", pm.ctx, "localorg").Return(localOrg, false, nil) mdi := pm.database.(*databasemocks.Plugin) - - mdi.On("GetOrganizationByName", pm.ctx, "localorg").Return(&fftypes.Organization{ - ID: fftypes.NewUUID(), - }, nil) - mdi.On("GetNodes", pm.ctx, mock.Anything).Return([]*fftypes.Node{ - {ID: fftypes.NewUUID(), Name: "node1", Owner: "localorg"}, - }, nil, nil).Once() + mdi.On("GetIdentities", pm.ctx, mock.Anything).Return([]*fftypes.Identity{localNode}, nil, nil).Once() mdi.On("GetGroups", pm.ctx, mock.Anything).Return([]*fftypes.Group{ {Hash: fftypes.NewRandB32()}, }, nil, nil).Once() @@ -295,8 +323,8 @@ func TestSendUnpinnedMessageTooLarge(t *testing.T) { defer cancel() mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", pm.ctx, mock.Anything).Run(func(args mock.Arguments) { - identity := args[1].(*fftypes.Identity) + mim.On("ResolveInputSigningIdentity", pm.ctx, "ns1", mock.Anything).Run(func(args mock.Arguments) { + identity := args[2].(*fftypes.SignerRef) identity.Author = "localorg" identity.Key = "localkey" }).Return(nil) @@ -360,22 +388,19 @@ func TestMessagePrepare(t *testing.T) { defer cancel() mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveLocalOrgDID", pm.ctx).Return("localorg", nil) - mim.On("GetLocalOrganization", pm.ctx).Return(&fftypes.Organization{Identity: "localorg"}, nil) - mim.On("ResolveInputIdentity", pm.ctx, mock.Anything).Run(func(args mock.Arguments) { - identity := args[1].(*fftypes.Identity) + localOrg := newTestOrg("localorg") + localNode := newTestNode("node1", localOrg) + mim.On("ResolveInputSigningIdentity", pm.ctx, "ns1", mock.Anything).Return(nil) + mim.On("GetNodeOwnerOrg", pm.ctx).Return(localOrg, nil) + mim.On("ResolveInputSigningIdentity", pm.ctx, "ns1", mock.Anything).Run(func(args mock.Arguments) { + identity := args[2].(*fftypes.SignerRef) identity.Author = "localorg" identity.Key = "localkey" }).Return(nil) + mim.On("CachedIdentityLookup", pm.ctx, "localorg").Return(localOrg, false, nil) mdi := pm.database.(*databasemocks.Plugin) - - mdi.On("GetOrganizationByName", pm.ctx, "localorg").Return(&fftypes.Organization{ - ID: fftypes.NewUUID(), - }, nil) - mdi.On("GetNodes", pm.ctx, mock.Anything).Return([]*fftypes.Node{ - {ID: fftypes.NewUUID(), Name: "node1", Owner: "localorg"}, - }, nil, nil).Once() + mdi.On("GetIdentities", pm.ctx, mock.Anything).Return([]*fftypes.Identity{localNode}, nil, nil).Once() mdi.On("GetGroups", pm.ctx, mock.Anything).Return([]*fftypes.Group{ {Hash: fftypes.NewRandB32()}, }, nil, nil).Once() @@ -423,7 +448,7 @@ func TestSendUnpinnedMessageGroupLookupFail(t *testing.T) { Messages: []*fftypes.Message{ { Header: fftypes.MessageHeader{ - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "org1", }, TxType: fftypes.TransactionTypeUnpinned, @@ -445,7 +470,7 @@ func TestSendUnpinnedMessageInsertFail(t *testing.T) { defer cancel() mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", pm.ctx, mock.MatchedBy(func(identity *fftypes.Identity) bool { + mim.On("ResolveInputSigningIdentity", pm.ctx, "ns1", mock.MatchedBy(func(identity *fftypes.SignerRef) bool { assert.Empty(t, identity.Author) return true })).Return(nil) @@ -491,7 +516,7 @@ func TestSendUnpinnedMessageConfirmFail(t *testing.T) { defer cancel() mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", pm.ctx, mock.Anything).Return(fmt.Errorf("pop")) + mim.On("ResolveInputSigningIdentity", pm.ctx, "ns1", 
mock.Anything).Return(fmt.Errorf("pop")) _, err := pm.SendMessage(pm.ctx, "ns1", &fftypes.MessageInOut{ Message: fftypes.Message{ @@ -518,7 +543,7 @@ func TestSendUnpinnedMessageResolveGroupFail(t *testing.T) { defer cancel() mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", pm.ctx, mock.Anything).Return(nil) + mim.On("ResolveInputSigningIdentity", pm.ctx, "ns1", mock.Anything).Return(nil) groupID := fftypes.NewRandB32() @@ -557,7 +582,7 @@ func TestSendUnpinnedMessageResolveGroupNotFound(t *testing.T) { defer cancel() mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", pm.ctx, mock.Anything).Return(nil) + mim.On("ResolveInputSigningIdentity", pm.ctx, "ns1", mock.Anything).Return(nil) groupID := fftypes.NewRandB32() @@ -625,7 +650,7 @@ func TestRequestReplySuccess(t *testing.T) { defer cancel() mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", pm.ctx, mock.Anything).Return(nil) + mim.On("ResolveInputSigningIdentity", pm.ctx, "ns1", mock.Anything).Return(nil) msa := pm.syncasync.(*syncasyncmocks.Bridge) msa.On("WaitForReply", pm.ctx, "ns1", mock.Anything, mock.Anything). @@ -651,7 +676,7 @@ func TestRequestReplySuccess(t *testing.T) { Header: fftypes.MessageHeader{ Tag: "mytag", Group: groupID, - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "org1", }, }, @@ -666,31 +691,27 @@ func TestDispatchedUnpinnedMessageMarshalFail(t *testing.T) { defer cancel() mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", pm.ctx, mock.MatchedBy(func(identity *fftypes.Identity) bool { + mim.On("ResolveInputSigningIdentity", pm.ctx, "ns1", mock.MatchedBy(func(identity *fftypes.SignerRef) bool { assert.Equal(t, "localorg", identity.Author) return true })).Return(nil) groupID := fftypes.NewRandB32() - nodeID1 := fftypes.NewUUID() - nodeID2 := fftypes.NewUUID() + node1 := newTestNode("node1", newTestOrg("localorg")) + node2 := newTestNode("node2", newTestOrg("remoteorg")) mdi := pm.database.(*databasemocks.Plugin) mdi.On("GetGroupByHash", pm.ctx, groupID).Return(&fftypes.Group{ Hash: groupID, GroupIdentity: fftypes.GroupIdentity{ Members: fftypes.Members{ - {Node: nodeID1, Identity: "localorg"}, - {Node: nodeID2, Identity: "remoteorg"}, + {Node: node1.ID, Identity: "localorg"}, + {Node: node1.ID, Identity: "remoteorg"}, }, }, }, nil).Once() - mdi.On("GetNodeByID", pm.ctx, nodeID1).Return(&fftypes.Node{ - ID: nodeID1, Name: "node1", Owner: "localorg", DX: fftypes.DXInfo{Peer: "peer1-local"}, - }, nil).Once() - mdi.On("GetNodeByID", pm.ctx, nodeID2).Return(&fftypes.Node{ - ID: nodeID2, Name: "node2", Owner: "org1", DX: fftypes.DXInfo{Peer: "peer2-remote"}, - }, nil).Once() + mdi.On("GetIdentityByID", pm.ctx, node1.ID).Return(node1, nil).Once() + mdi.On("GetIdentityByID", pm.ctx, node1.ID).Return(node2, nil).Once() err := pm.dispatchUnpinnedBatch(pm.ctx, &fftypes.Batch{ ID: fftypes.NewUUID(), @@ -712,36 +733,34 @@ func TestDispatchedUnpinnedMessageOK(t *testing.T) { pm, cancel := newTestPrivateMessaging(t) defer cancel() + localOrg := newTestOrg("localorg") + groupID := fftypes.NewRandB32() + node1 := newTestNode("node1", localOrg) + node2 := newTestNode("node2", newTestOrg("remoteorg")) + mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", pm.ctx, mock.MatchedBy(func(identity *fftypes.Identity) bool { + mim.On("ResolveInputSigningIdentity", pm.ctx, "ns1", mock.MatchedBy(func(identity *fftypes.SignerRef) bool { assert.Equal(t, 
"localorg", identity.Author) return true })).Return(nil) - mim.On("GetLocalOrgKey", pm.ctx).Return("localorg", nil) + mim.On("GetNodeOwnerOrg", pm.ctx).Return(localOrg, nil) mdx := pm.exchange.(*dataexchangemocks.Plugin) - mdx.On("SendMessage", pm.ctx, mock.Anything, "peer2-remote", mock.Anything).Return(nil) - - groupID := fftypes.NewRandB32() - nodeID1 := fftypes.NewUUID() - nodeID2 := fftypes.NewUUID() + mdx.On("SendMessage", pm.ctx, mock.Anything, "node2-peer", mock.Anything).Return(nil) mdi := pm.database.(*databasemocks.Plugin) mdi.On("GetGroupByHash", pm.ctx, groupID).Return(&fftypes.Group{ Hash: groupID, GroupIdentity: fftypes.GroupIdentity{ Members: fftypes.Members{ - {Node: nodeID1, Identity: "localorg"}, - {Node: nodeID2, Identity: "remoteorg"}, + {Node: node1.ID, Identity: "localorg"}, + {Node: node2.ID, Identity: "remoteorg"}, }, }, }, nil).Once() - mdi.On("GetNodeByID", pm.ctx, nodeID1).Return(&fftypes.Node{ - ID: nodeID1, Name: "node1", Owner: "localorg", DX: fftypes.DXInfo{Peer: "peer1-local"}, - }, nil).Once() - mdi.On("GetNodeByID", pm.ctx, nodeID2).Return(&fftypes.Node{ - ID: nodeID2, Name: "node2", Owner: "org1", DX: fftypes.DXInfo{Peer: "peer2-remote"}, - }, nil).Once() + mdi.On("GetIdentityByID", pm.ctx, node1.ID).Return(node1, nil).Once() + mdi.On("GetIdentityByID", pm.ctx, node2.ID).Return(node2, nil).Once() + mdi.On("InsertOperation", pm.ctx, mock.Anything).Return(nil) err := pm.dispatchUnpinnedBatch(pm.ctx, &fftypes.Batch{ @@ -757,7 +776,7 @@ func TestDispatchedUnpinnedMessageOK(t *testing.T) { Header: fftypes.MessageHeader{ Tag: "mytag", Group: groupID, - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "org1", }, }, @@ -776,23 +795,21 @@ func TestSendDataTransferBlobsFail(t *testing.T) { pm, cancel := newTestPrivateMessaging(t) defer cancel() + localOrg := newTestOrg("localorg") + groupID := fftypes.NewRandB32() + node2 := newTestNode("node2", newTestOrg("remoteorg")) + nodes := []*fftypes.Identity{node2} + mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", pm.ctx, mock.MatchedBy(func(identity *fftypes.Identity) bool { + mim.On("ResolveInputIdentity", pm.ctx, mock.MatchedBy(func(identity *fftypes.SignerRef) bool { assert.Equal(t, "localorg", identity.Author) return true })).Return(nil) - mim.On("GetLocalOrgKey", pm.ctx).Return("localorg", nil) - - groupID := fftypes.NewRandB32() - nodeID2 := fftypes.NewUUID() + mim.On("GetNodeOwnerOrg", pm.ctx).Return(localOrg, nil) mdi := pm.database.(*databasemocks.Plugin) mdi.On("GetBlobMatchingHash", pm.ctx, mock.Anything).Return(nil, fmt.Errorf("pop")) - nodes := []*fftypes.Node{{ - ID: nodeID2, Name: "node2", Owner: "org1", DX: fftypes.DXInfo{Peer: "peer2-remote"}, - }} - err := pm.sendData(pm.ctx, &fftypes.TransportWrapper{ Batch: &fftypes.Batch{ ID: fftypes.NewUUID(), @@ -803,7 +820,7 @@ func TestSendDataTransferBlobsFail(t *testing.T) { Header: fftypes.MessageHeader{ Tag: "mytag", Group: groupID, - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "org1", }, }, @@ -828,25 +845,23 @@ func TestSendDataTransferFail(t *testing.T) { pm, cancel := newTestPrivateMessaging(t) defer cancel() + localOrg := newTestOrg("localorg") + groupID := fftypes.NewRandB32() + node2 := newTestNode("node2", newTestOrg("remoteorg")) + nodes := []*fftypes.Identity{node2} + mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", pm.ctx, mock.MatchedBy(func(identity *fftypes.Identity) bool { + mim.On("ResolveInputIdentity", pm.ctx, 
mock.MatchedBy(func(identity *fftypes.SignerRef) bool { assert.Equal(t, "localorg", identity.Author) return true })).Return(nil) - mim.On("GetLocalOrgKey", pm.ctx).Return("localorg", nil) + mim.On("GetNodeOwnerOrg", pm.ctx).Return(localOrg, nil) mdi := pm.database.(*databasemocks.Plugin) mdi.On("InsertOperation", pm.ctx, mock.Anything).Return(nil) mdx := pm.exchange.(*dataexchangemocks.Plugin) - mdx.On("SendMessage", pm.ctx, mock.Anything, "peer2-remote", mock.Anything).Return(fmt.Errorf("pop")) - - groupID := fftypes.NewRandB32() - nodeID2 := fftypes.NewUUID() - - nodes := []*fftypes.Node{{ - ID: nodeID2, Name: "node2", Owner: "org1", DX: fftypes.DXInfo{Peer: "peer2-remote"}, - }} + mdx.On("SendMessage", pm.ctx, mock.Anything, "node2-peer", mock.Anything).Return(fmt.Errorf("pop")) err := pm.sendData(pm.ctx, &fftypes.TransportWrapper{ Batch: &fftypes.Batch{ @@ -858,7 +873,7 @@ func TestSendDataTransferFail(t *testing.T) { Header: fftypes.MessageHeader{ Tag: "mytag", Group: groupID, - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "org1", }, }, @@ -878,23 +893,21 @@ func TestSendDataTransferInsertOperationFail(t *testing.T) { pm, cancel := newTestPrivateMessaging(t) defer cancel() + localOrg := newTestOrg("localorg") + groupID := fftypes.NewRandB32() + node2 := newTestNode("node2", newTestOrg("remoteorg")) + nodes := []*fftypes.Identity{node2} + mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", pm.ctx, mock.MatchedBy(func(identity *fftypes.Identity) bool { + mim.On("ResolveInputIdentity", pm.ctx, mock.MatchedBy(func(identity *fftypes.SignerRef) bool { assert.Equal(t, "localorg", identity.Author) return true })).Return(nil) - mim.On("GetLocalOrgKey", pm.ctx).Return("localorg", nil) + mim.On("GetNodeOwnerOrg", pm.ctx).Return(localOrg, nil) mdi := pm.database.(*databasemocks.Plugin) mdi.On("InsertOperation", pm.ctx, mock.Anything).Return(fmt.Errorf("pop")) - groupID := fftypes.NewRandB32() - nodeID2 := fftypes.NewUUID() - - nodes := []*fftypes.Node{{ - ID: nodeID2, Name: "node2", Owner: "org1", DX: fftypes.DXInfo{Peer: "peer2-remote"}, - }} - err := pm.sendData(pm.ctx, &fftypes.TransportWrapper{ Batch: &fftypes.Batch{ ID: fftypes.NewUUID(), @@ -905,7 +918,7 @@ func TestSendDataTransferInsertOperationFail(t *testing.T) { Header: fftypes.MessageHeader{ Tag: "mytag", Group: groupID, - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "org1", }, }, @@ -917,59 +930,3 @@ func TestSendDataTransferInsertOperationFail(t *testing.T) { assert.Regexp(t, "pop", err) } - -func TestDispatchedUnpinnedMessageGetOrgFail(t *testing.T) { - - pm, cancel := newTestPrivateMessaging(t) - defer cancel() - - mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveInputIdentity", pm.ctx, mock.MatchedBy(func(identity *fftypes.Identity) bool { - assert.Equal(t, "localorg", identity.Author) - return true - })).Return(nil) - mim.On("GetLocalOrgKey", pm.ctx).Return("", fmt.Errorf("pop")) - - groupID := fftypes.NewRandB32() - nodeID1 := fftypes.NewUUID() - nodeID2 := fftypes.NewUUID() - - mdi := pm.database.(*databasemocks.Plugin) - mdi.On("GetGroupByHash", pm.ctx, groupID).Return(&fftypes.Group{ - Hash: groupID, - GroupIdentity: fftypes.GroupIdentity{ - Members: fftypes.Members{ - {Node: nodeID1, Identity: "localorg"}, - {Node: nodeID2, Identity: "remoteorg"}, - }, - }, - }, nil).Once() - mdi.On("GetNodeByID", pm.ctx, nodeID1).Return(&fftypes.Node{ - ID: nodeID1, Name: "node1", Owner: "localorg", DX: fftypes.DXInfo{Peer: "peer1-local"}, - }, 
nil).Once() - mdi.On("GetNodeByID", pm.ctx, nodeID2).Return(&fftypes.Node{ - ID: nodeID2, Name: "node2", Owner: "org1", DX: fftypes.DXInfo{Peer: "peer2-remote"}, - }, nil).Once() - - err := pm.dispatchUnpinnedBatch(pm.ctx, &fftypes.Batch{ - ID: fftypes.NewUUID(), - Group: groupID, - Payload: fftypes.BatchPayload{ - Messages: []*fftypes.Message{ - { - Header: fftypes.MessageHeader{ - Tag: "mytag", - Group: groupID, - Identity: fftypes.Identity{ - Author: "org1", - }, - }, - }, - }, - }, - }, []*fftypes.Bytes32{}) - assert.Regexp(t, "pop", err) - - mdi.AssertExpectations(t) - -} diff --git a/internal/privatemessaging/privatemessaging.go b/internal/privatemessaging/privatemessaging.go index 7717d4d180..16ff9db9f2 100644 --- a/internal/privatemessaging/privatemessaging.go +++ b/internal/privatemessaging/privatemessaging.go @@ -142,6 +142,7 @@ func (pm *privateMessaging) dispatchPinnedBatch(ctx context.Context, batch *ffty return err } + log.L(ctx).Infof("Pinning private batch %s with author=%s key=%s group=%s", batch.ID, batch.Author, batch.Key, batch.Group) return pm.batchpin.SubmitPinnedBatch(ctx, batch, contexts) } @@ -169,7 +170,7 @@ func (pm *privateMessaging) dispatchBatchCommon(ctx context.Context, batch *ffty return pm.sendData(ctx, tw, nodes) } -func (pm *privateMessaging) transferBlobs(ctx context.Context, data []*fftypes.Data, txid *fftypes.UUID, node *fftypes.Node) error { +func (pm *privateMessaging) transferBlobs(ctx context.Context, data []*fftypes.Data, txid *fftypes.UUID, node *fftypes.Identity) error { // Send all the blobs associated with this batch for _, d := range data { // We only need to send a blob if there is one, and it's not been uploaded to the public storage @@ -194,7 +195,7 @@ func (pm *privateMessaging) transferBlobs(ctx context.Context, data []*fftypes.D return err } - if err := pm.exchange.TransferBLOB(ctx, op.ID, node.DX.Peer, blob.PayloadRef); err != nil { + if err := pm.exchange.TransferBLOB(ctx, op.ID, node.Profile.GetString("id"), blob.PayloadRef); err != nil { return err } } @@ -202,7 +203,7 @@ func (pm *privateMessaging) transferBlobs(ctx context.Context, data []*fftypes.D return nil } -func (pm *privateMessaging) sendData(ctx context.Context, tw *fftypes.TransportWrapper, nodes []*fftypes.Node) (err error) { +func (pm *privateMessaging) sendData(ctx context.Context, tw *fftypes.TransportWrapper, nodes []*fftypes.Identity) (err error) { l := log.L(ctx) batch := tw.Batch @@ -211,8 +212,8 @@ func (pm *privateMessaging) sendData(ctx context.Context, tw *fftypes.TransportW return i18n.WrapError(ctx, err, i18n.MsgSerializationFailed) } - // TODO: move to using DIDs consistently as the way to reference the node/organization (i.e. 
node.Owner becomes a DID) - localOrgSigingKey, err := pm.identity.GetLocalOrgKey(ctx) + // Lookup the local org + localOrg, err := pm.identity.GetNodeOwnerOrg(ctx) if err != nil { return err } @@ -220,7 +221,7 @@ func (pm *privateMessaging) sendData(ctx context.Context, tw *fftypes.TransportW // Write it to the dataexchange for each member for i, node := range nodes { - if node.Owner == localOrgSigingKey { + if node.Parent.Equals(localOrg.ID) { l.Debugf("Skipping send of batch for local node %s:%s for group=%s node=%s (%d/%d)", batch.Namespace, batch.ID, batch.Group, node.ID, i+1, len(nodes)) continue } @@ -245,7 +246,7 @@ func (pm *privateMessaging) sendData(ctx context.Context, tw *fftypes.TransportW } // Send the payload itself - err := pm.exchange.SendMessage(ctx, op.ID, node.DX.Peer, payload) + err := pm.exchange.SendMessage(ctx, op.ID, node.Profile.GetString("id"), payload) if err != nil { return err } diff --git a/internal/privatemessaging/privatemessaging_test.go b/internal/privatemessaging/privatemessaging_test.go index 093a0333a3..bc88e93adf 100644 --- a/internal/privatemessaging/privatemessaging_test.go +++ b/internal/privatemessaging/privatemessaging_test.go @@ -103,12 +103,13 @@ func TestDispatchBatchWithBlobs(t *testing.T) { pm, cancel := newTestPrivateMessaging(t) defer cancel() + localOrg := newTestOrg("localorg") batchID := fftypes.NewUUID() groupID := fftypes.NewRandB32() pin1 := fftypes.NewRandB32() pin2 := fftypes.NewRandB32() - node1 := fftypes.NewUUID() - node2 := fftypes.NewUUID() + node1 := newTestNode("node1", localOrg) + node2 := newTestNode("node2", newTestOrg("remoteorg")) txID := fftypes.NewUUID() batchHash := fftypes.NewRandB32() dataID1 := fftypes.NewUUID() @@ -120,52 +121,32 @@ func TestDispatchBatchWithBlobs(t *testing.T) { mim := pm.identity.(*identitymanagermocks.Manager) mim.On("ResolveInputIdentity", pm.ctx, mock.Anything).Run(func(args mock.Arguments) { - identity := args[1].(*fftypes.Identity) + identity := args[1].(*fftypes.SignerRef) assert.Equal(t, "org1", identity.Author) identity.Key = "0x12345" }).Return(nil) - mim.On("GetLocalOrgKey", pm.ctx).Return("localorg", nil) + mim.On("GetNodeOwnerOrg", pm.ctx).Return(localOrg, nil) mdi.On("GetGroupByHash", pm.ctx, groupID).Return(&fftypes.Group{ Hash: fftypes.NewRandB32(), GroupIdentity: fftypes.GroupIdentity{ Name: "group1", Members: fftypes.Members{ - {Identity: "org1", Node: node1}, - {Identity: "org2", Node: node2}, + {Identity: "org1", Node: node1.ID}, + {Identity: "org2", Node: node2.ID}, }, }, }, nil) - mdi.On("GetNodeByID", pm.ctx, node1).Return(&fftypes.Node{ - ID: node1, - DX: fftypes.DXInfo{ - Peer: "node1", - Endpoint: fftypes.JSONObject{"url": "https://node1.example.com"}, - }, - }, nil).Once() - mdi.On("GetNodeByID", pm.ctx, node2).Return(&fftypes.Node{ - ID: node2, - DX: fftypes.DXInfo{ - Peer: "node2", - Endpoint: fftypes.JSONObject{"url": "https://node2.example.com"}, - }, - }, nil).Once() + mdi.On("GetIdentityByID", pm.ctx, node1.ID).Return(node1, nil).Once() + mdi.On("GetIdentityByID", pm.ctx, node2.ID).Return(node2, nil).Once() mdi.On("GetBlobMatchingHash", pm.ctx, blob1).Return(&fftypes.Blob{ Hash: blob1, PayloadRef: "/blob/1", }, nil) - mdx.On("TransferBLOB", pm.ctx, mock.Anything, "node1", "/blob/1").Return(nil).Once() - mdi.On("InsertOperation", pm.ctx, mock.MatchedBy(func(op *fftypes.Operation) bool { - return op.Type == fftypes.OpTypeDataExchangeBlobSend - })).Return(nil, nil) - mdx.On("TransferBLOB", pm.ctx, mock.Anything, "node2", "/blob/1").Return(nil).Once() + 
mdx.On("TransferBLOB", pm.ctx, mock.Anything, "node2-peer", "/blob/1").Return(nil).Once() mdi.On("InsertOperation", pm.ctx, mock.MatchedBy(func(op *fftypes.Operation) bool { return op.Type == fftypes.OpTypeDataExchangeBlobSend })).Return(nil, nil) - mdx.On("SendMessage", pm.ctx, mock.Anything, mock.Anything, mock.Anything).Return(nil).Once() - mdi.On("InsertOperation", pm.ctx, mock.MatchedBy(func(op *fftypes.Operation) bool { - return op.Type == fftypes.OpTypeDataExchangeBatchSend - })).Return(nil, nil) mdx.On("SendMessage", pm.ctx, mock.Anything, mock.Anything, mock.Anything).Return(nil).Once() mdi.On("InsertOperation", pm.ctx, mock.MatchedBy(func(op *fftypes.Operation) bool { return op.Type == fftypes.OpTypeDataExchangeBatchSend @@ -175,7 +156,7 @@ func TestDispatchBatchWithBlobs(t *testing.T) { err := pm.dispatchPinnedBatch(pm.ctx, &fftypes.Batch{ ID: batchID, - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "org1", }, Group: groupID, @@ -238,40 +219,54 @@ func TestSendAndSubmitBatchBadID(t *testing.T) { mdi := pm.database.(*databasemocks.Plugin) mdi.On("GetGroupByHash", pm.ctx, mock.Anything).Return(nil, fmt.Errorf("pop")) - mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrgKey", pm.ctx).Return("localorgkey", nil) - mim.On("ResolveInputIdentity", pm.ctx, mock.MatchedBy(func(identity *fftypes.Identity) bool { - assert.Equal(t, "badauthor", identity.Author) - return true - })).Return(fmt.Errorf("pop")) - mbp := pm.batchpin.(*batchpinmocks.Submitter) mbp.On("SubmitPinnedBatch", pm.ctx, mock.Anything, mock.Anything).Return(fmt.Errorf("pop")) err := pm.dispatchPinnedBatch(pm.ctx, &fftypes.Batch{ - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "badauthor", }, }, []*fftypes.Bytes32{}) assert.Regexp(t, "pop", err) + + mdi.AssertExpectations(t) } func TestSendAndSubmitBatchUnregisteredNode(t *testing.T) { pm, cancel := newTestPrivateMessaging(t) defer cancel() + groupID := fftypes.NewRandB32() + node1 := newTestNode("node1", newTestOrg("localorg")) + node2 := newTestNode("node2", newTestOrg("remoteorg")) + mdi := pm.database.(*databasemocks.Plugin) - mdi.On("GetGroupByHash", pm.ctx, mock.Anything).Return(nil, fmt.Errorf("pop")) + mdi.On("GetIdentityByID", pm.ctx, node1.ID).Return(node1, nil).Once() + mdi.On("GetIdentityByID", pm.ctx, node2.ID).Return(node2, nil).Once() + mdi.On("GetGroupByHash", pm.ctx, groupID).Return(&fftypes.Group{ + Hash: fftypes.NewRandB32(), + GroupIdentity: fftypes.GroupIdentity{ + Name: "group1", + Members: fftypes.Members{ + {Identity: "org1", Node: node1.ID}, + {Identity: "org2", Node: node2.ID}, + }, + }, + }, nil) mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrgKey", pm.ctx).Return("", fmt.Errorf("pop")) + mim.On("GetNodeOwnerOrg", pm.ctx).Return(nil, fmt.Errorf("pop")) err := pm.dispatchPinnedBatch(pm.ctx, &fftypes.Batch{ - Identity: fftypes.Identity{ + Group: groupID, + SignerRef: fftypes.SignerRef{ Author: "badauthor", }, }, []*fftypes.Bytes32{}) assert.Regexp(t, "pop", err) + + mdi.AssertExpectations(t) + mim.AssertExpectations(t) } func TestSendImmediateFail(t *testing.T) { @@ -281,36 +276,46 @@ func TestSendImmediateFail(t *testing.T) { mdi := pm.database.(*databasemocks.Plugin) mdi.On("GetGroupByHash", pm.ctx, mock.Anything).Return(nil, fmt.Errorf("pop")) - mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrgKey", pm.ctx).Return("localorg", nil) - - mdx := pm.exchange.(*dataexchangemocks.Plugin) - mdx.On("SendMessage", pm.ctx, mock.Anything, 
mock.Anything, mock.Anything).Return(fmt.Errorf("pop")) - err := pm.dispatchPinnedBatch(pm.ctx, &fftypes.Batch{ - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "org1", }, }, []*fftypes.Bytes32{}) assert.Regexp(t, "pop", err) + + mdi.AssertExpectations(t) } func TestSendSubmitInsertOperationFail(t *testing.T) { pm, cancel := newTestPrivateMessaging(t) defer cancel() - mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrgKey", pm.ctx).Return("localorgkey", nil) + localOrg := newTestOrg("localorg") + groupID := fftypes.NewRandB32() + node1 := newTestNode("node1", localOrg) + node2 := newTestNode("node2", newTestOrg("remoteorg")) - mdx := pm.exchange.(*dataexchangemocks.Plugin) - mdx.On("SendMessage", pm.ctx, mock.Anything, mock.Anything, mock.Anything).Return(nil) + mim := pm.identity.(*identitymanagermocks.Manager) + mim.On("GetNodeOwnerOrg", pm.ctx).Return(localOrg, nil) mdi := pm.database.(*databasemocks.Plugin) - mdi.On("GetGroupByHash", pm.ctx, mock.Anything).Return(nil, fmt.Errorf("pop")) + mdi.On("GetIdentityByID", pm.ctx, node1.ID).Return(node1, nil).Once() + mdi.On("GetIdentityByID", pm.ctx, node2.ID).Return(node2, nil).Once() + mdi.On("GetGroupByHash", pm.ctx, groupID).Return(&fftypes.Group{ + Hash: fftypes.NewRandB32(), + GroupIdentity: fftypes.GroupIdentity{ + Name: "group1", + Members: fftypes.Members{ + {Identity: "org1", Node: node1.ID}, + {Identity: "org2", Node: node2.ID}, + }, + }, + }, nil) mdi.On("InsertOperation", pm.ctx, mock.Anything).Return(fmt.Errorf("pop")) err := pm.dispatchPinnedBatch(pm.ctx, &fftypes.Batch{ - Identity: fftypes.Identity{ + Group: groupID, + SignerRef: fftypes.SignerRef{ Author: "org1", }, Payload: fftypes.BatchPayload{ @@ -320,49 +325,120 @@ func TestSendSubmitInsertOperationFail(t *testing.T) { }, }, []*fftypes.Bytes32{}) assert.Regexp(t, "pop", err) + + mdi.AssertExpectations(t) + mim.AssertExpectations(t) } func TestSendSubmitBlobTransferFail(t *testing.T) { pm, cancel := newTestPrivateMessaging(t) defer cancel() + localOrg := newTestOrg("localorg") + groupID := fftypes.NewRandB32() + node1 := newTestNode("node1", localOrg) + node2 := newTestNode("node2", newTestOrg("remoteorg")) + blob1 := fftypes.NewRandB32() + mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("GetLocalOrgKey", pm.ctx).Return("localorgkey", nil) + mim.On("GetNodeOwnerOrg", pm.ctx).Return(localOrg, nil) mdi := pm.database.(*databasemocks.Plugin) - mdi.On("GetGroupByHash", pm.ctx, mock.Anything).Return(nil, fmt.Errorf("pop")) - mdi.On("GetBlobMatchingHash", pm.ctx, mock.Anything).Return(nil, fmt.Errorf("pop")) + mdi.On("GetIdentityByID", pm.ctx, node1.ID).Return(node1, nil).Once() + mdi.On("GetIdentityByID", pm.ctx, node2.ID).Return(node2, nil).Once() + mdi.On("GetGroupByHash", pm.ctx, groupID).Return(&fftypes.Group{ + Hash: fftypes.NewRandB32(), + GroupIdentity: fftypes.GroupIdentity{ + Name: "group1", + Members: fftypes.Members{ + {Identity: "org1", Node: node1.ID}, + {Identity: "org2", Node: node2.ID}, + }, + }, + }, nil) + mdi.On("InsertOperation", pm.ctx, mock.Anything).Return(nil) + mdi.On("GetBlobMatchingHash", pm.ctx, blob1).Return(&fftypes.Blob{ + Hash: blob1, + PayloadRef: "/blob/1", + }, nil) + + mdx := pm.exchange.(*dataexchangemocks.Plugin) + mdx.On("TransferBLOB", pm.ctx, mock.Anything, "node2-peer", "/blob/1").Return(fmt.Errorf("pop")).Once() err := pm.dispatchPinnedBatch(pm.ctx, &fftypes.Batch{ - Identity: fftypes.Identity{ + Group: groupID, + SignerRef: fftypes.SignerRef{ Author: "org1", }, Payload: 
fftypes.BatchPayload{ Data: []*fftypes.Data{ - {ID: fftypes.NewUUID(), Blob: &fftypes.BlobRef{Hash: fftypes.NewRandB32()}}, + {ID: fftypes.NewUUID(), Blob: &fftypes.BlobRef{Hash: blob1}}, }, }, }, []*fftypes.Bytes32{}) assert.Regexp(t, "pop", err) + + mdi.AssertExpectations(t) + mim.AssertExpectations(t) + mdx.AssertExpectations(t) } func TestWriteTransactionSubmitBatchPinFail(t *testing.T) { pm, cancel := newTestPrivateMessaging(t) defer cancel() + localOrg := newTestOrg("localorg") + groupID := fftypes.NewRandB32() + node1 := newTestNode("node1", localOrg) + node2 := newTestNode("node2", newTestOrg("remoteorg")) + blob1 := fftypes.NewRandB32() + + mim := pm.identity.(*identitymanagermocks.Manager) + mim.On("GetNodeOwnerOrg", pm.ctx).Return(localOrg, nil) + mdi := pm.database.(*databasemocks.Plugin) - mdi.On("GetGroupByHash", pm.ctx, mock.Anything).Return(nil, fmt.Errorf("pop")) - mdi.On("UpsertTransaction", pm.ctx, mock.Anything, true, false).Return(nil) + mdi.On("GetIdentityByID", pm.ctx, node1.ID).Return(node1, nil).Once() + mdi.On("GetIdentityByID", pm.ctx, node2.ID).Return(node2, nil).Once() + mdi.On("GetGroupByHash", pm.ctx, groupID).Return(&fftypes.Group{ + Hash: fftypes.NewRandB32(), + GroupIdentity: fftypes.GroupIdentity{ + Name: "group1", + Members: fftypes.Members{ + {Identity: "org1", Node: node1.ID}, + {Identity: "org2", Node: node2.ID}, + }, + }, + }, nil) mdi.On("InsertOperation", pm.ctx, mock.Anything).Return(nil) + mdi.On("GetBlobMatchingHash", pm.ctx, blob1).Return(&fftypes.Blob{ + Hash: blob1, + PayloadRef: "/blob/1", + }, nil) + + mdx := pm.exchange.(*dataexchangemocks.Plugin) + mdx.On("TransferBLOB", pm.ctx, mock.Anything, "node2-peer", "/blob/1").Return(nil).Once() + mdx.On("SendMessage", pm.ctx, mock.Anything, mock.Anything, mock.Anything).Return(nil).Once() mbp := pm.batchpin.(*batchpinmocks.Submitter) mbp.On("SubmitPinnedBatch", pm.ctx, mock.Anything, mock.Anything).Return(fmt.Errorf("pop")) err := pm.dispatchPinnedBatch(pm.ctx, &fftypes.Batch{ - Identity: fftypes.Identity{ + Group: groupID, + SignerRef: fftypes.SignerRef{ Author: "org1", - }}, []*fftypes.Bytes32{}) + }, + Payload: fftypes.BatchPayload{ + Data: []*fftypes.Data{ + {ID: fftypes.NewUUID(), Blob: &fftypes.BlobRef{Hash: blob1}}, + }, + }, + }, []*fftypes.Bytes32{}) assert.Regexp(t, "pop", err) + + mdi.AssertExpectations(t) + mim.AssertExpectations(t) + mdx.AssertExpectations(t) + mbp.AssertExpectations(t) } func TestTransferBlobsNotFound(t *testing.T) { @@ -374,8 +450,10 @@ func TestTransferBlobsNotFound(t *testing.T) { err := pm.transferBlobs(pm.ctx, []*fftypes.Data{ {ID: fftypes.NewUUID(), Hash: fftypes.NewRandB32(), Blob: &fftypes.BlobRef{Hash: fftypes.NewRandB32()}}, - }, fftypes.NewUUID(), &fftypes.Node{ID: fftypes.NewUUID(), DX: fftypes.DXInfo{Peer: "peer1"}}) + }, fftypes.NewUUID(), newTestNode("node1", newTestOrg("org1"))) assert.Regexp(t, "FF10239", err) + + mdi.AssertExpectations(t) } func TestTransferBlobsFail(t *testing.T) { @@ -385,13 +463,16 @@ func TestTransferBlobsFail(t *testing.T) { mdi := pm.database.(*databasemocks.Plugin) mdi.On("GetBlobMatchingHash", pm.ctx, mock.Anything).Return(&fftypes.Blob{PayloadRef: "blob/1"}, nil) mdx := pm.exchange.(*dataexchangemocks.Plugin) - mdx.On("TransferBLOB", pm.ctx, mock.Anything, "peer1", "blob/1").Return(fmt.Errorf("pop")) + mdx.On("TransferBLOB", pm.ctx, mock.Anything, "node1-peer", "blob/1").Return(fmt.Errorf("pop")) mdi.On("InsertOperation", pm.ctx, mock.Anything).Return(nil) err := pm.transferBlobs(pm.ctx, []*fftypes.Data{ {ID: 
fftypes.NewUUID(), Hash: fftypes.NewRandB32(), Blob: &fftypes.BlobRef{Hash: fftypes.NewRandB32()}}, - }, fftypes.NewUUID(), &fftypes.Node{ID: fftypes.NewUUID(), DX: fftypes.DXInfo{Peer: "peer1"}}) + }, fftypes.NewUUID(), newTestNode("node1", newTestOrg("org1"))) assert.Regexp(t, "pop", err) + + mdi.AssertExpectations(t) + mdx.AssertExpectations(t) } func TestTransferBlobsOpInsertFail(t *testing.T) { @@ -399,16 +480,16 @@ func TestTransferBlobsOpInsertFail(t *testing.T) { defer cancel() mdi := pm.database.(*databasemocks.Plugin) - mdx := pm.exchange.(*dataexchangemocks.Plugin) mdi.On("GetBlobMatchingHash", pm.ctx, mock.Anything).Return(&fftypes.Blob{PayloadRef: "blob/1"}, nil) - mdx.On("TransferBLOB", pm.ctx, mock.Anything, "peer1", "blob/1").Return(nil) mdi.On("InsertOperation", pm.ctx, mock.Anything).Return(fmt.Errorf("pop")) err := pm.transferBlobs(pm.ctx, []*fftypes.Data{ {ID: fftypes.NewUUID(), Hash: fftypes.NewRandB32(), Blob: &fftypes.BlobRef{Hash: fftypes.NewRandB32()}}, - }, fftypes.NewUUID(), &fftypes.Node{ID: fftypes.NewUUID(), DX: fftypes.DXInfo{Peer: "peer1"}}) + }, fftypes.NewUUID(), newTestNode("node1", newTestOrg("org1"))) assert.Regexp(t, "pop", err) + + mdi.AssertExpectations(t) } func TestStart(t *testing.T) { diff --git a/internal/privatemessaging/recipients.go b/internal/privatemessaging/recipients.go index bfb1edea10..4bffb9703b 100644 --- a/internal/privatemessaging/recipients.go +++ b/internal/privatemessaging/recipients.go @@ -19,7 +19,6 @@ package privatemessaging import ( "context" "fmt" - "strings" "github.com/hyperledger/firefly/internal/i18n" "github.com/hyperledger/firefly/internal/log" @@ -52,60 +51,41 @@ func (pm *privateMessaging) resolveRecipientList(ctx context.Context, in *fftype // If the group is new, we need to do a group initialization, before we send the message itself. 
if isNew { - return pm.groupManager.groupInit(ctx, &in.Header.Identity, group) + return pm.groupManager.groupInit(ctx, &in.Header.SignerRef, group) } return err } -func (pm *privateMessaging) resolveOrg(ctx context.Context, orgInput string) (org *fftypes.Organization, err error) { - orgInput = strings.TrimPrefix(orgInput, fftypes.FireflyOrgDIDPrefix) - orgID, err := fftypes.ParseUUID(ctx, orgInput) - if err == nil { - org, err = pm.database.GetOrganizationByID(ctx, orgID) - } else { - org, err = pm.database.GetOrganizationByName(ctx, orgInput) - if err == nil && org == nil { - org, err = pm.database.GetOrganizationByIdentity(ctx, orgInput) - } - } - if err != nil { - return nil, err - } - if org == nil { - return nil, i18n.NewError(ctx, i18n.MsgOrgNotFound, orgInput) - } - return org, nil -} - -func (pm *privateMessaging) resolveNode(ctx context.Context, org *fftypes.Organization, nodeInput string) (node *fftypes.Node, err error) { +func (pm *privateMessaging) resolveNode(ctx context.Context, identity *fftypes.Identity, nodeInput string) (node *fftypes.Identity, err error) { + retryable := true if nodeInput != "" { - var nodeID *fftypes.UUID - nodeID, err = fftypes.ParseUUID(ctx, nodeInput) - if err == nil { - node, err = pm.database.GetNodeByID(ctx, nodeID) - } else { - node, err = pm.database.GetNode(ctx, org.Identity, nodeInput) - } + node, retryable, err = pm.identity.CachedIdentityLookup(ctx, nodeInput) } else { // Find any node owned by this organization - var nodes []*fftypes.Node - originalOrgName := fmt.Sprintf("%s/%s", org.Name, org.Identity) - for org != nil && node == nil { - filter := database.NodeQueryFactory.NewFilterLimit(ctx, 1).Eq("owner", org.Identity) - nodes, _, err = pm.database.GetNodes(ctx, filter) + inputIdentityDebugInfo := fmt.Sprintf("%s (%s)", identity.DID, identity.ID) + for identity != nil && node == nil { + var nodes []*fftypes.Identity + if identity.Type == fftypes.IdentityTypeOrg { + fb := database.IdentityQueryFactory.NewFilterLimit(ctx, 1) + filter := fb.And( + fb.Eq("parent", identity.ID), + fb.Eq("type", fftypes.IdentityTypeNode), + ) + nodes, _, err = pm.database.GetIdentities(ctx, filter) + } switch { case err == nil && len(nodes) > 0: - // This org owns a node + // This is an org, and it owns a node node = nodes[0] - case err == nil && org.Parent != "": - // This org has a parent, maybe that org owns a node - org, err = pm.database.GetOrganizationByIdentity(ctx, org.Parent) + case err == nil && identity.Parent != nil: + // This identity has a parent, maybe that org owns a node + identity, err = pm.identity.CachedIdentityLookupByID(ctx, identity.Parent) default: - return nil, i18n.NewError(ctx, i18n.MsgNodeNotFoundInOrg, originalOrgName) + return nil, i18n.NewError(ctx, i18n.MsgNodeNotFoundInOrg, inputIdentityDebugInfo) } } } - if err != nil { + if err != nil && retryable { return nil, err } if node == nil { @@ -116,12 +96,7 @@ func (pm *privateMessaging) resolveNode(ctx context.Context, org *fftypes.Organi func (pm *privateMessaging) getRecipients(ctx context.Context, in *fftypes.MessageInOut) (gi *fftypes.GroupIdentity, err error) { - localOrgDID, err := pm.identity.ResolveLocalOrgDID(ctx) - if err != nil { - return nil, err - } - - localOrg, err := pm.identity.GetLocalOrganization(ctx) + localOrg, err := pm.identity.GetNodeOwnerOrg(ctx) if err != nil { return nil, err } @@ -134,46 +109,49 @@ func (pm *privateMessaging) getRecipients(ctx context.Context, in *fftypes.Messa Members: make(fftypes.Members, len(in.Group.Members)), } for i, rInput := 
range in.Group.Members { - // Resolve the org - org, err := pm.resolveOrg(ctx, rInput.Identity) + // Resolve the identity + identity, _, err := pm.identity.CachedIdentityLookup(ctx, rInput.Identity) if err != nil { return nil, err } // Resolve the node - node, err := pm.resolveNode(ctx, org, rInput.Node) + node, err := pm.resolveNode(ctx, identity, rInput.Node) if err != nil { return nil, err } - foundLocal = foundLocal || (node.Owner == localOrg.Identity && node.Name == pm.localNodeName) + isLocal := (node.Parent.Equals(localOrg.ID) && node.Name == pm.localNodeName) + foundLocal = foundLocal || isLocal + log.L(ctx).Debugf("Resolved group identity %s node=%s to identity %s node=%s local=%t", rInput.Identity, rInput.Node, identity.DID, node.ID, isLocal) gi.Members[i] = &fftypes.Member{ - Identity: org.GetDID(), + Identity: identity.DID, Node: node.ID, } } if !foundLocal { // Add in the local org identity - localNodeID, err := pm.resolveLocalNode(ctx, localOrg.Identity) + localNodeID, err := pm.resolveLocalNode(ctx, localOrg) if err != nil { return nil, err } gi.Members = append(gi.Members, &fftypes.Member{ - Identity: localOrgDID, + Identity: localOrg.DID, Node: localNodeID, }) } return gi, nil } -func (pm *privateMessaging) resolveLocalNode(ctx context.Context, localOrgSigningKey string) (*fftypes.UUID, error) { +func (pm *privateMessaging) resolveLocalNode(ctx context.Context, localOrg *fftypes.Identity) (*fftypes.UUID, error) { if pm.localNodeID != nil { return pm.localNodeID, nil } - fb := database.NodeQueryFactory.NewFilterLimit(ctx, 1) + fb := database.IdentityQueryFactory.NewFilterLimit(ctx, 1) filter := fb.And( - fb.Eq("owner", localOrgSigningKey), + fb.Eq("parent", localOrg.ID), + fb.Eq("type", fftypes.IdentityTypeNode), fb.Eq("name", pm.localNodeName), ) - nodes, _, err := pm.database.GetNodes(ctx, filter) + nodes, _, err := pm.database.GetIdentities(ctx, filter) if err != nil { return nil, err } diff --git a/internal/privatemessaging/recipients_test.go b/internal/privatemessaging/recipients_test.go index 1234fe89ca..47ba503e1d 100644 --- a/internal/privatemessaging/recipients_test.go +++ b/internal/privatemessaging/recipients_test.go @@ -34,29 +34,21 @@ func TestResolveMemberListNewGroupE2E(t *testing.T) { pm, cancel := newTestPrivateMessaging(t) defer cancel() - mdi := pm.database.(*databasemocks.Plugin) - nodeIDRemote := fftypes.NewUUID() - nodeIDLocal := fftypes.NewUUID() - orgIDLocal := fftypes.NewUUID() - orgIDRemote := fftypes.NewUUID() - - orgNameRemote := "remoteOrg" - - signingKeyLocal := "localSigningKey" - signingKeyRemote := "remoteSigningKey" - - orgDIDLocal := "did:firefly:org/" + orgIDLocal.String() - orgDIDRemote := "did:firefly:org/" + orgIDRemote.String() + localOrg := newTestOrg("localorg") + remoteOrg := newTestOrg("remoteorg") + localNode := newTestNode("node1", localOrg) + remoteNode := newTestNode("node2", remoteOrg) var dataID *fftypes.UUID - mdi.On("GetOrganizationByName", pm.ctx, orgNameRemote).Return(&fftypes.Organization{ID: orgIDRemote, Identity: signingKeyRemote}, nil) - mdi.On("GetNodes", pm.ctx, mock.Anything).Return([]*fftypes.Node{{ID: nodeIDRemote, Name: "node2", Owner: signingKeyRemote}}, nil, nil).Once() - mdi.On("GetNodes", pm.ctx, mock.Anything).Return([]*fftypes.Node{{ID: nodeIDLocal, Name: "node1", Owner: signingKeyLocal}}, nil, nil).Once() + mdi := pm.database.(*databasemocks.Plugin) + mdi.On("GetIdentities", pm.ctx, mock.Anything).Return([]*fftypes.Identity{remoteNode}, nil, nil).Once() + mdi.On("GetIdentities", pm.ctx, 
mock.Anything).Return([]*fftypes.Identity{localNode}, nil, nil).Once() mdi.On("GetGroups", pm.ctx, mock.Anything).Return([]*fftypes.Group{}, nil, nil) mdi.On("UpsertGroup", pm.ctx, mock.Anything, database.UpsertOptimizationNew).Return(nil) + mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveLocalOrgDID", pm.ctx).Return(orgDIDLocal, nil) - mim.On("GetLocalOrganization", pm.ctx).Return(&fftypes.Organization{Identity: signingKeyLocal}, nil) + mim.On("CachedIdentityLookup", pm.ctx, "remoteorg").Return(remoteOrg, false, nil) + mim.On("GetNodeOwnerOrg", pm.ctx).Return(localOrg, nil) ud := mdi.On("UpsertData", pm.ctx, mock.Anything, database.UpsertOptimizationNew).Return(nil) ud.RunFn = func(a mock.Arguments) { data := a[1].(*fftypes.Data) @@ -67,17 +59,17 @@ func TestResolveMemberListNewGroupE2E(t *testing.T) { assert.NoError(t, err) assert.Len(t, group.Members, 2) // Group identiy is sorted by group members DIDs so check them in that order - if orgDIDLocal < orgDIDRemote { - assert.Equal(t, orgDIDLocal, group.Members[0].Identity) - assert.Equal(t, *nodeIDLocal, *group.Members[0].Node) - assert.Equal(t, orgDIDRemote, group.Members[1].Identity) - assert.Equal(t, *nodeIDRemote, *group.Members[1].Node) + if localOrg.DID < remoteOrg.DID { + assert.Equal(t, localOrg.DID, group.Members[0].Identity) + assert.Equal(t, *localNode.ID, *group.Members[0].Node) + assert.Equal(t, remoteOrg.DID, group.Members[1].Identity) + assert.Equal(t, *remoteNode.ID, *group.Members[1].Node) assert.Nil(t, group.Ledger) } else { - assert.Equal(t, orgDIDRemote, group.Members[0].Identity) - assert.Equal(t, *nodeIDRemote, *group.Members[0].Node) - assert.Equal(t, orgDIDLocal, group.Members[1].Identity) - assert.Equal(t, *nodeIDLocal, *group.Members[1].Node) + assert.Equal(t, remoteOrg.DID, group.Members[1].Identity) + assert.Equal(t, *remoteNode.ID, *group.Members[1].Node) + assert.Equal(t, localOrg.DID, group.Members[0].Identity) + assert.Equal(t, *localNode.ID, *group.Members[0].Node) assert.Nil(t, group.Ledger) } @@ -96,14 +88,14 @@ func TestResolveMemberListNewGroupE2E(t *testing.T) { Message: fftypes.Message{ Header: fftypes.MessageHeader{ Namespace: "ns1", - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "org1", }, }, }, Group: &fftypes.InputGroup{ Members: []fftypes.MemberInput{ - {Identity: orgNameRemote}, + {Identity: remoteOrg.Name}, }, }, }) @@ -117,20 +109,22 @@ func TestResolveMemberListExistingGroup(t *testing.T) { pm, cancel := newTestPrivateMessaging(t) defer cancel() + localOrg := newTestOrg("org1") + localNode := newTestNode("node1", localOrg) + mdi := pm.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByName", pm.ctx, "org1").Return(&fftypes.Organization{ID: fftypes.NewUUID()}, nil) - mdi.On("GetNodes", pm.ctx, mock.Anything).Return([]*fftypes.Node{{ID: fftypes.NewUUID(), Name: "node1", Owner: "localorg"}}, nil, nil) + mdi.On("GetIdentities", pm.ctx, mock.Anything).Return([]*fftypes.Identity{localNode}, nil, nil) mdi.On("GetGroups", pm.ctx, mock.Anything).Return([]*fftypes.Group{ {Hash: fftypes.NewRandB32()}, }, nil, nil) mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveLocalOrgDID", pm.ctx).Return("localorg", nil) - mim.On("GetLocalOrganization", pm.ctx).Return(&fftypes.Organization{Identity: "localorg"}, nil) + mim.On("CachedIdentityLookup", pm.ctx, "org1").Return(localOrg, false, nil) + mim.On("GetNodeOwnerOrg", pm.ctx).Return(localNode, nil) err := pm.resolveRecipientList(pm.ctx, &fftypes.MessageInOut{ Message: fftypes.Message{ Header: 
fftypes.MessageHeader{ - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "org1", }, }, @@ -143,26 +137,26 @@ func TestResolveMemberListExistingGroup(t *testing.T) { }) assert.NoError(t, err) mdi.AssertExpectations(t) + mim.AssertExpectations(t) } -func TestResolveMemberListGetGroupsFail(t *testing.T) { +func TestResolveMemberListLookupFail(t *testing.T) { pm, cancel := newTestPrivateMessaging(t) defer cancel() - mdi := pm.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByName", pm.ctx, "org1").Return(&fftypes.Organization{ID: fftypes.NewUUID()}, nil) - mdi.On("GetNodes", pm.ctx, mock.Anything).Return([]*fftypes.Node{{ID: fftypes.NewUUID(), Name: "node1", Owner: "localorg"}}, nil, nil) - mdi.On("GetGroups", pm.ctx, mock.Anything).Return(nil, nil, fmt.Errorf("pop")) + localOrg := newTestOrg("org1") + localNode := newTestNode("node1", localOrg) + mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveLocalOrgDID", pm.ctx).Return("localorg", nil) - mim.On("GetLocalOrganization", pm.ctx).Return(&fftypes.Organization{Identity: "localorg"}, nil) + mim.On("CachedIdentityLookup", pm.ctx, "org1").Return(nil, true, fmt.Errorf("pop")) + mim.On("GetNodeOwnerOrg", pm.ctx).Return(localNode, nil) err := pm.resolveRecipientList(pm.ctx, &fftypes.MessageInOut{ Message: fftypes.Message{ Header: fftypes.MessageHeader{ - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "org1", }, }, @@ -173,23 +167,30 @@ func TestResolveMemberListGetGroupsFail(t *testing.T) { }, }, }) - assert.EqualError(t, err, "pop") - mdi.AssertExpectations(t) + assert.Regexp(t, "pop", err) + mim.AssertExpectations(t) } -func TestResolveMemberListLocalOrgUnregistered(t *testing.T) { +func TestResolveMemberListGetGroupsFail(t *testing.T) { pm, cancel := newTestPrivateMessaging(t) defer cancel() + localOrg := newTestOrg("org1") + localNode := newTestNode("node1", localOrg) + + mdi := pm.database.(*databasemocks.Plugin) + mdi.On("GetIdentities", pm.ctx, mock.Anything).Return([]*fftypes.Identity{localNode}, nil, nil) + mdi.On("GetGroups", pm.ctx, mock.Anything).Return(nil, nil, fmt.Errorf("pop")) mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveLocalOrgDID", pm.ctx).Return("", fmt.Errorf("pop")) + mim.On("CachedIdentityLookup", pm.ctx, "org1").Return(localOrg, false, nil) + mim.On("GetNodeOwnerOrg", pm.ctx).Return(localNode, nil) err := pm.resolveRecipientList(pm.ctx, &fftypes.MessageInOut{ Message: fftypes.Message{ Header: fftypes.MessageHeader{ - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "org1", }, }, @@ -201,22 +202,23 @@ func TestResolveMemberListLocalOrgUnregistered(t *testing.T) { }, }) assert.EqualError(t, err, "pop") + mdi.AssertExpectations(t) + mim.AssertExpectations(t) } -func TestResolveMemberListLocalOrgLookupFailed(t *testing.T) { +func TestResolveMemberListLocalOrgUnregistered(t *testing.T) { pm, cancel := newTestPrivateMessaging(t) defer cancel() mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveLocalOrgDID", pm.ctx).Return("", nil) - mim.On("GetLocalOrganization", pm.ctx).Return(nil, fmt.Errorf("pop")) + mim.On("GetNodeOwnerOrg", pm.ctx).Return(nil, fmt.Errorf("pop")) err := pm.resolveRecipientList(pm.ctx, &fftypes.MessageInOut{ Message: fftypes.Message{ Header: fftypes.MessageHeader{ - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "org1", }, }, @@ -229,6 +231,8 @@ func TestResolveMemberListLocalOrgLookupFailed(t *testing.T) { }) assert.EqualError(t, err, "pop") + 
mim.AssertExpectations(t) + } func TestResolveMemberListMissingLocalMemberLookupFailed(t *testing.T) { @@ -236,18 +240,23 @@ func TestResolveMemberListMissingLocalMemberLookupFailed(t *testing.T) { pm, cancel := newTestPrivateMessaging(t) defer cancel() + localOrg := newTestOrg("localorg") + remoteOrg := newTestOrg("remoteorg") + localNode := newTestNode("node1", localOrg) + remoteNode := newTestNode("node2", remoteOrg) + mdi := pm.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByName", pm.ctx, "org1").Return(&fftypes.Organization{ID: fftypes.NewUUID()}, nil) - mdi.On("GetNodes", pm.ctx, mock.Anything).Return([]*fftypes.Node{{ID: fftypes.NewUUID(), Name: "node2", Owner: "org1"}}, nil, nil).Once() - mdi.On("GetNodes", pm.ctx, mock.Anything).Return(nil, nil, fmt.Errorf("pop")).Once() + mdi.On("GetIdentities", pm.ctx, mock.Anything).Return([]*fftypes.Identity{remoteNode}, nil, nil).Once() + mdi.On("GetIdentities", pm.ctx, mock.Anything).Return([]*fftypes.Identity{localNode}, nil, fmt.Errorf("pop")).Once() + mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveLocalOrgDID", pm.ctx).Return("localorg", nil) - mim.On("GetLocalOrganization", pm.ctx).Return(&fftypes.Organization{Identity: "localorg"}, nil) + mim.On("CachedIdentityLookup", pm.ctx, "org1").Return(localOrg, false, nil) + mim.On("GetNodeOwnerOrg", pm.ctx).Return(localNode, nil) err := pm.resolveRecipientList(pm.ctx, &fftypes.MessageInOut{ Message: fftypes.Message{ Header: fftypes.MessageHeader{ - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "org1", }, }, @@ -260,6 +269,7 @@ func TestResolveMemberListMissingLocalMemberLookupFailed(t *testing.T) { }) assert.Regexp(t, "pop", err) mdi.AssertExpectations(t) + mim.AssertExpectations(t) } @@ -268,48 +278,20 @@ func TestResolveMemberListNodeNotFound(t *testing.T) { pm, cancel := newTestPrivateMessaging(t) defer cancel() - mdi := pm.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByName", pm.ctx, "org1").Return(&fftypes.Organization{ID: fftypes.NewUUID()}, nil) - mdi.On("GetNodes", pm.ctx, mock.Anything).Return([]*fftypes.Node{}, nil, nil) - mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveLocalOrgDID", pm.ctx).Return("localorg", nil) - mim.On("GetLocalOrganization", pm.ctx).Return(&fftypes.Organization{Identity: "localorg"}, nil) - - err := pm.resolveRecipientList(pm.ctx, &fftypes.MessageInOut{ - Message: fftypes.Message{ - Header: fftypes.MessageHeader{ - Identity: fftypes.Identity{ - Author: "org1", - }, - }, - }, - Group: &fftypes.InputGroup{ - Members: []fftypes.MemberInput{ - {Identity: "org1"}, - }, - }, - }) - assert.Regexp(t, "FF10233", err) - mdi.AssertExpectations(t) - -} - -func TestResolveMemberOrgNameNotFound(t *testing.T) { - - pm, cancel := newTestPrivateMessaging(t) - defer cancel() + localOrg := newTestOrg("localorg") + localNode := newTestNode("node1", localOrg) mdi := pm.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByName", pm.ctx, "org1").Return(nil, nil) - mdi.On("GetOrganizationByIdentity", pm.ctx, "org1").Return(nil, nil) + mdi.On("GetIdentities", pm.ctx, mock.Anything).Return([]*fftypes.Identity{}, nil, nil).Once() + mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveLocalOrgDID", pm.ctx).Return("localorg", nil) - mim.On("GetLocalOrganization", pm.ctx).Return(&fftypes.Organization{Identity: "localorg"}, nil) + mim.On("CachedIdentityLookup", pm.ctx, "org1").Return(localOrg, false, nil) + mim.On("GetNodeOwnerOrg", pm.ctx).Return(localNode, nil) err := 
pm.resolveRecipientList(pm.ctx, &fftypes.MessageInOut{ Message: fftypes.Message{ Header: fftypes.MessageHeader{ - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "org1", }, }, @@ -320,8 +302,9 @@ func TestResolveMemberOrgNameNotFound(t *testing.T) { }, }, }) - assert.Regexp(t, "FF10223", err) + assert.Regexp(t, "FF10233", err) mdi.AssertExpectations(t) + mim.AssertExpectations(t) } @@ -330,21 +313,24 @@ func TestResolveMemberNodeOwnedParentOrg(t *testing.T) { pm, cancel := newTestPrivateMessaging(t) defer cancel() - orgID := fftypes.NewUUID() + parentOrg := newTestOrg("localorg") + childOrg := newTestOrg("org1") + childOrg.Parent = parentOrg.ID + localNode := newTestNode("node1", parentOrg) + mdi := pm.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByName", pm.ctx, "org1").Return(&fftypes.Organization{ID: fftypes.NewUUID(), Parent: "id-org2"}, nil) - mdi.On("GetOrganizationByIdentity", pm.ctx, "id-org2").Return(&fftypes.Organization{ID: orgID}, nil) - mdi.On("GetNodes", pm.ctx, mock.Anything).Return([]*fftypes.Node{}, nil, nil).Once() - mdi.On("GetNodes", pm.ctx, mock.Anything).Return([]*fftypes.Node{{ID: fftypes.NewUUID(), Name: "node1", Owner: "localorg"}}, nil, nil) + mdi.On("GetIdentities", pm.ctx, mock.Anything).Return([]*fftypes.Identity{}, nil, nil).Once() + mdi.On("GetIdentities", pm.ctx, mock.Anything).Return([]*fftypes.Identity{localNode}, nil, nil) mdi.On("GetGroups", pm.ctx, mock.Anything).Return([]*fftypes.Group{{Hash: fftypes.NewRandB32()}}, nil, nil) mim := pm.identity.(*identitymanagermocks.Manager) - mim.On("ResolveLocalOrgDID", pm.ctx).Return("localorg", nil) - mim.On("GetLocalOrganization", pm.ctx).Return(&fftypes.Organization{Identity: "localorg"}, nil) + mim.On("GetNodeOwnerOrg", pm.ctx).Return(parentOrg, nil) + mim.On("CachedIdentityLookup", pm.ctx, "org1").Return(childOrg, false, nil) + mim.On("CachedIdentityLookupByID", pm.ctx, parentOrg.ID).Return(parentOrg, nil) err := pm.resolveRecipientList(pm.ctx, &fftypes.MessageInOut{ Message: fftypes.Message{ Header: fftypes.MessageHeader{ - Identity: fftypes.Identity{ + SignerRef: fftypes.SignerRef{ Author: "org1", }, }, @@ -357,34 +343,7 @@ func TestResolveMemberNodeOwnedParentOrg(t *testing.T) { }) assert.NoError(t, err) mdi.AssertExpectations(t) - -} - -func TestResolveOrgFail(t *testing.T) { - pm, cancel := newTestPrivateMessaging(t) - defer cancel() - - mdi := pm.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByName", pm.ctx, "org1").Return(nil, fmt.Errorf("pop")) - - _, err := pm.resolveOrg(pm.ctx, "org1") - assert.Regexp(t, "pop", err) - mdi.AssertExpectations(t) - -} - -func TestResolveOrgByIDFail(t *testing.T) { - pm, cancel := newTestPrivateMessaging(t) - defer cancel() - - orgID := fftypes.NewUUID() - - mdi := pm.database.(*databasemocks.Plugin) - mdi.On("GetOrganizationByID", pm.ctx, orgID).Return(&fftypes.Organization{ID: orgID}, nil) - - org, err := pm.resolveOrg(pm.ctx, orgID.String()) - assert.NoError(t, err) - assert.Equal(t, *orgID, *org.ID) + mim.AssertExpectations(t) } @@ -392,12 +351,12 @@ func TestGetNodeFail(t *testing.T) { pm, cancel := newTestPrivateMessaging(t) defer cancel() - mdi := pm.database.(*databasemocks.Plugin) - mdi.On("GetNode", pm.ctx, "org1", "id-node1").Return(nil, fmt.Errorf("pop")) + mim := pm.identity.(*identitymanagermocks.Manager) + mim.On("CachedIdentityLookup", pm.ctx, "id-node1").Return(nil, true, fmt.Errorf("pop")) - _, err := pm.resolveNode(pm.ctx, &fftypes.Organization{Identity: "org1"}, "id-node1") + _, err := 
pm.resolveNode(pm.ctx, newTestOrg("org1"), "id-node1") assert.Regexp(t, "pop", err) - mdi.AssertExpectations(t) + mim.AssertExpectations(t) } @@ -406,9 +365,15 @@ func TestResolveNodeByIDNoResult(t *testing.T) { defer cancel() mdi := pm.database.(*databasemocks.Plugin) - mdi.On("GetNodeByID", pm.ctx, mock.Anything).Return(nil, nil) + mdi.On("GetIdentities", pm.ctx, mock.Anything).Return([]*fftypes.Identity{}, nil, nil) + + parentOrgID := fftypes.NewUUID() + mim := pm.identity.(*identitymanagermocks.Manager) + mim.On("CachedIdentityLookupByID", pm.ctx, parentOrgID).Return(nil, nil) - _, err := pm.resolveNode(pm.ctx, &fftypes.Organization{}, fftypes.NewUUID().String()) + childOrg := newTestOrg("test1") + childOrg.Parent = parentOrgID + _, err := pm.resolveNode(pm.ctx, childOrg, "") assert.Regexp(t, "FF10224", err) mdi.AssertExpectations(t) @@ -446,7 +411,7 @@ func TestResolveLocalNodeCached(t *testing.T) { pm.localNodeID = fftypes.NewUUID() - ni, err := pm.resolveLocalNode(pm.ctx, "localorg") + ni, err := pm.resolveLocalNode(pm.ctx, newTestOrg("localorg")) assert.NoError(t, err) assert.Equal(t, pm.localNodeID, ni) } @@ -456,9 +421,9 @@ func TestResolveLocalNodeNotFound(t *testing.T) { defer cancel() mdi := pm.database.(*databasemocks.Plugin) - mdi.On("GetNodes", pm.ctx, mock.Anything).Return([]*fftypes.Node{}, nil, nil) + mdi.On("GetIdentities", pm.ctx, mock.Anything).Return([]*fftypes.Identity{}, nil, nil) - _, err := pm.resolveLocalNode(pm.ctx, "localorg") + _, err := pm.resolveLocalNode(pm.ctx, newTestOrg("localorg")) assert.Regexp(t, "FF10225", err) } @@ -467,8 +432,8 @@ func TestResolveLocalNodeNotError(t *testing.T) { defer cancel() mdi := pm.database.(*databasemocks.Plugin) - mdi.On("GetNodes", pm.ctx, mock.Anything).Return(nil, nil, fmt.Errorf("pop")) + mdi.On("GetIdentities", pm.ctx, mock.Anything).Return(nil, nil, fmt.Errorf("pop")) - _, err := pm.resolveLocalNode(pm.ctx, "localorg") + _, err := pm.resolveLocalNode(pm.ctx, newTestOrg("localorg")) assert.EqualError(t, err, "pop") } diff --git a/internal/syncasync/sync_async_bridge.go b/internal/syncasync/sync_async_bridge.go index 0de6d84c95..5181535701 100644 --- a/internal/syncasync/sync_async_bridge.go +++ b/internal/syncasync/sync_async_bridge.go @@ -44,6 +44,8 @@ type Bridge interface { WaitForReply(ctx context.Context, ns string, id *fftypes.UUID, send RequestSender) (*fftypes.MessageInOut, error) // WaitForMessage waits for a message with the supplied ID WaitForMessage(ctx context.Context, ns string, id *fftypes.UUID, send RequestSender) (*fftypes.Message, error) + // WaitForIdentity waits for an identity with the supplied ID + WaitForIdentity(ctx context.Context, ns string, id *fftypes.UUID, send RequestSender) (*fftypes.Identity, error) // WaitForTokenPool waits for a token pool with the supplied ID WaitForTokenPool(ctx context.Context, ns string, id *fftypes.UUID, send RequestSender) (*fftypes.TokenPool, error) // WaitForTokenTransfer waits for a token transfer with the supplied ID @@ -59,6 +61,7 @@ type requestType int const ( messageConfirm requestType = iota messageReply + identityConfirm tokenPoolConfirm tokenTransferConfirm tokenApproveConfirm @@ -128,6 +131,9 @@ func (sa *syncAsyncBridge) addInFlight(ns string, id *fftypes.UUID, reqType requ } func (sa *syncAsyncBridge) getInFlight(ns string, reqType requestType, id *fftypes.UUID) *inflightRequest { + if id == nil { + return nil + } inflightNS := sa.inflight[ns] if inflightNS != nil && id != nil { inflight := inflightNS[*id] @@ -165,6 +171,17 @@ func (sa 
*syncAsyncBridge) getMessageFromEvent(event *fftypes.EventDelivery) (ms return msg, nil } +func (sa *syncAsyncBridge) getIdentityFromEvent(event *fftypes.EventDelivery) (identity *fftypes.Identity, err error) { + if identity, err = sa.database.GetIdentityByID(sa.ctx, event.Reference); err != nil { + return nil, err + } + if identity == nil { + // This should not happen (but we need to move on) + log.L(sa.ctx).Errorf("Unable to resolve identity '%s' for %s event '%s'", event.Reference, event.Type, event.ID) + } + return identity, nil +} + func (sa *syncAsyncBridge) getPoolFromEvent(event *fftypes.EventDelivery) (pool *fftypes.TokenPool, err error) { if pool, err = sa.database.GetTokenPoolByID(sa.ctx, event.Reference); err != nil { return nil, err @@ -225,18 +242,23 @@ func (sa *syncAsyncBridge) getOperationFromEvent(event *fftypes.EventDelivery) ( } func (sa *syncAsyncBridge) handleMessageConfirmedEvent(event *fftypes.EventDelivery) error { + + // See if the CID marks this as a reply to an inflight message + inflight := sa.getInFlight(event.Namespace, messageConfirm, event.Reference) + inflightReply := sa.getInFlight(event.Namespace, messageReply, event.Correlator) + + if inflightReply == nil && inflight == nil { + return nil + } + msg, err := sa.getMessageFromEvent(event) if err != nil || msg == nil { return err } - // See if the CID marks this as a reply to an inflight message - inflightReply := sa.getInFlight(event.Namespace, messageReply, msg.Header.CID) + if inflightReply != nil { go sa.resolveReply(inflightReply, msg) } - - // See if this is a confirmation of the delivery of an inflight message - inflight := sa.getInFlight(event.Namespace, messageConfirm, msg.Header.ID) if inflight != nil { go sa.resolveConfirmed(inflight, msg) } @@ -245,74 +267,111 @@ func (sa *syncAsyncBridge) handleMessageConfirmedEvent(event *fftypes.EventDeliv } func (sa *syncAsyncBridge) handleMessageRejectedEvent(event *fftypes.EventDelivery) error { + + // See if this is a rejection of an inflight message + inflight := sa.getInFlight(event.Namespace, messageConfirm, event.Reference) + inflightPool := sa.getInFlight(event.Namespace, tokenPoolConfirm, event.Correlator) + + if inflight == nil && inflightPool == nil { + return nil + } + msg, err := sa.getMessageFromEvent(event) if err != nil || msg == nil { return err } - // See if this is a rejection of an inflight message - inflight := sa.getInFlight(event.Namespace, messageConfirm, msg.Header.ID) if inflight != nil { go sa.resolveRejected(inflight, msg.Header.ID) } + // See if this is a rejection of an inflight token pool - if msg.Header.Type == fftypes.MessageTypeDefinition && msg.Header.Tag == string(fftypes.SystemTagDefinePool) { + if inflightPool != nil { if pool, err := sa.getPoolFromMessage(msg); err != nil { return err } else if pool != nil { - inflight := sa.getInFlight(event.Namespace, tokenPoolConfirm, pool.ID) - if inflight != nil { - go sa.resolveRejectedTokenPool(inflight, pool.ID) - } + go sa.resolveRejectedTokenPool(inflightPool, pool.ID) } } return nil } +func (sa *syncAsyncBridge) handleIdentityConfirmedEvent(event *fftypes.EventDelivery) error { + // See if the CID marks this as a reply to an inflight identity + inflightReply := sa.getInFlight(event.Namespace, identityConfirm, event.Reference) + if inflightReply == nil { + return nil + } + + identity, err := sa.getIdentityFromEvent(event) + if err != nil || identity == nil { + return err + } + + go sa.resolveIdentity(inflightReply, identity) + + return nil +} + func (sa *syncAsyncBridge) 
handlePoolConfirmedEvent(event *fftypes.EventDelivery) error { + // See if this is a confirmation of an inflight token pool + inflight := sa.getInFlight(event.Namespace, tokenPoolConfirm, event.Reference) + if inflight == nil { + return nil + } + pool, err := sa.getPoolFromEvent(event) if err != nil || pool == nil { return err } - // See if this is a confirmation of an inflight token pool - inflight := sa.getInFlight(event.Namespace, tokenPoolConfirm, pool.ID) - if inflight != nil { - go sa.resolveConfirmedTokenPool(inflight, pool) - } + + go sa.resolveConfirmedTokenPool(inflight, pool) return nil } func (sa *syncAsyncBridge) handleTransferConfirmedEvent(event *fftypes.EventDelivery) error { + // See if this is a confirmation of an inflight token transfer + inflight := sa.getInFlight(event.Namespace, tokenTransferConfirm, event.Reference) + if inflight == nil { + return nil + } + transfer, err := sa.getTransferFromEvent(event) if err != nil || transfer == nil { return err } - // See if this is a confirmation of an inflight token transfer - inflight := sa.getInFlight(event.Namespace, tokenTransferConfirm, transfer.LocalID) - if inflight != nil { - go sa.resolveConfirmedTokenTransfer(inflight, transfer) - } + + go sa.resolveConfirmedTokenTransfer(inflight, transfer) return nil } func (sa *syncAsyncBridge) handleApprovalConfirmedEvent(event *fftypes.EventDelivery) error { + + // See if this is a confirmation of an inflight token approval + inflight := sa.getInFlight(event.Namespace, tokenApproveConfirm, event.Reference) + if inflight == nil { + return nil + } + approval, err := sa.getApprovalFromEvent(event) if err != nil || approval == nil { return err } - // See if this is a confirmation of an inflight token approval - inflight := sa.getInFlight(event.Namespace, tokenApproveConfirm, approval.LocalID) - if inflight != nil { - go sa.resolveConfirmedTokenApproval(inflight, approval) - } + go sa.resolveConfirmedTokenApproval(inflight, approval) return nil } func (sa *syncAsyncBridge) handleTransferOpFailedEvent(event *fftypes.EventDelivery) error { + // See if this is a failure of an inflight token transfer operation + inflight := sa.getInFlight(event.Namespace, tokenTransferConfirm, event.Correlator) + if inflight == nil { + return nil + } + op, err := sa.getOperationFromEvent(event) if err != nil || op == nil { return err @@ -322,16 +381,19 @@ func (sa *syncAsyncBridge) handleTransferOpFailedEvent(event *fftypes.EventDeliv if err := txcommon.RetrieveTokenTransferInputs(sa.ctx, op, &transfer); err != nil { log.L(sa.ctx).Warnf("Failed to extract token transfer inputs for operation '%s': %s", op.ID, err) } - // See if this is a failure of an inflight token transfer operation - inflight := sa.getInFlight(event.Namespace, tokenTransferConfirm, transfer.LocalID) - if inflight != nil { - go sa.resolveFailedTokenTransfer(inflight, transfer.LocalID) - } + + go sa.resolveFailedTokenTransfer(inflight, transfer.LocalID) return nil } func (sa *syncAsyncBridge) handleApprovalOpFailedEvent(event *fftypes.EventDelivery) error { + // See if this is a failure of an inflight token approval operation + inflight := sa.getInFlight(event.Namespace, tokenApproveConfirm, event.Correlator) + if inflight == nil { + return nil + } + op, err := sa.getOperationFromEvent(event) if err != nil || op == nil { return err @@ -341,11 +403,8 @@ func (sa *syncAsyncBridge) handleApprovalOpFailedEvent(event *fftypes.EventDeliv if err := txcommon.RetrieveTokenApprovalInputs(sa.ctx, op, &approval); err != nil { 
log.L(sa.ctx).Warnf("Failed to extract token approval inputs for operation '%s': %s", op.ID, err) } - // See if this is a failure of an inflight token transfer operation - inflight := sa.getInFlight(event.Namespace, tokenApproveConfirm, approval.LocalID) - if inflight != nil { - go sa.resolveFailedTokenTransfer(inflight, approval.LocalID) - } + + go sa.resolveFailedTokenApproval(inflight, approval.LocalID) return nil } @@ -367,6 +426,9 @@ func (sa *syncAsyncBridge) eventCallback(event *fftypes.EventDelivery) error { case fftypes.EventTypeMessageRejected: return sa.handleMessageRejectedEvent(event) + case fftypes.EventTypeIdentityConfirmed: + return sa.handleIdentityConfirmedEvent(event) + case fftypes.EventTypePoolConfirmed: return sa.handlePoolConfirmedEvent(event) @@ -410,6 +472,11 @@ func (sa *syncAsyncBridge) resolveRejected(inflight *inflightRequest, msgID *fft inflight.response <- inflightResponse{err: err} } +func (sa *syncAsyncBridge) resolveIdentity(inflight *inflightRequest, identity *fftypes.Identity) { + log.L(sa.ctx).Debugf("Resolving identity creation '%s' with ID '%s'", inflight.id, identity.ID) + inflight.response <- inflightResponse{id: identity.ID, data: identity} +} + func (sa *syncAsyncBridge) resolveConfirmedTokenPool(inflight *inflightRequest, pool *fftypes.TokenPool) { log.L(sa.ctx).Debugf("Resolving token pool confirmation request '%s' with ID '%s'", inflight.id, pool.ID) inflight.response <- inflightResponse{id: pool.ID, data: pool} @@ -437,6 +504,12 @@ func (sa *syncAsyncBridge) resolveFailedTokenTransfer(inflight *inflightRequest, inflight.response <- inflightResponse{err: err} } +func (sa *syncAsyncBridge) resolveFailedTokenApproval(inflight *inflightRequest, transferID *fftypes.UUID) { + err := i18n.NewError(sa.ctx, i18n.MsgTokenApprovalFailed, transferID) + log.L(sa.ctx).Debugf("Resolving token approval request '%s' with error '%s'", inflight.id, err) + inflight.response <- inflightResponse{err: err} +} + func (sa *syncAsyncBridge) sendAndWait(ctx context.Context, ns string, id *fftypes.UUID, reqType requestType, send RequestSender) (interface{}, error) { inflight, err := sa.addInFlight(ns, id, reqType) if err != nil { @@ -483,6 +556,14 @@ func (sa *syncAsyncBridge) WaitForMessage(ctx context.Context, ns string, id *ff return reply.(*fftypes.Message), err } +func (sa *syncAsyncBridge) WaitForIdentity(ctx context.Context, ns string, id *fftypes.UUID, send RequestSender) (*fftypes.Identity, error) { + reply, err := sa.sendAndWait(ctx, ns, id, identityConfirm, send) + if err != nil { + return nil, err + } + return reply.(*fftypes.Identity), err +} + func (sa *syncAsyncBridge) WaitForTokenPool(ctx context.Context, ns string, id *fftypes.UUID, send RequestSender) (*fftypes.TokenPool, error) { reply, err := sa.sendAndWait(ctx, ns, id, tokenPoolConfirm, send) if err != nil { diff --git a/internal/syncasync/sync_async_bridge_test.go b/internal/syncasync/sync_async_bridge_test.go index a47c8292b5..4aacfa1674 100644 --- a/internal/syncasync/sync_async_bridge_test.go +++ b/internal/syncasync/sync_async_bridge_test.go @@ -77,10 +77,11 @@ func TestRequestReplyOk(t *testing.T) { go func() { sa.eventCallback(&fftypes.EventDelivery{ Event: fftypes.Event{ - ID: fftypes.NewUUID(), - Type: fftypes.EventTypeMessageConfirmed, - Reference: replyID, - Namespace: "ns1", + ID: fftypes.NewUUID(), + Type: fftypes.EventTypeMessageConfirmed, + Reference: replyID, + Correlator: requestID, + Namespace: "ns1", }, }) }() @@ -216,6 +217,9 @@ func TestEventCallbackNotInflight(t *testing.T) { 
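// --- Illustrative sketch (not part of the patch) ---------------------------
// The sync_async_bridge.go hunks above all follow one pattern: each event
// handler first checks, via event.Reference / event.Correlator, whether a
// matching request is in flight for the namespace, and only then loads the
// referenced object from the database. The new WaitForIdentity is used like
// the existing WaitFor* helpers: the send callback submits the request and
// the call blocks until the matching IdentityConfirmed event resolves it.
// The wrapper below is a hypothetical caller, shown only for illustration;
// only the Bridge method itself comes from the patch.
package sketch

import (
	"context"

	"github.com/hyperledger/firefly/internal/syncasync"
	"github.com/hyperledger/firefly/pkg/fftypes"
)

// awaitIdentityClaim submits an identity claim via the supplied callback and
// waits for the corresponding fftypes.EventTypeIdentityConfirmed event.
func awaitIdentityClaim(ctx context.Context, sa syncasync.Bridge, ns string, claimID *fftypes.UUID, submit func(ctx context.Context) error) (*fftypes.Identity, error) {
	return sa.WaitForIdentity(ctx, ns, claimID, submit)
}
// ---------------------------------------------------------------------------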
sa, cancel := newTestSyncAsyncBridge(t) defer cancel() + mse := sa.sysevents.(*sysmessagingmocks.SystemEvents) + mse.On("AddSystemEventListener", "ns1", mock.Anything).Return(nil) + err := sa.eventCallback(&fftypes.EventDelivery{ Event: fftypes.Event{ Namespace: "ns1", @@ -226,6 +230,29 @@ func TestEventCallbackNotInflight(t *testing.T) { }) assert.NoError(t, err) + sa.addInFlight("ns1", fftypes.NewUUID(), messageConfirm) + + for _, eventType := range []fftypes.EventType{ + fftypes.EventTypeMessageConfirmed, + fftypes.EventTypeMessageRejected, + fftypes.EventTypePoolConfirmed, + fftypes.EventTypeTransferConfirmed, + fftypes.EventTypeApprovalConfirmed, + fftypes.EventTypeTransferOpFailed, + fftypes.EventTypeApprovalOpFailed, + fftypes.EventTypeIdentityConfirmed, + } { + err := sa.eventCallback(&fftypes.EventDelivery{ + Event: fftypes.Event{ + Namespace: "ns1", + ID: fftypes.NewUUID(), + Reference: fftypes.NewUUID(), + Type: eventType, + }, + }) + assert.NoError(t, err) + + } } func TestEventCallbackWrongType(t *testing.T) { @@ -245,7 +272,7 @@ func TestEventCallbackWrongType(t *testing.T) { Namespace: "ns1", ID: fftypes.NewUUID(), Reference: fftypes.NewUUID(), - Type: fftypes.EventTypeGroupConfirmed, + Type: fftypes.EventTypeIdentityUpdated, // We use the message for this one, so no sync/async handler }, }) assert.NoError(t, err) @@ -260,7 +287,9 @@ func TestEventCallbackMsgLookupFail(t *testing.T) { responseID := fftypes.NewUUID() sa.inflight = map[string]map[fftypes.UUID]*inflightRequest{ "ns1": { - *responseID: &inflightRequest{}, + *responseID: &inflightRequest{ + reqType: messageConfirm, + }, }, } @@ -271,7 +300,7 @@ func TestEventCallbackMsgLookupFail(t *testing.T) { Event: fftypes.Event{ Namespace: "ns1", ID: fftypes.NewUUID(), - Reference: fftypes.NewUUID(), + Reference: responseID, Type: fftypes.EventTypeMessageConfirmed, }, }) @@ -287,7 +316,9 @@ func TestEventCallbackTokenPoolLookupFail(t *testing.T) { responseID := fftypes.NewUUID() sa.inflight = map[string]map[fftypes.UUID]*inflightRequest{ "ns1": { - *responseID: &inflightRequest{}, + *responseID: &inflightRequest{ + reqType: tokenPoolConfirm, + }, }, } @@ -298,7 +329,7 @@ func TestEventCallbackTokenPoolLookupFail(t *testing.T) { Event: fftypes.Event{ Namespace: "ns1", ID: fftypes.NewUUID(), - Reference: fftypes.NewUUID(), + Reference: responseID, Type: fftypes.EventTypePoolConfirmed, }, }) @@ -306,6 +337,64 @@ func TestEventCallbackTokenPoolLookupFail(t *testing.T) { } +func TestEventCallbackIdentityLookupFail(t *testing.T) { + + sa, cancel := newTestSyncAsyncBridge(t) + defer cancel() + + responseID := fftypes.NewUUID() + sa.inflight = map[string]map[fftypes.UUID]*inflightRequest{ + "ns1": { + *responseID: &inflightRequest{ + reqType: identityConfirm, + }, + }, + } + + mdi := sa.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByID", sa.ctx, mock.Anything).Return(nil, fmt.Errorf("pop")) + + err := sa.eventCallback(&fftypes.EventDelivery{ + Event: fftypes.Event{ + Namespace: "ns1", + ID: fftypes.NewUUID(), + Reference: responseID, + Type: fftypes.EventTypeIdentityConfirmed, + }, + }) + assert.EqualError(t, err, "pop") + +} + +func TestEventCallbackIdentityLookupNotFound(t *testing.T) { + + sa, cancel := newTestSyncAsyncBridge(t) + defer cancel() + + responseID := fftypes.NewUUID() + sa.inflight = map[string]map[fftypes.UUID]*inflightRequest{ + "ns1": { + *responseID: &inflightRequest{ + reqType: identityConfirm, + }, + }, + } + + mdi := sa.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByID", sa.ctx, 
mock.Anything).Return(nil, nil) + + err := sa.eventCallback(&fftypes.EventDelivery{ + Event: fftypes.Event{ + Namespace: "ns1", + ID: fftypes.NewUUID(), + Reference: responseID, + Type: fftypes.EventTypeIdentityConfirmed, + }, + }) + assert.NoError(t, err) + +} + func TestEventCallbackTokenTransferLookupFail(t *testing.T) { sa, cancel := newTestSyncAsyncBridge(t) @@ -314,7 +403,9 @@ func TestEventCallbackTokenTransferLookupFail(t *testing.T) { responseID := fftypes.NewUUID() sa.inflight = map[string]map[fftypes.UUID]*inflightRequest{ "ns1": { - *responseID: &inflightRequest{}, + *responseID: &inflightRequest{ + reqType: tokenTransferConfirm, + }, }, } @@ -325,7 +416,7 @@ func TestEventCallbackTokenTransferLookupFail(t *testing.T) { Event: fftypes.Event{ Namespace: "ns1", ID: fftypes.NewUUID(), - Reference: fftypes.NewUUID(), + Reference: responseID, Type: fftypes.EventTypeTransferConfirmed, }, }) @@ -340,7 +431,9 @@ func TestEventCallbackTokenApprovalLookupFail(t *testing.T) { responseID := fftypes.NewUUID() sa.inflight = map[string]map[fftypes.UUID]*inflightRequest{ "ns1": { - *responseID: &inflightRequest{}, + *responseID: &inflightRequest{ + reqType: tokenApproveConfirm, + }, }, } @@ -351,7 +444,7 @@ func TestEventCallbackTokenApprovalLookupFail(t *testing.T) { Event: fftypes.Event{ Namespace: "ns1", ID: fftypes.NewUUID(), - Reference: fftypes.NewUUID(), + Reference: responseID, Type: fftypes.EventTypeApprovalConfirmed, }, }) @@ -367,7 +460,9 @@ func TestEventCallbackMsgNotFound(t *testing.T) { responseID := fftypes.NewUUID() sa.inflight = map[string]map[fftypes.UUID]*inflightRequest{ "ns1": { - *responseID: &inflightRequest{}, + *responseID: &inflightRequest{ + reqType: messageConfirm, + }, }, } @@ -378,7 +473,7 @@ func TestEventCallbackMsgNotFound(t *testing.T) { Event: fftypes.Event{ Namespace: "ns1", ID: fftypes.NewUUID(), - Reference: fftypes.NewUUID(), + Reference: responseID, Type: fftypes.EventTypeMessageConfirmed, }, }) @@ -393,9 +488,15 @@ func TestEventCallbackRejectedMsgNotFound(t *testing.T) { defer cancel() responseID := fftypes.NewUUID() + correlationID := fftypes.NewUUID() sa.inflight = map[string]map[fftypes.UUID]*inflightRequest{ "ns1": { - *responseID: &inflightRequest{}, + *responseID: &inflightRequest{ + reqType: messageConfirm, + }, + *correlationID: &inflightRequest{ + reqType: tokenPoolConfirm, + }, }, } @@ -404,10 +505,11 @@ func TestEventCallbackRejectedMsgNotFound(t *testing.T) { err := sa.eventCallback(&fftypes.EventDelivery{ Event: fftypes.Event{ - Namespace: "ns1", - ID: fftypes.NewUUID(), - Reference: fftypes.NewUUID(), - Type: fftypes.EventTypeMessageRejected, + Namespace: "ns1", + ID: fftypes.NewUUID(), + Reference: responseID, + Correlator: correlationID, + Type: fftypes.EventTypeMessageRejected, }, }) assert.NoError(t, err) @@ -423,7 +525,9 @@ func TestEventCallbackTokenPoolNotFound(t *testing.T) { responseID := fftypes.NewUUID() sa.inflight = map[string]map[fftypes.UUID]*inflightRequest{ "ns1": { - *responseID: &inflightRequest{}, + *responseID: &inflightRequest{ + reqType: tokenPoolConfirm, + }, }, } @@ -434,7 +538,7 @@ func TestEventCallbackTokenPoolNotFound(t *testing.T) { Event: fftypes.Event{ Namespace: "ns1", ID: fftypes.NewUUID(), - Reference: fftypes.NewUUID(), + Reference: responseID, Type: fftypes.EventTypePoolConfirmed, }, }) @@ -451,7 +555,9 @@ func TestEventCallbackTokenTransferNotFound(t *testing.T) { responseID := fftypes.NewUUID() sa.inflight = map[string]map[fftypes.UUID]*inflightRequest{ "ns1": { - *responseID: &inflightRequest{}, + 
*responseID: &inflightRequest{ + reqType: tokenTransferConfirm, + }, }, } @@ -462,7 +568,7 @@ func TestEventCallbackTokenTransferNotFound(t *testing.T) { Event: fftypes.Event{ Namespace: "ns1", ID: fftypes.NewUUID(), - Reference: fftypes.NewUUID(), + Reference: responseID, Type: fftypes.EventTypeTransferConfirmed, }, }) @@ -479,7 +585,9 @@ func TestEventCallbackTokenApprovalNotFound(t *testing.T) { responseID := fftypes.NewUUID() sa.inflight = map[string]map[fftypes.UUID]*inflightRequest{ "ns1": { - *responseID: &inflightRequest{}, + *responseID: &inflightRequest{ + reqType: tokenApproveConfirm, + }, }, } @@ -490,7 +598,7 @@ func TestEventCallbackTokenApprovalNotFound(t *testing.T) { Event: fftypes.Event{ Namespace: "ns1", ID: fftypes.NewUUID(), - Reference: fftypes.NewUUID(), + Reference: responseID, Type: fftypes.EventTypeApprovalConfirmed, }, }) @@ -507,7 +615,9 @@ func TestEventCallbackTokenPoolRejectedNoData(t *testing.T) { responseID := fftypes.NewUUID() sa.inflight = map[string]map[fftypes.UUID]*inflightRequest{ "ns1": { - *responseID: &inflightRequest{}, + *responseID: &inflightRequest{ + reqType: tokenPoolConfirm, + }, }, } @@ -515,7 +625,7 @@ func TestEventCallbackTokenPoolRejectedNoData(t *testing.T) { Header: fftypes.MessageHeader{ ID: fftypes.NewUUID(), Type: fftypes.MessageTypeDefinition, - Tag: string(fftypes.SystemTagDefinePool), + Tag: fftypes.SystemTagDefinePool, }, Data: fftypes.DataRefs{}, } @@ -525,10 +635,11 @@ func TestEventCallbackTokenPoolRejectedNoData(t *testing.T) { err := sa.eventCallback(&fftypes.EventDelivery{ Event: fftypes.Event{ - Namespace: "ns1", - ID: fftypes.NewUUID(), - Reference: fftypes.NewUUID(), - Type: fftypes.EventTypeMessageRejected, + Namespace: "ns1", + ID: fftypes.NewUUID(), + Reference: fftypes.NewUUID(), + Correlator: responseID, + Type: fftypes.EventTypeMessageRejected, }, }) assert.NoError(t, err) @@ -542,9 +653,15 @@ func TestEventCallbackTokenPoolRejectedDataError(t *testing.T) { defer cancel() responseID := fftypes.NewUUID() + correlationID := fftypes.NewUUID() sa.inflight = map[string]map[fftypes.UUID]*inflightRequest{ "ns1": { - *responseID: &inflightRequest{}, + *responseID: &inflightRequest{ + reqType: messageConfirm, + }, + *correlationID: &inflightRequest{ + reqType: tokenPoolConfirm, + }, }, } @@ -553,7 +670,7 @@ func TestEventCallbackTokenPoolRejectedDataError(t *testing.T) { Header: fftypes.MessageHeader{ ID: fftypes.NewUUID(), Type: fftypes.MessageTypeDefinition, - Tag: string(fftypes.SystemTagDefinePool), + Tag: fftypes.SystemTagDefinePool, }, Data: fftypes.DataRefs{ {ID: dataID}, @@ -566,10 +683,11 @@ func TestEventCallbackTokenPoolRejectedDataError(t *testing.T) { err := sa.eventCallback(&fftypes.EventDelivery{ Event: fftypes.Event{ - Namespace: "ns1", - ID: fftypes.NewUUID(), - Reference: fftypes.NewUUID(), - Type: fftypes.EventTypeMessageRejected, + Namespace: "ns1", + ID: fftypes.NewUUID(), + Reference: responseID, + Correlator: correlationID, + Type: fftypes.EventTypeMessageRejected, }, }) assert.EqualError(t, err, "pop") @@ -668,7 +786,7 @@ func TestAwaitTokenPoolConfirmationRejected(t *testing.T) { Header: fftypes.MessageHeader{ ID: fftypes.NewUUID(), Type: fftypes.MessageTypeDefinition, - Tag: string(fftypes.SystemTagDefinePool), + Tag: fftypes.SystemTagDefinePool, }, Data: fftypes.DataRefs{ {ID: data.ID}, @@ -686,10 +804,11 @@ func TestAwaitTokenPoolConfirmationRejected(t *testing.T) { go func() { sa.eventCallback(&fftypes.EventDelivery{ Event: fftypes.Event{ - ID: fftypes.NewUUID(), - Type: 
fftypes.EventTypeMessageRejected, - Reference: msg.Header.ID, - Namespace: "ns1", + ID: fftypes.NewUUID(), + Type: fftypes.EventTypeMessageRejected, + Reference: msg.Header.ID, + Correlator: pool.Pool.ID, + Namespace: "ns1", }, }) }() @@ -818,6 +937,13 @@ func TestAwaitFailedTokenTransfer(t *testing.T) { "localId": requestID.String(), }, } + sa.inflight = map[string]map[fftypes.UUID]*inflightRequest{ + "ns1": { + *requestID: &inflightRequest{ + reqType: tokenTransferConfirm, + }, + }, + } mse := sa.sysevents.(*sysmessagingmocks.SystemEvents) mse.On("AddSystemEventListener", "ns1", mock.Anything).Return(nil) @@ -829,10 +955,11 @@ func TestAwaitFailedTokenTransfer(t *testing.T) { go func() { sa.eventCallback(&fftypes.EventDelivery{ Event: fftypes.Event{ - ID: fftypes.NewUUID(), - Type: fftypes.EventTypeTransferOpFailed, - Reference: op.ID, - Namespace: "ns1", + ID: fftypes.NewUUID(), + Type: fftypes.EventTypeTransferOpFailed, + Reference: op.ID, + Correlator: requestID, + Namespace: "ns1", }, }) }() @@ -864,16 +991,17 @@ func TestAwaitFailedTokenApproval(t *testing.T) { go func() { sa.eventCallback(&fftypes.EventDelivery{ Event: fftypes.Event{ - ID: fftypes.NewUUID(), - Type: fftypes.EventTypeApprovalOpFailed, - Reference: op.ID, - Namespace: "ns1", + ID: fftypes.NewUUID(), + Type: fftypes.EventTypeApprovalOpFailed, + Reference: op.ID, + Correlator: requestID, + Namespace: "ns1", }, }) }() return nil }) - assert.Regexp(t, "FF10291", err) + assert.Regexp(t, "FF10369", err) } func TestFailedTokenTransferOpError(t *testing.T) { @@ -884,7 +1012,9 @@ func TestFailedTokenTransferOpError(t *testing.T) { requestID := fftypes.NewUUID() sa.inflight = map[string]map[fftypes.UUID]*inflightRequest{ "ns1": { - *requestID: &inflightRequest{}, + *requestID: &inflightRequest{ + reqType: tokenTransferConfirm, + }, }, } @@ -900,10 +1030,11 @@ func TestFailedTokenTransferOpError(t *testing.T) { err := sa.eventCallback(&fftypes.EventDelivery{ Event: fftypes.Event{ - ID: fftypes.NewUUID(), - Type: fftypes.EventTypeTransferOpFailed, - Reference: op.ID, - Namespace: "ns1", + ID: fftypes.NewUUID(), + Type: fftypes.EventTypeTransferOpFailed, + Reference: op.ID, + Correlator: requestID, + Namespace: "ns1", }, }) assert.EqualError(t, err, "pop") @@ -919,7 +1050,9 @@ func TestFailedTokenApprovalOpError(t *testing.T) { requestID := fftypes.NewUUID() sa.inflight = map[string]map[fftypes.UUID]*inflightRequest{ "ns1": { - *requestID: &inflightRequest{}, + *requestID: &inflightRequest{ + reqType: tokenApproveConfirm, + }, }, } @@ -935,10 +1068,11 @@ func TestFailedTokenApprovalOpError(t *testing.T) { err := sa.eventCallback(&fftypes.EventDelivery{ Event: fftypes.Event{ - ID: fftypes.NewUUID(), - Type: fftypes.EventTypeApprovalOpFailed, - Reference: op.ID, - Namespace: "ns1", + ID: fftypes.NewUUID(), + Type: fftypes.EventTypeApprovalOpFailed, + Reference: op.ID, + Correlator: requestID, + Namespace: "ns1", }, }) assert.EqualError(t, err, "pop") @@ -954,7 +1088,9 @@ func TestFailedTokenApprovalOpNotFound(t *testing.T) { requestID := fftypes.NewUUID() sa.inflight = map[string]map[fftypes.UUID]*inflightRequest{ "ns1": { - *requestID: &inflightRequest{}, + *requestID: &inflightRequest{ + reqType: tokenApproveConfirm, + }, }, } @@ -970,10 +1106,11 @@ func TestFailedTokenApprovalOpNotFound(t *testing.T) { err := sa.eventCallback(&fftypes.EventDelivery{ Event: fftypes.Event{ - ID: fftypes.NewUUID(), - Type: fftypes.EventTypeApprovalOpFailed, - Reference: op.ID, - Namespace: "ns1", + ID: fftypes.NewUUID(), + Type: 
fftypes.EventTypeApprovalOpFailed, + Reference: op.ID, + Correlator: requestID, + Namespace: "ns1", }, }) assert.NoError(t, err) @@ -989,7 +1126,9 @@ func TestFailedTokenApprovalIDLookupFail(t *testing.T) { requestID := fftypes.NewUUID() sa.inflight = map[string]map[fftypes.UUID]*inflightRequest{ "ns1": { - *requestID: &inflightRequest{}, + *requestID: &inflightRequest{ + reqType: tokenApproveConfirm, + }, }, } @@ -1003,10 +1142,11 @@ func TestFailedTokenApprovalIDLookupFail(t *testing.T) { err := sa.eventCallback(&fftypes.EventDelivery{ Event: fftypes.Event{ - ID: fftypes.NewUUID(), - Type: fftypes.EventTypeApprovalOpFailed, - Reference: op.ID, - Namespace: "ns1", + ID: fftypes.NewUUID(), + Type: fftypes.EventTypeApprovalOpFailed, + Reference: op.ID, + Correlator: requestID, + Namespace: "ns1", }, }) assert.NoError(t, err) @@ -1022,7 +1162,9 @@ func TestFailedTokenTransferOpNotFound(t *testing.T) { requestID := fftypes.NewUUID() sa.inflight = map[string]map[fftypes.UUID]*inflightRequest{ "ns1": { - *requestID: &inflightRequest{}, + *requestID: &inflightRequest{ + reqType: tokenTransferConfirm, + }, }, } @@ -1038,10 +1180,11 @@ func TestFailedTokenTransferOpNotFound(t *testing.T) { err := sa.eventCallback(&fftypes.EventDelivery{ Event: fftypes.Event{ - ID: fftypes.NewUUID(), - Type: fftypes.EventTypeTransferOpFailed, - Reference: op.ID, - Namespace: "ns1", + ID: fftypes.NewUUID(), + Type: fftypes.EventTypeTransferOpFailed, + Reference: op.ID, + Correlator: requestID, + Namespace: "ns1", }, }) assert.NoError(t, err) @@ -1057,7 +1200,9 @@ func TestFailedTokenTransferIDLookupFail(t *testing.T) { requestID := fftypes.NewUUID() sa.inflight = map[string]map[fftypes.UUID]*inflightRequest{ "ns1": { - *requestID: &inflightRequest{}, + *requestID: &inflightRequest{ + reqType: tokenTransferConfirm, + }, }, } @@ -1071,13 +1216,72 @@ func TestFailedTokenTransferIDLookupFail(t *testing.T) { err := sa.eventCallback(&fftypes.EventDelivery{ Event: fftypes.Event{ - ID: fftypes.NewUUID(), - Type: fftypes.EventTypeTransferOpFailed, - Reference: op.ID, - Namespace: "ns1", + ID: fftypes.NewUUID(), + Type: fftypes.EventTypeTransferOpFailed, + Reference: op.ID, + Correlator: requestID, + Namespace: "ns1", }, }) assert.NoError(t, err) mdi.AssertExpectations(t) } + +func TestAwaitIdentityConfirmed(t *testing.T) { + + sa, cancel := newTestSyncAsyncBridge(t) + defer cancel() + + requestID := fftypes.NewUUID() + identity := &fftypes.Identity{ + IdentityBase: fftypes.IdentityBase{ + ID: requestID, + }, + } + sa.inflight = map[string]map[fftypes.UUID]*inflightRequest{ + "ns1": { + *requestID: &inflightRequest{ + reqType: identityConfirm, + }, + }, + } + + mse := sa.sysevents.(*sysmessagingmocks.SystemEvents) + mse.On("AddSystemEventListener", "ns1", mock.Anything).Return(nil) + + mdi := sa.database.(*databasemocks.Plugin) + mdi.On("GetIdentityByID", sa.ctx, requestID).Return(identity, nil) + + retIdentity, err := sa.WaitForIdentity(sa.ctx, "ns1", requestID, func(ctx context.Context) error { + go func() { + sa.eventCallback(&fftypes.EventDelivery{ + Event: fftypes.Event{ + ID: fftypes.NewUUID(), + Type: fftypes.EventTypeIdentityConfirmed, + Reference: requestID, + Namespace: "ns1", + }, + }) + }() + return nil + }) + assert.NoError(t, err) + assert.Equal(t, retIdentity, identity) +} + +func TestAwaitIdentityFail(t *testing.T) { + + sa, cancel := newTestSyncAsyncBridge(t) + defer cancel() + + requestID := fftypes.NewUUID() + + mse := sa.sysevents.(*sysmessagingmocks.SystemEvents) + mse.On("AddSystemEventListener", "ns1", 
mock.Anything).Return(nil) + + _, err := sa.WaitForIdentity(sa.ctx, "ns1", requestID, func(ctx context.Context) error { + return fmt.Errorf("pop") + }) + assert.Regexp(t, "pop", err) +} diff --git a/manifest.json b/manifest.json index bf38872904..a4da50dd10 100644 --- a/manifest.json +++ b/manifest.json @@ -44,6 +44,6 @@ "release": "v0.5.0_8cb358c" }, "cli": { - "tag": "v0.0.43" + "tag": "v0.0.44" } } diff --git a/mocks/blockchainmocks/callbacks.go b/mocks/blockchainmocks/callbacks.go index 9017ab6241..9f4b5d0138 100644 --- a/mocks/blockchainmocks/callbacks.go +++ b/mocks/blockchainmocks/callbacks.go @@ -14,13 +14,13 @@ type Callbacks struct { mock.Mock } -// BatchPinComplete provides a mock function with given fields: batch, signingIdentity -func (_m *Callbacks) BatchPinComplete(batch *blockchain.BatchPin, signingIdentity string) error { - ret := _m.Called(batch, signingIdentity) +// BatchPinComplete provides a mock function with given fields: batch, signingKey +func (_m *Callbacks) BatchPinComplete(batch *blockchain.BatchPin, signingKey *fftypes.VerifierRef) error { + ret := _m.Called(batch, signingKey) var r0 error - if rf, ok := ret.Get(0).(func(*blockchain.BatchPin, string) error); ok { - r0 = rf(batch, signingIdentity) + if rf, ok := ret.Get(0).(func(*blockchain.BatchPin, *fftypes.VerifierRef) error); ok { + r0 = rf(batch, signingKey) } else { r0 = ret.Error(0) } diff --git a/mocks/blockchainmocks/plugin.go b/mocks/blockchainmocks/plugin.go index 2fcec3f94a..b57bff6c43 100644 --- a/mocks/blockchainmocks/plugin.go +++ b/mocks/blockchainmocks/plugin.go @@ -155,22 +155,20 @@ func (_m *Plugin) Name() string { return r0 } -// QueryContract provides a mock function with given fields: ctx, location, method, input -func (_m *Plugin) QueryContract(ctx context.Context, location *fftypes.JSONAny, method *fftypes.FFIMethod, input map[string]interface{}) (interface{}, error) { - ret := _m.Called(ctx, location, method, input) +// NormalizeSigningKey provides a mock function with given fields: ctx, keyRef +func (_m *Plugin) NormalizeSigningKey(ctx context.Context, keyRef string) (string, error) { + ret := _m.Called(ctx, keyRef) - var r0 interface{} - if rf, ok := ret.Get(0).(func(context.Context, *fftypes.JSONAny, *fftypes.FFIMethod, map[string]interface{}) interface{}); ok { - r0 = rf(ctx, location, method, input) + var r0 string + if rf, ok := ret.Get(0).(func(context.Context, string) string); ok { + r0 = rf(ctx, keyRef) } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(interface{}) - } + r0 = ret.Get(0).(string) } var r1 error - if rf, ok := ret.Get(1).(func(context.Context, *fftypes.JSONAny, *fftypes.FFIMethod, map[string]interface{}) error); ok { - r1 = rf(ctx, location, method, input) + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, keyRef) } else { r1 = ret.Error(1) } @@ -178,20 +176,22 @@ func (_m *Plugin) QueryContract(ctx context.Context, location *fftypes.JSONAny, return r0, r1 } -// ResolveSigningKey provides a mock function with given fields: ctx, signingKey -func (_m *Plugin) ResolveSigningKey(ctx context.Context, signingKey string) (string, error) { - ret := _m.Called(ctx, signingKey) +// QueryContract provides a mock function with given fields: ctx, location, method, input +func (_m *Plugin) QueryContract(ctx context.Context, location *fftypes.JSONAny, method *fftypes.FFIMethod, input map[string]interface{}) (interface{}, error) { + ret := _m.Called(ctx, location, method, input) - var r0 string - if rf, ok := ret.Get(0).(func(context.Context, 
string) string); ok { - r0 = rf(ctx, signingKey) + var r0 interface{} + if rf, ok := ret.Get(0).(func(context.Context, *fftypes.JSONAny, *fftypes.FFIMethod, map[string]interface{}) interface{}); ok { + r0 = rf(ctx, location, method, input) } else { - r0 = ret.Get(0).(string) + if ret.Get(0) != nil { + r0 = ret.Get(0).(interface{}) + } } var r1 error - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, signingKey) + if rf, ok := ret.Get(1).(func(context.Context, *fftypes.JSONAny, *fftypes.FFIMethod, map[string]interface{}) error); ok { + r1 = rf(ctx, location, method, input) } else { r1 = ret.Error(1) } @@ -226,3 +226,17 @@ func (_m *Plugin) SubmitBatchPin(ctx context.Context, operationID *fftypes.UUID, return r0 } + +// VerifierType provides a mock function with given fields: +func (_m *Plugin) VerifierType() fftypes.FFEnum { + ret := _m.Called() + + var r0 fftypes.FFEnum + if rf, ok := ret.Get(0).(func() fftypes.FFEnum); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(fftypes.FFEnum) + } + + return r0 +} diff --git a/mocks/broadcastmocks/manager.go b/mocks/broadcastmocks/manager.go index f419341a4f..e47d84e439 100644 --- a/mocks/broadcastmocks/manager.go +++ b/mocks/broadcastmocks/manager.go @@ -40,11 +40,11 @@ func (_m *Manager) BroadcastDatatype(ctx context.Context, ns string, datatype *f } // BroadcastDefinition provides a mock function with given fields: ctx, ns, def, signingIdentity, tag, waitConfirm -func (_m *Manager) BroadcastDefinition(ctx context.Context, ns string, def fftypes.Definition, signingIdentity *fftypes.Identity, tag fftypes.SystemTag, waitConfirm bool) (*fftypes.Message, error) { +func (_m *Manager) BroadcastDefinition(ctx context.Context, ns string, def fftypes.Definition, signingIdentity *fftypes.SignerRef, tag string, waitConfirm bool) (*fftypes.Message, error) { ret := _m.Called(ctx, ns, def, signingIdentity, tag, waitConfirm) var r0 *fftypes.Message - if rf, ok := ret.Get(0).(func(context.Context, string, fftypes.Definition, *fftypes.Identity, fftypes.SystemTag, bool) *fftypes.Message); ok { + if rf, ok := ret.Get(0).(func(context.Context, string, fftypes.Definition, *fftypes.SignerRef, string, bool) *fftypes.Message); ok { r0 = rf(ctx, ns, def, signingIdentity, tag, waitConfirm) } else { if ret.Get(0) != nil { @@ -53,7 +53,7 @@ func (_m *Manager) BroadcastDefinition(ctx context.Context, ns string, def fftyp } var r1 error - if rf, ok := ret.Get(1).(func(context.Context, string, fftypes.Definition, *fftypes.Identity, fftypes.SystemTag, bool) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, string, fftypes.Definition, *fftypes.SignerRef, string, bool) error); ok { r1 = rf(ctx, ns, def, signingIdentity, tag, waitConfirm) } else { r1 = ret.Error(1) @@ -63,11 +63,11 @@ func (_m *Manager) BroadcastDefinition(ctx context.Context, ns string, def fftyp } // BroadcastDefinitionAsNode provides a mock function with given fields: ctx, ns, def, tag, waitConfirm -func (_m *Manager) BroadcastDefinitionAsNode(ctx context.Context, ns string, def fftypes.Definition, tag fftypes.SystemTag, waitConfirm bool) (*fftypes.Message, error) { +func (_m *Manager) BroadcastDefinitionAsNode(ctx context.Context, ns string, def fftypes.Definition, tag string, waitConfirm bool) (*fftypes.Message, error) { ret := _m.Called(ctx, ns, def, tag, waitConfirm) var r0 *fftypes.Message - if rf, ok := ret.Get(0).(func(context.Context, string, fftypes.Definition, fftypes.SystemTag, bool) *fftypes.Message); ok { + if rf, ok := ret.Get(0).(func(context.Context, 
string, fftypes.Definition, string, bool) *fftypes.Message); ok { r0 = rf(ctx, ns, def, tag, waitConfirm) } else { if ret.Get(0) != nil { @@ -76,7 +76,7 @@ func (_m *Manager) BroadcastDefinitionAsNode(ctx context.Context, ns string, def } var r1 error - if rf, ok := ret.Get(1).(func(context.Context, string, fftypes.Definition, fftypes.SystemTag, bool) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, string, fftypes.Definition, string, bool) error); ok { r1 = rf(ctx, ns, def, tag, waitConfirm) } else { r1 = ret.Error(1) @@ -85,13 +85,13 @@ func (_m *Manager) BroadcastDefinitionAsNode(ctx context.Context, ns string, def return r0, r1 } -// BroadcastMessage provides a mock function with given fields: ctx, ns, in, waitConfirm -func (_m *Manager) BroadcastMessage(ctx context.Context, ns string, in *fftypes.MessageInOut, waitConfirm bool) (*fftypes.Message, error) { - ret := _m.Called(ctx, ns, in, waitConfirm) +// BroadcastIdentityClaim provides a mock function with given fields: ctx, ns, def, signingIdentity, tag, waitConfirm +func (_m *Manager) BroadcastIdentityClaim(ctx context.Context, ns string, def *fftypes.IdentityClaim, signingIdentity *fftypes.SignerRef, tag string, waitConfirm bool) (*fftypes.Message, error) { + ret := _m.Called(ctx, ns, def, signingIdentity, tag, waitConfirm) var r0 *fftypes.Message - if rf, ok := ret.Get(0).(func(context.Context, string, *fftypes.MessageInOut, bool) *fftypes.Message); ok { - r0 = rf(ctx, ns, in, waitConfirm) + if rf, ok := ret.Get(0).(func(context.Context, string, *fftypes.IdentityClaim, *fftypes.SignerRef, string, bool) *fftypes.Message); ok { + r0 = rf(ctx, ns, def, signingIdentity, tag, waitConfirm) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*fftypes.Message) @@ -99,8 +99,8 @@ func (_m *Manager) BroadcastMessage(ctx context.Context, ns string, in *fftypes. } var r1 error - if rf, ok := ret.Get(1).(func(context.Context, string, *fftypes.MessageInOut, bool) error); ok { - r1 = rf(ctx, ns, in, waitConfirm) + if rf, ok := ret.Get(1).(func(context.Context, string, *fftypes.IdentityClaim, *fftypes.SignerRef, string, bool) error); ok { + r1 = rf(ctx, ns, def, signingIdentity, tag, waitConfirm) } else { r1 = ret.Error(1) } @@ -108,13 +108,13 @@ func (_m *Manager) BroadcastMessage(ctx context.Context, ns string, in *fftypes. 
return r0, r1 } -// BroadcastNamespace provides a mock function with given fields: ctx, ns, waitConfirm -func (_m *Manager) BroadcastNamespace(ctx context.Context, ns *fftypes.Namespace, waitConfirm bool) (*fftypes.Message, error) { - ret := _m.Called(ctx, ns, waitConfirm) +// BroadcastMessage provides a mock function with given fields: ctx, ns, in, waitConfirm +func (_m *Manager) BroadcastMessage(ctx context.Context, ns string, in *fftypes.MessageInOut, waitConfirm bool) (*fftypes.Message, error) { + ret := _m.Called(ctx, ns, in, waitConfirm) var r0 *fftypes.Message - if rf, ok := ret.Get(0).(func(context.Context, *fftypes.Namespace, bool) *fftypes.Message); ok { - r0 = rf(ctx, ns, waitConfirm) + if rf, ok := ret.Get(0).(func(context.Context, string, *fftypes.MessageInOut, bool) *fftypes.Message); ok { + r0 = rf(ctx, ns, in, waitConfirm) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*fftypes.Message) @@ -122,8 +122,8 @@ func (_m *Manager) BroadcastNamespace(ctx context.Context, ns *fftypes.Namespace } var r1 error - if rf, ok := ret.Get(1).(func(context.Context, *fftypes.Namespace, bool) error); ok { - r1 = rf(ctx, ns, waitConfirm) + if rf, ok := ret.Get(1).(func(context.Context, string, *fftypes.MessageInOut, bool) error); ok { + r1 = rf(ctx, ns, in, waitConfirm) } else { r1 = ret.Error(1) } @@ -131,13 +131,13 @@ func (_m *Manager) BroadcastNamespace(ctx context.Context, ns *fftypes.Namespace return r0, r1 } -// BroadcastRootOrgDefinition provides a mock function with given fields: ctx, def, signingIdentity, tag, waitConfirm -func (_m *Manager) BroadcastRootOrgDefinition(ctx context.Context, def *fftypes.Organization, signingIdentity *fftypes.Identity, tag fftypes.SystemTag, waitConfirm bool) (*fftypes.Message, error) { - ret := _m.Called(ctx, def, signingIdentity, tag, waitConfirm) +// BroadcastNamespace provides a mock function with given fields: ctx, ns, waitConfirm +func (_m *Manager) BroadcastNamespace(ctx context.Context, ns *fftypes.Namespace, waitConfirm bool) (*fftypes.Message, error) { + ret := _m.Called(ctx, ns, waitConfirm) var r0 *fftypes.Message - if rf, ok := ret.Get(0).(func(context.Context, *fftypes.Organization, *fftypes.Identity, fftypes.SystemTag, bool) *fftypes.Message); ok { - r0 = rf(ctx, def, signingIdentity, tag, waitConfirm) + if rf, ok := ret.Get(0).(func(context.Context, *fftypes.Namespace, bool) *fftypes.Message); ok { + r0 = rf(ctx, ns, waitConfirm) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*fftypes.Message) @@ -145,8 +145,8 @@ func (_m *Manager) BroadcastRootOrgDefinition(ctx context.Context, def *fftypes. 
} var r1 error - if rf, ok := ret.Get(1).(func(context.Context, *fftypes.Organization, *fftypes.Identity, fftypes.SystemTag, bool) error); ok { - r1 = rf(ctx, def, signingIdentity, tag, waitConfirm) + if rf, ok := ret.Get(1).(func(context.Context, *fftypes.Namespace, bool) error); ok { + r1 = rf(ctx, ns, waitConfirm) } else { r1 = ret.Error(1) } diff --git a/mocks/databasemocks/plugin.go b/mocks/databasemocks/plugin.go index 37ae108cdd..9299127a98 100644 --- a/mocks/databasemocks/plugin.go +++ b/mocks/databasemocks/plugin.go @@ -1069,6 +1069,107 @@ func (_m *Plugin) GetGroups(ctx context.Context, filter database.Filter) ([]*fft return r0, r1, r2 } +// GetIdentities provides a mock function with given fields: ctx, filter +func (_m *Plugin) GetIdentities(ctx context.Context, filter database.Filter) ([]*fftypes.Identity, *database.FilterResult, error) { + ret := _m.Called(ctx, filter) + + var r0 []*fftypes.Identity + if rf, ok := ret.Get(0).(func(context.Context, database.Filter) []*fftypes.Identity); ok { + r0 = rf(ctx, filter) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*fftypes.Identity) + } + } + + var r1 *database.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, database.Filter) *database.FilterResult); ok { + r1 = rf(ctx, filter) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*database.FilterResult) + } + } + + var r2 error + if rf, ok := ret.Get(2).(func(context.Context, database.Filter) error); ok { + r2 = rf(ctx, filter) + } else { + r2 = ret.Error(2) + } + + return r0, r1, r2 +} + +// GetIdentityByDID provides a mock function with given fields: ctx, did +func (_m *Plugin) GetIdentityByDID(ctx context.Context, did string) (*fftypes.Identity, error) { + ret := _m.Called(ctx, did) + + var r0 *fftypes.Identity + if rf, ok := ret.Get(0).(func(context.Context, string) *fftypes.Identity); ok { + r0 = rf(ctx, did) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*fftypes.Identity) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, did) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetIdentityByID provides a mock function with given fields: ctx, id +func (_m *Plugin) GetIdentityByID(ctx context.Context, id *fftypes.UUID) (*fftypes.Identity, error) { + ret := _m.Called(ctx, id) + + var r0 *fftypes.Identity + if rf, ok := ret.Get(0).(func(context.Context, *fftypes.UUID) *fftypes.Identity); ok { + r0 = rf(ctx, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*fftypes.Identity) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *fftypes.UUID) error); ok { + r1 = rf(ctx, id) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetIdentityByName provides a mock function with given fields: ctx, iType, namespace, name +func (_m *Plugin) GetIdentityByName(ctx context.Context, iType fftypes.FFEnum, namespace string, name string) (*fftypes.Identity, error) { + ret := _m.Called(ctx, iType, namespace, name) + + var r0 *fftypes.Identity + if rf, ok := ret.Get(0).(func(context.Context, fftypes.FFEnum, string, string) *fftypes.Identity); ok { + r0 = rf(ctx, iType, namespace, name) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*fftypes.Identity) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, fftypes.FFEnum, string, string) error); ok { + r1 = rf(ctx, iType, namespace, name) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetMessageByID provides a mock function with given fields: ctx, id func (_m *Plugin) 
GetMessageByID(ctx context.Context, id *fftypes.UUID) (*fftypes.Message, error) { ret := _m.Called(ctx, id) @@ -1289,84 +1390,6 @@ func (_m *Plugin) GetNextPins(ctx context.Context, filter database.Filter) ([]*f return r0, r1, r2 } -// GetNode provides a mock function with given fields: ctx, owner, name -func (_m *Plugin) GetNode(ctx context.Context, owner string, name string) (*fftypes.Node, error) { - ret := _m.Called(ctx, owner, name) - - var r0 *fftypes.Node - if rf, ok := ret.Get(0).(func(context.Context, string, string) *fftypes.Node); ok { - r0 = rf(ctx, owner, name) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*fftypes.Node) - } - } - - var r1 error - if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok { - r1 = rf(ctx, owner, name) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// GetNodeByID provides a mock function with given fields: ctx, id -func (_m *Plugin) GetNodeByID(ctx context.Context, id *fftypes.UUID) (*fftypes.Node, error) { - ret := _m.Called(ctx, id) - - var r0 *fftypes.Node - if rf, ok := ret.Get(0).(func(context.Context, *fftypes.UUID) *fftypes.Node); ok { - r0 = rf(ctx, id) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*fftypes.Node) - } - } - - var r1 error - if rf, ok := ret.Get(1).(func(context.Context, *fftypes.UUID) error); ok { - r1 = rf(ctx, id) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// GetNodes provides a mock function with given fields: ctx, filter -func (_m *Plugin) GetNodes(ctx context.Context, filter database.Filter) ([]*fftypes.Node, *database.FilterResult, error) { - ret := _m.Called(ctx, filter) - - var r0 []*fftypes.Node - if rf, ok := ret.Get(0).(func(context.Context, database.Filter) []*fftypes.Node); ok { - r0 = rf(ctx, filter) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]*fftypes.Node) - } - } - - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.Filter) *database.FilterResult); ok { - r1 = rf(ctx, filter) - } else { - if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) - } - } - - var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.Filter) error); ok { - r2 = rf(ctx, filter) - } else { - r2 = ret.Error(2) - } - - return r0, r1, r2 -} - // GetNonce provides a mock function with given fields: ctx, hash func (_m *Plugin) GetNonce(ctx context.Context, hash *fftypes.Bytes32) (*fftypes.Nonce, error) { ret := _m.Called(ctx, hash) @@ -1532,107 +1555,6 @@ func (_m *Plugin) GetOperations(ctx context.Context, filter database.Filter) ([] return r0, r1, r2 } -// GetOrganizationByID provides a mock function with given fields: ctx, id -func (_m *Plugin) GetOrganizationByID(ctx context.Context, id *fftypes.UUID) (*fftypes.Organization, error) { - ret := _m.Called(ctx, id) - - var r0 *fftypes.Organization - if rf, ok := ret.Get(0).(func(context.Context, *fftypes.UUID) *fftypes.Organization); ok { - r0 = rf(ctx, id) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*fftypes.Organization) - } - } - - var r1 error - if rf, ok := ret.Get(1).(func(context.Context, *fftypes.UUID) error); ok { - r1 = rf(ctx, id) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// GetOrganizationByIdentity provides a mock function with given fields: ctx, identity -func (_m *Plugin) GetOrganizationByIdentity(ctx context.Context, identity string) (*fftypes.Organization, error) { - ret := _m.Called(ctx, identity) - - var r0 *fftypes.Organization - if rf, ok := ret.Get(0).(func(context.Context, string) 
*fftypes.Organization); ok { - r0 = rf(ctx, identity) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*fftypes.Organization) - } - } - - var r1 error - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, identity) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// GetOrganizationByName provides a mock function with given fields: ctx, name -func (_m *Plugin) GetOrganizationByName(ctx context.Context, name string) (*fftypes.Organization, error) { - ret := _m.Called(ctx, name) - - var r0 *fftypes.Organization - if rf, ok := ret.Get(0).(func(context.Context, string) *fftypes.Organization); ok { - r0 = rf(ctx, name) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*fftypes.Organization) - } - } - - var r1 error - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, name) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// GetOrganizations provides a mock function with given fields: ctx, filter -func (_m *Plugin) GetOrganizations(ctx context.Context, filter database.Filter) ([]*fftypes.Organization, *database.FilterResult, error) { - ret := _m.Called(ctx, filter) - - var r0 []*fftypes.Organization - if rf, ok := ret.Get(0).(func(context.Context, database.Filter) []*fftypes.Organization); ok { - r0 = rf(ctx, filter) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]*fftypes.Organization) - } - } - - var r1 *database.FilterResult - if rf, ok := ret.Get(1).(func(context.Context, database.Filter) *database.FilterResult); ok { - r1 = rf(ctx, filter) - } else { - if ret.Get(1) != nil { - r1 = ret.Get(1).(*database.FilterResult) - } - } - - var r2 error - if rf, ok := ret.Get(2).(func(context.Context, database.Filter) error); ok { - r2 = rf(ctx, filter) - } else { - r2 = ret.Error(2) - } - - return r0, r1, r2 -} - // GetPins provides a mock function with given fields: ctx, filter func (_m *Plugin) GetPins(ctx context.Context, filter database.Filter) ([]*fftypes.Pin, *database.FilterResult, error) { ret := _m.Called(ctx, filter) @@ -2174,6 +2096,84 @@ func (_m *Plugin) GetTransactions(ctx context.Context, filter database.Filter) ( return r0, r1, r2 } +// GetVerifierByHash provides a mock function with given fields: ctx, hash +func (_m *Plugin) GetVerifierByHash(ctx context.Context, hash *fftypes.Bytes32) (*fftypes.Verifier, error) { + ret := _m.Called(ctx, hash) + + var r0 *fftypes.Verifier + if rf, ok := ret.Get(0).(func(context.Context, *fftypes.Bytes32) *fftypes.Verifier); ok { + r0 = rf(ctx, hash) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*fftypes.Verifier) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *fftypes.Bytes32) error); ok { + r1 = rf(ctx, hash) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetVerifierByValue provides a mock function with given fields: ctx, vType, namespace, value +func (_m *Plugin) GetVerifierByValue(ctx context.Context, vType fftypes.FFEnum, namespace string, value string) (*fftypes.Verifier, error) { + ret := _m.Called(ctx, vType, namespace, value) + + var r0 *fftypes.Verifier + if rf, ok := ret.Get(0).(func(context.Context, fftypes.FFEnum, string, string) *fftypes.Verifier); ok { + r0 = rf(ctx, vType, namespace, value) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*fftypes.Verifier) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, fftypes.FFEnum, string, string) error); ok { + r1 = rf(ctx, vType, namespace, value) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// 
GetVerifiers provides a mock function with given fields: ctx, filter +func (_m *Plugin) GetVerifiers(ctx context.Context, filter database.Filter) ([]*fftypes.Verifier, *database.FilterResult, error) { + ret := _m.Called(ctx, filter) + + var r0 []*fftypes.Verifier + if rf, ok := ret.Get(0).(func(context.Context, database.Filter) []*fftypes.Verifier); ok { + r0 = rf(ctx, filter) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*fftypes.Verifier) + } + } + + var r1 *database.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, database.Filter) *database.FilterResult); ok { + r1 = rf(ctx, filter) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*database.FilterResult) + } + } + + var r2 error + if rf, ok := ret.Get(2).(func(context.Context, database.Filter) error); ok { + r2 = rf(ctx, filter) + } else { + r2 = ret.Error(2) + } + + return r0, r1, r2 +} + // Init provides a mock function with given fields: ctx, prefix, callbacks func (_m *Plugin) Init(ctx context.Context, prefix config.Prefix, callbacks database.Callbacks) error { ret := _m.Called(ctx, prefix, callbacks) @@ -2403,6 +2403,20 @@ func (_m *Plugin) UpdateGroup(ctx context.Context, hash *fftypes.Bytes32, update return r0 } +// UpdateIdentity provides a mock function with given fields: ctx, id, update +func (_m *Plugin) UpdateIdentity(ctx context.Context, id *fftypes.UUID, update database.Update) error { + ret := _m.Called(ctx, id, update) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *fftypes.UUID, database.Update) error); ok { + r0 = rf(ctx, id, update) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // UpdateMessage provides a mock function with given fields: ctx, id, update func (_m *Plugin) UpdateMessage(ctx context.Context, id *fftypes.UUID, update database.Update) error { ret := _m.Called(ctx, id, update) @@ -2445,20 +2459,6 @@ func (_m *Plugin) UpdateNextPin(ctx context.Context, sequence int64, update data return r0 } -// UpdateNode provides a mock function with given fields: ctx, id, update -func (_m *Plugin) UpdateNode(ctx context.Context, id *fftypes.UUID, update database.Update) error { - ret := _m.Called(ctx, id, update) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *fftypes.UUID, database.Update) error); ok { - r0 = rf(ctx, id, update) - } else { - r0 = ret.Error(0) - } - - return r0 -} - // UpdateOffset provides a mock function with given fields: ctx, rowID, update func (_m *Plugin) UpdateOffset(ctx context.Context, rowID int64, update database.Update) error { ret := _m.Called(ctx, rowID, update) @@ -2473,20 +2473,6 @@ func (_m *Plugin) UpdateOffset(ctx context.Context, rowID int64, update database return r0 } -// UpdateOrganization provides a mock function with given fields: ctx, id, update -func (_m *Plugin) UpdateOrganization(ctx context.Context, id *fftypes.UUID, update database.Update) error { - ret := _m.Called(ctx, id, update) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *fftypes.UUID, database.Update) error); ok { - r0 = rf(ctx, id, update) - } else { - r0 = ret.Error(0) - } - - return r0 -} - // UpdatePins provides a mock function with given fields: ctx, filter, update func (_m *Plugin) UpdatePins(ctx context.Context, filter database.Filter, update database.Update) error { ret := _m.Called(ctx, filter, update) @@ -2543,6 +2529,20 @@ func (_m *Plugin) UpdateTransaction(ctx context.Context, id *fftypes.UUID, updat return r0 } +// UpdateVerifier provides a mock function with given fields: ctx, hash, update +func (_m *Plugin) 
UpdateVerifier(ctx context.Context, hash *fftypes.Bytes32, update database.Update) error { + ret := _m.Called(ctx, hash, update) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *fftypes.Bytes32, database.Update) error); ok { + r0 = rf(ctx, hash, update) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // UpsertBatch provides a mock function with given fields: ctx, data func (_m *Plugin) UpsertBatch(ctx context.Context, data *fftypes.Batch) error { ret := _m.Called(ctx, data) @@ -2683,13 +2683,13 @@ func (_m *Plugin) UpsertGroup(ctx context.Context, data *fftypes.Group, optimiza return r0 } -// UpsertMessage provides a mock function with given fields: ctx, message, optimization -func (_m *Plugin) UpsertMessage(ctx context.Context, message *fftypes.Message, optimization database.UpsertOptimization) error { - ret := _m.Called(ctx, message, optimization) +// UpsertIdentity provides a mock function with given fields: ctx, data, optimization +func (_m *Plugin) UpsertIdentity(ctx context.Context, data *fftypes.Identity, optimization database.UpsertOptimization) error { + ret := _m.Called(ctx, data, optimization) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *fftypes.Message, database.UpsertOptimization) error); ok { - r0 = rf(ctx, message, optimization) + if rf, ok := ret.Get(0).(func(context.Context, *fftypes.Identity, database.UpsertOptimization) error); ok { + r0 = rf(ctx, data, optimization) } else { r0 = ret.Error(0) } @@ -2697,13 +2697,13 @@ func (_m *Plugin) UpsertMessage(ctx context.Context, message *fftypes.Message, o return r0 } -// UpsertNamespace provides a mock function with given fields: ctx, data, allowExisting -func (_m *Plugin) UpsertNamespace(ctx context.Context, data *fftypes.Namespace, allowExisting bool) error { - ret := _m.Called(ctx, data, allowExisting) +// UpsertMessage provides a mock function with given fields: ctx, message, optimization +func (_m *Plugin) UpsertMessage(ctx context.Context, message *fftypes.Message, optimization database.UpsertOptimization) error { + ret := _m.Called(ctx, message, optimization) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *fftypes.Namespace, bool) error); ok { - r0 = rf(ctx, data, allowExisting) + if rf, ok := ret.Get(0).(func(context.Context, *fftypes.Message, database.UpsertOptimization) error); ok { + r0 = rf(ctx, message, optimization) } else { r0 = ret.Error(0) } @@ -2711,12 +2711,12 @@ func (_m *Plugin) UpsertNamespace(ctx context.Context, data *fftypes.Namespace, return r0 } -// UpsertNode provides a mock function with given fields: ctx, data, allowExisting -func (_m *Plugin) UpsertNode(ctx context.Context, data *fftypes.Node, allowExisting bool) error { +// UpsertNamespace provides a mock function with given fields: ctx, data, allowExisting +func (_m *Plugin) UpsertNamespace(ctx context.Context, data *fftypes.Namespace, allowExisting bool) error { ret := _m.Called(ctx, data, allowExisting) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *fftypes.Node, bool) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, *fftypes.Namespace, bool) error); ok { r0 = rf(ctx, data, allowExisting) } else { r0 = ret.Error(0) @@ -2753,20 +2753,6 @@ func (_m *Plugin) UpsertOffset(ctx context.Context, data *fftypes.Offset, allowE return r0 } -// UpsertOrganization provides a mock function with given fields: ctx, data, allowExisting -func (_m *Plugin) UpsertOrganization(ctx context.Context, data *fftypes.Organization, allowExisting bool) error { - ret := _m.Called(ctx, 
data, allowExisting) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *fftypes.Organization, bool) error); ok { - r0 = rf(ctx, data, allowExisting) - } else { - r0 = ret.Error(0) - } - - return r0 -} - // UpsertPin provides a mock function with given fields: ctx, parked func (_m *Plugin) UpsertPin(ctx context.Context, parked *fftypes.Pin) error { ret := _m.Called(ctx, parked) @@ -2836,3 +2822,17 @@ func (_m *Plugin) UpsertTokenTransfer(ctx context.Context, transfer *fftypes.Tok return r0 } + +// UpsertVerifier provides a mock function with given fields: ctx, data, optimization +func (_m *Plugin) UpsertVerifier(ctx context.Context, data *fftypes.Verifier, optimization database.UpsertOptimization) error { + ret := _m.Called(ctx, data, optimization) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *fftypes.Verifier, database.UpsertOptimization) error); ok { + r0 = rf(ctx, data, optimization) + } else { + r0 = ret.Error(0) + } + + return r0 +} diff --git a/mocks/dataexchangemocks/plugin.go b/mocks/dataexchangemocks/plugin.go index 48f4e5fe13..6a1209921e 100644 --- a/mocks/dataexchangemocks/plugin.go +++ b/mocks/dataexchangemocks/plugin.go @@ -22,11 +22,11 @@ type Plugin struct { } // AddPeer provides a mock function with given fields: ctx, peer -func (_m *Plugin) AddPeer(ctx context.Context, peer fftypes.DXInfo) error { +func (_m *Plugin) AddPeer(ctx context.Context, peer fftypes.JSONObject) error { ret := _m.Called(ctx, peer) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, fftypes.DXInfo) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, fftypes.JSONObject) error); ok { r0 = rf(ctx, peer) } else { r0 = ret.Error(0) @@ -105,14 +105,16 @@ func (_m *Plugin) DownloadBLOB(ctx context.Context, payloadRef string) (io.ReadC } // GetEndpointInfo provides a mock function with given fields: ctx -func (_m *Plugin) GetEndpointInfo(ctx context.Context) (fftypes.DXInfo, error) { +func (_m *Plugin) GetEndpointInfo(ctx context.Context) (fftypes.JSONObject, error) { ret := _m.Called(ctx) - var r0 fftypes.DXInfo - if rf, ok := ret.Get(0).(func(context.Context) fftypes.DXInfo); ok { + var r0 fftypes.JSONObject + if rf, ok := ret.Get(0).(func(context.Context) fftypes.JSONObject); ok { r0 = rf(ctx) } else { - r0 = ret.Get(0).(fftypes.DXInfo) + if ret.Get(0) != nil { + r0 = ret.Get(0).(fftypes.JSONObject) + } } var r1 error @@ -126,11 +128,11 @@ func (_m *Plugin) GetEndpointInfo(ctx context.Context) (fftypes.DXInfo, error) { } // Init provides a mock function with given fields: ctx, prefix, nodes, callbacks -func (_m *Plugin) Init(ctx context.Context, prefix config.Prefix, nodes []fftypes.DXInfo, callbacks dataexchange.Callbacks) error { +func (_m *Plugin) Init(ctx context.Context, prefix config.Prefix, nodes []fftypes.JSONObject, callbacks dataexchange.Callbacks) error { ret := _m.Called(ctx, prefix, nodes, callbacks) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, config.Prefix, []fftypes.DXInfo, dataexchange.Callbacks) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, config.Prefix, []fftypes.JSONObject, dataexchange.Callbacks) error); ok { r0 = rf(ctx, prefix, nodes, callbacks) } else { r0 = ret.Error(0) diff --git a/mocks/definitionsmocks/definition_handlers.go b/mocks/definitionsmocks/definition_handlers.go index 20cf216f0c..91b38ec4c4 100644 --- a/mocks/definitionsmocks/definition_handlers.go +++ b/mocks/definitionsmocks/definition_handlers.go @@ -94,34 +94,25 @@ func (_m *DefinitionHandlers) GetGroupsNS(ctx 
context.Context, ns string, filter return r0, r1, r2 } -// HandleDefinitionBroadcast provides a mock function with given fields: ctx, msg, data, tx -func (_m *DefinitionHandlers) HandleDefinitionBroadcast(ctx context.Context, msg *fftypes.Message, data []*fftypes.Data, tx *fftypes.UUID) (definitions.DefinitionMessageAction, *definitions.DefinitionBatchActions, error) { - ret := _m.Called(ctx, msg, data, tx) +// HandleDefinitionBroadcast provides a mock function with given fields: ctx, state, msg, data, tx +func (_m *DefinitionHandlers) HandleDefinitionBroadcast(ctx context.Context, state definitions.DefinitionBatchState, msg *fftypes.Message, data []*fftypes.Data, tx *fftypes.UUID) (definitions.HandlerResult, error) { + ret := _m.Called(ctx, state, msg, data, tx) - var r0 definitions.DefinitionMessageAction - if rf, ok := ret.Get(0).(func(context.Context, *fftypes.Message, []*fftypes.Data, *fftypes.UUID) definitions.DefinitionMessageAction); ok { - r0 = rf(ctx, msg, data, tx) + var r0 definitions.HandlerResult + if rf, ok := ret.Get(0).(func(context.Context, definitions.DefinitionBatchState, *fftypes.Message, []*fftypes.Data, *fftypes.UUID) definitions.HandlerResult); ok { + r0 = rf(ctx, state, msg, data, tx) } else { - r0 = ret.Get(0).(definitions.DefinitionMessageAction) + r0 = ret.Get(0).(definitions.HandlerResult) } - var r1 *definitions.DefinitionBatchActions - if rf, ok := ret.Get(1).(func(context.Context, *fftypes.Message, []*fftypes.Data, *fftypes.UUID) *definitions.DefinitionBatchActions); ok { - r1 = rf(ctx, msg, data, tx) - } else { - if ret.Get(1) != nil { - r1 = ret.Get(1).(*definitions.DefinitionBatchActions) - } - } - - var r2 error - if rf, ok := ret.Get(2).(func(context.Context, *fftypes.Message, []*fftypes.Data, *fftypes.UUID) error); ok { - r2 = rf(ctx, msg, data, tx) + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, definitions.DefinitionBatchState, *fftypes.Message, []*fftypes.Data, *fftypes.UUID) error); ok { + r1 = rf(ctx, state, msg, data, tx) } else { - r2 = ret.Error(2) + r1 = ret.Error(1) } - return r0, r1, r2 + return r0, r1 } // ResolveInitGroup provides a mock function with given fields: ctx, msg diff --git a/mocks/eventmocks/event_manager.go b/mocks/eventmocks/event_manager.go index bcf0a589f9..67f8d936ac 100644 --- a/mocks/eventmocks/event_manager.go +++ b/mocks/eventmocks/event_manager.go @@ -51,13 +51,13 @@ func (_m *EventManager) BLOBReceived(dx dataexchange.Plugin, peerID string, hash return r0 } -// BatchPinComplete provides a mock function with given fields: bi, batch, signingIdentity -func (_m *EventManager) BatchPinComplete(bi blockchain.Plugin, batch *blockchain.BatchPin, signingIdentity string) error { - ret := _m.Called(bi, batch, signingIdentity) +// BatchPinComplete provides a mock function with given fields: bi, batch, signingKey +func (_m *EventManager) BatchPinComplete(bi blockchain.Plugin, batch *blockchain.BatchPin, signingKey *fftypes.VerifierRef) error { + ret := _m.Called(bi, batch, signingKey) var r0 error - if rf, ok := ret.Get(0).(func(blockchain.Plugin, *blockchain.BatchPin, string) error); ok { - r0 = rf(bi, batch, signingIdentity) + if rf, ok := ret.Get(0).(func(blockchain.Plugin, *blockchain.BatchPin, *fftypes.VerifierRef) error); ok { + r0 = rf(bi, batch, signingKey) } else { r0 = ret.Error(0) } diff --git a/mocks/identitymanagermocks/manager.go b/mocks/identitymanagermocks/manager.go index 74ee5b27d3..7bb3dc823e 100644 --- a/mocks/identitymanagermocks/manager.go +++ b/mocks/identitymanagermocks/manager.go @@ -15,20 
+15,52 @@ type Manager struct { mock.Mock } -// GetLocalOrgKey provides a mock function with given fields: ctx -func (_m *Manager) GetLocalOrgKey(ctx context.Context) (string, error) { - ret := _m.Called(ctx) +// CachedIdentityLookup provides a mock function with given fields: ctx, did +func (_m *Manager) CachedIdentityLookup(ctx context.Context, did string) (*fftypes.Identity, bool, error) { + ret := _m.Called(ctx, did) - var r0 string - if rf, ok := ret.Get(0).(func(context.Context) string); ok { - r0 = rf(ctx) + var r0 *fftypes.Identity + if rf, ok := ret.Get(0).(func(context.Context, string) *fftypes.Identity); ok { + r0 = rf(ctx, did) } else { - r0 = ret.Get(0).(string) + if ret.Get(0) != nil { + r0 = ret.Get(0).(*fftypes.Identity) + } + } + + var r1 bool + if rf, ok := ret.Get(1).(func(context.Context, string) bool); ok { + r1 = rf(ctx, did) + } else { + r1 = ret.Get(1).(bool) + } + + var r2 error + if rf, ok := ret.Get(2).(func(context.Context, string) error); ok { + r2 = rf(ctx, did) + } else { + r2 = ret.Error(2) + } + + return r0, r1, r2 +} + +// CachedIdentityLookupByID provides a mock function with given fields: ctx, id +func (_m *Manager) CachedIdentityLookupByID(ctx context.Context, id *fftypes.UUID) (*fftypes.Identity, error) { + ret := _m.Called(ctx, id) + + var r0 *fftypes.Identity + if rf, ok := ret.Get(0).(func(context.Context, *fftypes.UUID) *fftypes.Identity); ok { + r0 = rf(ctx, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*fftypes.Identity) + } } var r1 error - if rf, ok := ret.Get(1).(func(context.Context) error); ok { - r1 = rf(ctx) + if rf, ok := ret.Get(1).(func(context.Context, *fftypes.UUID) error); ok { + r1 = rf(ctx, id) } else { r1 = ret.Error(1) } @@ -36,22 +68,22 @@ func (_m *Manager) GetLocalOrgKey(ctx context.Context) (string, error) { return r0, r1 } -// GetLocalOrganization provides a mock function with given fields: ctx -func (_m *Manager) GetLocalOrganization(ctx context.Context) (*fftypes.Organization, error) { - ret := _m.Called(ctx) +// CachedVerifierLookup provides a mock function with given fields: ctx, vType, ns, value +func (_m *Manager) CachedVerifierLookup(ctx context.Context, vType fftypes.FFEnum, ns string, value string) (*fftypes.Verifier, error) { + ret := _m.Called(ctx, vType, ns, value) - var r0 *fftypes.Organization - if rf, ok := ret.Get(0).(func(context.Context) *fftypes.Organization); ok { - r0 = rf(ctx) + var r0 *fftypes.Verifier + if rf, ok := ret.Get(0).(func(context.Context, fftypes.FFEnum, string, string) *fftypes.Verifier); ok { + r0 = rf(ctx, vType, ns, value) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).(*fftypes.Organization) + r0 = ret.Get(0).(*fftypes.Verifier) } } var r1 error - if rf, ok := ret.Get(1).(func(context.Context) error); ok { - r1 = rf(ctx) + if rf, ok := ret.Get(1).(func(context.Context, fftypes.FFEnum, string, string) error); ok { + r1 = rf(ctx, vType, ns, value) } else { r1 = ret.Error(1) } @@ -59,43 +91,63 @@ func (_m *Manager) GetLocalOrganization(ctx context.Context) (*fftypes.Organizat return r0, r1 } -// OrgDID provides a mock function with given fields: org -func (_m *Manager) OrgDID(org *fftypes.Organization) string { - ret := _m.Called(org) +// FindIdentityForVerifier provides a mock function with given fields: ctx, iTypes, namespace, verifier +func (_m *Manager) FindIdentityForVerifier(ctx context.Context, iTypes []fftypes.FFEnum, namespace string, verifier *fftypes.VerifierRef) (*fftypes.Identity, error) { + ret := _m.Called(ctx, iTypes, namespace, verifier) - var r0 string - 
if rf, ok := ret.Get(0).(func(*fftypes.Organization) string); ok { - r0 = rf(org) + var r0 *fftypes.Identity + if rf, ok := ret.Get(0).(func(context.Context, []fftypes.FFEnum, string, *fftypes.VerifierRef) *fftypes.Identity); ok { + r0 = rf(ctx, iTypes, namespace, verifier) } else { - r0 = ret.Get(0).(string) + if ret.Get(0) != nil { + r0 = ret.Get(0).(*fftypes.Identity) + } } - return r0 + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []fftypes.FFEnum, string, *fftypes.VerifierRef) error); ok { + r1 = rf(ctx, iTypes, namespace, verifier) + } else { + r1 = ret.Error(1) + } + + return r0, r1 } -// ResolveInputIdentity provides a mock function with given fields: ctx, _a1 -func (_m *Manager) ResolveInputIdentity(ctx context.Context, _a1 *fftypes.Identity) error { - ret := _m.Called(ctx, _a1) +// GetNodeOwnerBlockchainKey provides a mock function with given fields: ctx +func (_m *Manager) GetNodeOwnerBlockchainKey(ctx context.Context) (*fftypes.VerifierRef, error) { + ret := _m.Called(ctx) - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *fftypes.Identity) error); ok { - r0 = rf(ctx, _a1) + var r0 *fftypes.VerifierRef + if rf, ok := ret.Get(0).(func(context.Context) *fftypes.VerifierRef); ok { + r0 = rf(ctx) } else { - r0 = ret.Error(0) + if ret.Get(0) != nil { + r0 = ret.Get(0).(*fftypes.VerifierRef) + } } - return r0 + var r1 error + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(ctx) + } else { + r1 = ret.Error(1) + } + + return r0, r1 } -// ResolveLocalOrgDID provides a mock function with given fields: ctx -func (_m *Manager) ResolveLocalOrgDID(ctx context.Context) (string, error) { +// GetNodeOwnerOrg provides a mock function with given fields: ctx +func (_m *Manager) GetNodeOwnerOrg(ctx context.Context) (*fftypes.Identity, error) { ret := _m.Called(ctx) - var r0 string - if rf, ok := ret.Get(0).(func(context.Context) string); ok { + var r0 *fftypes.Identity + if rf, ok := ret.Get(0).(func(context.Context) *fftypes.Identity); ok { r0 = rf(ctx) } else { - r0 = ret.Get(0).(string) + if ret.Get(0) != nil { + r0 = ret.Get(0).(*fftypes.Identity) + } } var r1 error @@ -108,20 +160,20 @@ func (_m *Manager) ResolveLocalOrgDID(ctx context.Context) (string, error) { return r0, r1 } -// ResolveSigningKey provides a mock function with given fields: ctx, inputKey -func (_m *Manager) ResolveSigningKey(ctx context.Context, inputKey string) (string, error) { - ret := _m.Called(ctx, inputKey) +// NormalizeSigningKey provides a mock function with given fields: ctx, namespace, keyNormalizationMode +func (_m *Manager) NormalizeSigningKey(ctx context.Context, namespace string, keyNormalizationMode int) (string, error) { + ret := _m.Called(ctx, namespace, keyNormalizationMode) var r0 string - if rf, ok := ret.Get(0).(func(context.Context, string) string); ok { - r0 = rf(ctx, inputKey) + if rf, ok := ret.Get(0).(func(context.Context, string, int) string); ok { + r0 = rf(ctx, namespace, keyNormalizationMode) } else { r0 = ret.Get(0).(string) } var r1 error - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, inputKey) + if rf, ok := ret.Get(1).(func(context.Context, string, int) error); ok { + r1 = rf(ctx, namespace, keyNormalizationMode) } else { r1 = ret.Error(1) } @@ -129,23 +181,83 @@ func (_m *Manager) ResolveSigningKey(ctx context.Context, inputKey string) (stri return r0, r1 } -// ResolveSigningKeyIdentity provides a mock function with given fields: ctx, signingKey -func (_m *Manager) ResolveSigningKeyIdentity(ctx 
context.Context, signingKey string) (string, error) { - ret := _m.Called(ctx, signingKey) +// ResolveIdentitySigner provides a mock function with given fields: ctx, _a1 +func (_m *Manager) ResolveIdentitySigner(ctx context.Context, _a1 *fftypes.Identity) (*fftypes.SignerRef, error) { + ret := _m.Called(ctx, _a1) - var r0 string - if rf, ok := ret.Get(0).(func(context.Context, string) string); ok { - r0 = rf(ctx, signingKey) + var r0 *fftypes.SignerRef + if rf, ok := ret.Get(0).(func(context.Context, *fftypes.Identity) *fftypes.SignerRef); ok { + r0 = rf(ctx, _a1) } else { - r0 = ret.Get(0).(string) + if ret.Get(0) != nil { + r0 = ret.Get(0).(*fftypes.SignerRef) + } } var r1 error - if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { - r1 = rf(ctx, signingKey) + if rf, ok := ret.Get(1).(func(context.Context, *fftypes.Identity) error); ok { + r1 = rf(ctx, _a1) } else { r1 = ret.Error(1) } return r0, r1 } + +// ResolveInputSigningIdentity provides a mock function with given fields: ctx, namespace, msgSignerRef +func (_m *Manager) ResolveInputSigningIdentity(ctx context.Context, namespace string, msgSignerRef *fftypes.SignerRef) error { + ret := _m.Called(ctx, namespace, msgSignerRef) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string, *fftypes.SignerRef) error); ok { + r0 = rf(ctx, namespace, msgSignerRef) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// ResolveNodeOwnerSigningIdentity provides a mock function with given fields: ctx, msgSignerRef +func (_m *Manager) ResolveNodeOwnerSigningIdentity(ctx context.Context, msgSignerRef *fftypes.SignerRef) error { + ret := _m.Called(ctx, msgSignerRef) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *fftypes.SignerRef) error); ok { + r0 = rf(ctx, msgSignerRef) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// VerifyIdentityChain provides a mock function with given fields: ctx, _a1 +func (_m *Manager) VerifyIdentityChain(ctx context.Context, _a1 *fftypes.Identity) (*fftypes.Identity, bool, error) { + ret := _m.Called(ctx, _a1) + + var r0 *fftypes.Identity + if rf, ok := ret.Get(0).(func(context.Context, *fftypes.Identity) *fftypes.Identity); ok { + r0 = rf(ctx, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*fftypes.Identity) + } + } + + var r1 bool + if rf, ok := ret.Get(1).(func(context.Context, *fftypes.Identity) bool); ok { + r1 = rf(ctx, _a1) + } else { + r1 = ret.Get(1).(bool) + } + + var r2 error + if rf, ok := ret.Get(2).(func(context.Context, *fftypes.Identity) error); ok { + r2 = rf(ctx, _a1) + } else { + r2 = ret.Error(2) + } + + return r0, r1, r2 +} diff --git a/mocks/networkmapmocks/manager.go b/mocks/networkmapmocks/manager.go index 6c95e1774d..7ec1c297d3 100644 --- a/mocks/networkmapmocks/manager.go +++ b/mocks/networkmapmocks/manager.go @@ -9,6 +9,8 @@ import ( fftypes "github.com/hyperledger/firefly/pkg/fftypes" mock "github.com/stretchr/testify/mock" + + networkmap "github.com/hyperledger/firefly/internal/networkmap" ) // Manager is an autogenerated mock type for the Manager type @@ -16,16 +18,126 @@ type Manager struct { mock.Mock } +// GetDIDDocForIndentityByID provides a mock function with given fields: ctx, ns, id +func (_m *Manager) GetDIDDocForIndentityByID(ctx context.Context, ns string, id string) (*networkmap.DIDDocument, error) { + ret := _m.Called(ctx, ns, id) + + var r0 *networkmap.DIDDocument + if rf, ok := ret.Get(0).(func(context.Context, string, string) *networkmap.DIDDocument); ok { + r0 = rf(ctx, ns, id) + } else { + if ret.Get(0) 
!= nil { + r0 = ret.Get(0).(*networkmap.DIDDocument) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok { + r1 = rf(ctx, ns, id) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetIdentities provides a mock function with given fields: ctx, ns, filter +func (_m *Manager) GetIdentities(ctx context.Context, ns string, filter database.AndFilter) ([]*fftypes.Identity, *database.FilterResult, error) { + ret := _m.Called(ctx, ns, filter) + + var r0 []*fftypes.Identity + if rf, ok := ret.Get(0).(func(context.Context, string, database.AndFilter) []*fftypes.Identity); ok { + r0 = rf(ctx, ns, filter) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*fftypes.Identity) + } + } + + var r1 *database.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, database.AndFilter) *database.FilterResult); ok { + r1 = rf(ctx, ns, filter) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*database.FilterResult) + } + } + + var r2 error + if rf, ok := ret.Get(2).(func(context.Context, string, database.AndFilter) error); ok { + r2 = rf(ctx, ns, filter) + } else { + r2 = ret.Error(2) + } + + return r0, r1, r2 +} + +// GetIdentityByID provides a mock function with given fields: ctx, ns, id +func (_m *Manager) GetIdentityByID(ctx context.Context, ns string, id string) (*fftypes.Identity, error) { + ret := _m.Called(ctx, ns, id) + + var r0 *fftypes.Identity + if rf, ok := ret.Get(0).(func(context.Context, string, string) *fftypes.Identity); ok { + r0 = rf(ctx, ns, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*fftypes.Identity) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok { + r1 = rf(ctx, ns, id) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetIdentityVerifiers provides a mock function with given fields: ctx, ns, id, filter +func (_m *Manager) GetIdentityVerifiers(ctx context.Context, ns string, id string, filter database.AndFilter) ([]*fftypes.Verifier, *database.FilterResult, error) { + ret := _m.Called(ctx, ns, id, filter) + + var r0 []*fftypes.Verifier + if rf, ok := ret.Get(0).(func(context.Context, string, string, database.AndFilter) []*fftypes.Verifier); ok { + r0 = rf(ctx, ns, id, filter) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*fftypes.Verifier) + } + } + + var r1 *database.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, string, database.AndFilter) *database.FilterResult); ok { + r1 = rf(ctx, ns, id, filter) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(*database.FilterResult) + } + } + + var r2 error + if rf, ok := ret.Get(2).(func(context.Context, string, string, database.AndFilter) error); ok { + r2 = rf(ctx, ns, id, filter) + } else { + r2 = ret.Error(2) + } + + return r0, r1, r2 +} + // GetNodeByID provides a mock function with given fields: ctx, id -func (_m *Manager) GetNodeByID(ctx context.Context, id string) (*fftypes.Node, error) { +func (_m *Manager) GetNodeByID(ctx context.Context, id string) (*fftypes.Identity, error) { ret := _m.Called(ctx, id) - var r0 *fftypes.Node - if rf, ok := ret.Get(0).(func(context.Context, string) *fftypes.Node); ok { + var r0 *fftypes.Identity + if rf, ok := ret.Get(0).(func(context.Context, string) *fftypes.Identity); ok { r0 = rf(ctx, id) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).(*fftypes.Node) + r0 = ret.Get(0).(*fftypes.Identity) } } @@ -40,15 +152,15 @@ func (_m *Manager) GetNodeByID(ctx context.Context, id string) (*fftypes.Node, e } 
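Illustrative sketch (an assumed test, not included in this patch): with the networkmap Manager getters now returning *fftypes.Identity instead of *fftypes.Node / *fftypes.Organization, a caller can stub the regenerated mock as below. The namespace "ns1", the empty Identity value, and the test/package names are assumptions for illustration; only the mock method signatures come from this diff.

package networkmapmocks_test // hypothetical test package, for illustration only

import (
	"context"
	"testing"

	"github.com/hyperledger/firefly/mocks/networkmapmocks"
	"github.com/hyperledger/firefly/pkg/fftypes"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
)

func TestStubIdentityLookup(t *testing.T) {
	nmm := &networkmapmocks.Manager{}
	id := fftypes.NewUUID()
	// GetIdentityByID(ctx, ns, id) now returns *fftypes.Identity, per the regenerated mock above
	nmm.On("GetIdentityByID", mock.Anything, "ns1", id.String()).Return(&fftypes.Identity{}, nil)

	identity, err := nmm.GetIdentityByID(context.Background(), "ns1", id.String())
	assert.NoError(t, err)
	assert.NotNil(t, identity)
	nmm.AssertExpectations(t)
}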
// GetNodes provides a mock function with given fields: ctx, filter -func (_m *Manager) GetNodes(ctx context.Context, filter database.AndFilter) ([]*fftypes.Node, *database.FilterResult, error) { +func (_m *Manager) GetNodes(ctx context.Context, filter database.AndFilter) ([]*fftypes.Identity, *database.FilterResult, error) { ret := _m.Called(ctx, filter) - var r0 []*fftypes.Node - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*fftypes.Node); ok { + var r0 []*fftypes.Identity + if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*fftypes.Identity); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).([]*fftypes.Node) + r0 = ret.Get(0).([]*fftypes.Identity) } } @@ -72,15 +184,15 @@ func (_m *Manager) GetNodes(ctx context.Context, filter database.AndFilter) ([]* } // GetOrganizationByID provides a mock function with given fields: ctx, id -func (_m *Manager) GetOrganizationByID(ctx context.Context, id string) (*fftypes.Organization, error) { +func (_m *Manager) GetOrganizationByID(ctx context.Context, id string) (*fftypes.Identity, error) { ret := _m.Called(ctx, id) - var r0 *fftypes.Organization - if rf, ok := ret.Get(0).(func(context.Context, string) *fftypes.Organization); ok { + var r0 *fftypes.Identity + if rf, ok := ret.Get(0).(func(context.Context, string) *fftypes.Identity); ok { r0 = rf(ctx, id) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).(*fftypes.Organization) + r0 = ret.Get(0).(*fftypes.Identity) } } @@ -95,15 +207,15 @@ func (_m *Manager) GetOrganizationByID(ctx context.Context, id string) (*fftypes } // GetOrganizations provides a mock function with given fields: ctx, filter -func (_m *Manager) GetOrganizations(ctx context.Context, filter database.AndFilter) ([]*fftypes.Organization, *database.FilterResult, error) { +func (_m *Manager) GetOrganizations(ctx context.Context, filter database.AndFilter) ([]*fftypes.Identity, *database.FilterResult, error) { ret := _m.Called(ctx, filter) - var r0 []*fftypes.Organization - if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*fftypes.Organization); ok { + var r0 []*fftypes.Identity + if rf, ok := ret.Get(0).(func(context.Context, database.AndFilter) []*fftypes.Identity); ok { r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).([]*fftypes.Organization) + r0 = ret.Get(0).([]*fftypes.Identity) } } @@ -126,31 +238,54 @@ func (_m *Manager) GetOrganizations(ctx context.Context, filter database.AndFilt return r0, r1, r2 } -// RegisterNode provides a mock function with given fields: ctx, waitConfirm -func (_m *Manager) RegisterNode(ctx context.Context, waitConfirm bool) (*fftypes.Node, *fftypes.Message, error) { - ret := _m.Called(ctx, waitConfirm) +// GetVerifierByHash provides a mock function with given fields: ctx, ns, hash +func (_m *Manager) GetVerifierByHash(ctx context.Context, ns string, hash string) (*fftypes.Verifier, error) { + ret := _m.Called(ctx, ns, hash) - var r0 *fftypes.Node - if rf, ok := ret.Get(0).(func(context.Context, bool) *fftypes.Node); ok { - r0 = rf(ctx, waitConfirm) + var r0 *fftypes.Verifier + if rf, ok := ret.Get(0).(func(context.Context, string, string) *fftypes.Verifier); ok { + r0 = rf(ctx, ns, hash) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).(*fftypes.Node) + r0 = ret.Get(0).(*fftypes.Verifier) } } - var r1 *fftypes.Message - if rf, ok := ret.Get(1).(func(context.Context, bool) *fftypes.Message); ok { - r1 = rf(ctx, waitConfirm) + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, 
string, string) error); ok { + r1 = rf(ctx, ns, hash) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetVerifiers provides a mock function with given fields: ctx, ns, filter +func (_m *Manager) GetVerifiers(ctx context.Context, ns string, filter database.AndFilter) ([]*fftypes.Verifier, *database.FilterResult, error) { + ret := _m.Called(ctx, ns, filter) + + var r0 []*fftypes.Verifier + if rf, ok := ret.Get(0).(func(context.Context, string, database.AndFilter) []*fftypes.Verifier); ok { + r0 = rf(ctx, ns, filter) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*fftypes.Verifier) + } + } + + var r1 *database.FilterResult + if rf, ok := ret.Get(1).(func(context.Context, string, database.AndFilter) *database.FilterResult); ok { + r1 = rf(ctx, ns, filter) } else { if ret.Get(1) != nil { - r1 = ret.Get(1).(*fftypes.Message) + r1 = ret.Get(1).(*database.FilterResult) } } var r2 error - if rf, ok := ret.Get(2).(func(context.Context, bool) error); ok { - r2 = rf(ctx, waitConfirm) + if rf, ok := ret.Get(2).(func(context.Context, string, database.AndFilter) error); ok { + r2 = rf(ctx, ns, filter) } else { r2 = ret.Error(2) } @@ -158,53 +293,90 @@ func (_m *Manager) RegisterNode(ctx context.Context, waitConfirm bool) (*fftypes return r0, r1, r2 } -// RegisterNodeOrganization provides a mock function with given fields: ctx, waitConfirm -func (_m *Manager) RegisterNodeOrganization(ctx context.Context, waitConfirm bool) (*fftypes.Organization, *fftypes.Message, error) { +// RegisterIdentity provides a mock function with given fields: ctx, ns, dto, waitConfirm +func (_m *Manager) RegisterIdentity(ctx context.Context, ns string, dto *fftypes.IdentityCreateDTO, waitConfirm bool) (*fftypes.Identity, error) { + ret := _m.Called(ctx, ns, dto, waitConfirm) + + var r0 *fftypes.Identity + if rf, ok := ret.Get(0).(func(context.Context, string, *fftypes.IdentityCreateDTO, bool) *fftypes.Identity); ok { + r0 = rf(ctx, ns, dto, waitConfirm) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*fftypes.Identity) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, string, *fftypes.IdentityCreateDTO, bool) error); ok { + r1 = rf(ctx, ns, dto, waitConfirm) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RegisterNode provides a mock function with given fields: ctx, waitConfirm +func (_m *Manager) RegisterNode(ctx context.Context, waitConfirm bool) (*fftypes.Identity, error) { ret := _m.Called(ctx, waitConfirm) - var r0 *fftypes.Organization - if rf, ok := ret.Get(0).(func(context.Context, bool) *fftypes.Organization); ok { + var r0 *fftypes.Identity + if rf, ok := ret.Get(0).(func(context.Context, bool) *fftypes.Identity); ok { r0 = rf(ctx, waitConfirm) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).(*fftypes.Organization) + r0 = ret.Get(0).(*fftypes.Identity) } } - var r1 *fftypes.Message - if rf, ok := ret.Get(1).(func(context.Context, bool) *fftypes.Message); ok { + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, bool) error); ok { r1 = rf(ctx, waitConfirm) } else { - if ret.Get(1) != nil { - r1 = ret.Get(1).(*fftypes.Message) + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RegisterNodeOrganization provides a mock function with given fields: ctx, waitConfirm +func (_m *Manager) RegisterNodeOrganization(ctx context.Context, waitConfirm bool) (*fftypes.Identity, error) { + ret := _m.Called(ctx, waitConfirm) + + var r0 *fftypes.Identity + if rf, ok := ret.Get(0).(func(context.Context, bool) *fftypes.Identity); ok { + r0 = rf(ctx, 
waitConfirm) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*fftypes.Identity) } } - var r2 error - if rf, ok := ret.Get(2).(func(context.Context, bool) error); ok { - r2 = rf(ctx, waitConfirm) + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, bool) error); ok { + r1 = rf(ctx, waitConfirm) } else { - r2 = ret.Error(2) + r1 = ret.Error(1) } - return r0, r1, r2 + return r0, r1 } // RegisterOrganization provides a mock function with given fields: ctx, org, waitConfirm -func (_m *Manager) RegisterOrganization(ctx context.Context, org *fftypes.Organization, waitConfirm bool) (*fftypes.Message, error) { +func (_m *Manager) RegisterOrganization(ctx context.Context, org *fftypes.IdentityCreateDTO, waitConfirm bool) (*fftypes.Identity, error) { ret := _m.Called(ctx, org, waitConfirm) - var r0 *fftypes.Message - if rf, ok := ret.Get(0).(func(context.Context, *fftypes.Organization, bool) *fftypes.Message); ok { + var r0 *fftypes.Identity + if rf, ok := ret.Get(0).(func(context.Context, *fftypes.IdentityCreateDTO, bool) *fftypes.Identity); ok { r0 = rf(ctx, org, waitConfirm) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).(*fftypes.Message) + r0 = ret.Get(0).(*fftypes.Identity) } } var r1 error - if rf, ok := ret.Get(1).(func(context.Context, *fftypes.Organization, bool) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, *fftypes.IdentityCreateDTO, bool) error); ok { r1 = rf(ctx, org, waitConfirm) } else { r1 = ret.Error(1) @@ -212,3 +384,26 @@ func (_m *Manager) RegisterOrganization(ctx context.Context, org *fftypes.Organi return r0, r1 } + +// UpdateIdentity provides a mock function with given fields: ctx, ns, id, dto, waitConfirm +func (_m *Manager) UpdateIdentity(ctx context.Context, ns string, id string, dto *fftypes.IdentityUpdateDTO, waitConfirm bool) (*fftypes.Identity, error) { + ret := _m.Called(ctx, ns, id, dto, waitConfirm) + + var r0 *fftypes.Identity + if rf, ok := ret.Get(0).(func(context.Context, string, string, *fftypes.IdentityUpdateDTO, bool) *fftypes.Identity); ok { + r0 = rf(ctx, ns, id, dto, waitConfirm) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*fftypes.Identity) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, string, string, *fftypes.IdentityUpdateDTO, bool) error); ok { + r1 = rf(ctx, ns, id, dto, waitConfirm) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} diff --git a/mocks/syncasyncmocks/bridge.go b/mocks/syncasyncmocks/bridge.go index 9531b5983c..96158d5021 100644 --- a/mocks/syncasyncmocks/bridge.go +++ b/mocks/syncasyncmocks/bridge.go @@ -23,6 +23,29 @@ func (_m *Bridge) Init(sysevents sysmessaging.SystemEvents) { _m.Called(sysevents) } +// WaitForIdentity provides a mock function with given fields: ctx, ns, id, send +func (_m *Bridge) WaitForIdentity(ctx context.Context, ns string, id *fftypes.UUID, send syncasync.RequestSender) (*fftypes.Identity, error) { + ret := _m.Called(ctx, ns, id, send) + + var r0 *fftypes.Identity + if rf, ok := ret.Get(0).(func(context.Context, string, *fftypes.UUID, syncasync.RequestSender) *fftypes.Identity); ok { + r0 = rf(ctx, ns, id, send) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*fftypes.Identity) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, string, *fftypes.UUID, syncasync.RequestSender) error); ok { + r1 = rf(ctx, ns, id, send) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // WaitForMessage provides a mock function with given fields: ctx, ns, id, send func (_m *Bridge) WaitForMessage(ctx 
context.Context, ns string, id *fftypes.UUID, send syncasync.RequestSender) (*fftypes.Message, error) { ret := _m.Called(ctx, ns, id, send) diff --git a/pkg/blockchain/plugin.go b/pkg/blockchain/plugin.go index 806a2b7295..95d57fa3f3 100644 --- a/pkg/blockchain/plugin.go +++ b/pkg/blockchain/plugin.go @@ -40,9 +40,13 @@ type Plugin interface { // Capabilities returns capabilities - not called until after Init Capabilities() *Capabilities - // ResolveSigningKey verifies that the supplied identity string is valid syntax according to the protocol. - // Can apply transformations to the supplied signing identity (only), such as lower case - ResolveSigningKey(ctx context.Context, signingKey string) (string, error) + // VerifierType returns the verifier (key) type that is used by this blockchain + VerifierType() fftypes.VerifierType + + // NormalizeSigningKey verifies that the supplied identity string is valid syntax according to the protocol. + // - Can apply transformations to the supplied signing identity (only), such as lower case. + // - Can perform sophisticated resolution, such as resolving a Fabric shortname to an MSP ID, or using an external REST API plugin to resolve an HD wallet address + NormalizeSigningKey(ctx context.Context, keyRef string) (string, error) // SubmitBatchPin sequences a batch of message globally to all viewers of a given ledger SubmitBatchPin(ctx context.Context, operationID *fftypes.UUID, ledgerID *fftypes.UUID, signingKey string, batch *BatchPin) error @@ -85,7 +89,7 @@ type Callbacks interface { // submitted by us, or by any other authorized party in the network. // // Error should will only be returned in shutdown scenarios - BatchPinComplete(batch *BatchPin, signingIdentity string) error + BatchPinComplete(batch *BatchPin, signingKey *fftypes.VerifierRef) error // BlockchainEvent notifies on the arrival of any event from a user-created subscription.
BlockchainEvent(event *EventWithSubscription) error diff --git a/pkg/database/plugin.go b/pkg/database/plugin.go index e05cdabebd..2b062184ad 100644 --- a/pkg/database/plugin.go +++ b/pkg/database/plugin.go @@ -244,41 +244,41 @@ type iEventCollection interface { GetEvents(ctx context.Context, filter Filter) (message []*fftypes.Event, res *FilterResult, err error) } -type iOrganizationsCollection interface { - // UpsertOrganization - Upsert an organization - UpsertOrganization(ctx context.Context, data *fftypes.Organization, allowExisting bool) (err error) +type iIdentitiesCollection interface { + // UpsertIdentity - Upsert an identity + UpsertIdentity(ctx context.Context, data *fftypes.Identity, optimization UpsertOptimization) (err error) - // UpdateOrganization - Update organization - UpdateOrganization(ctx context.Context, id *fftypes.UUID, update Update) (err error) + // UpdateIdentity - Update identity + UpdateIdentity(ctx context.Context, id *fftypes.UUID, update Update) (err error) - // GetOrganizationByIdentity - Get a organization by identity - GetOrganizationByIdentity(ctx context.Context, identity string) (org *fftypes.Organization, err error) + // GetIdentityByDID - Get an identity by DID + GetIdentityByDID(ctx context.Context, did string) (org *fftypes.Identity, err error) - // GetOrganizationByName - Get a organization by name - GetOrganizationByName(ctx context.Context, name string) (org *fftypes.Organization, err error) + // GetIdentityByName - Get an identity by name + GetIdentityByName(ctx context.Context, iType fftypes.IdentityType, namespace, name string) (org *fftypes.Identity, err error) - // GetOrganizationByID - Get a organization by ID - GetOrganizationByID(ctx context.Context, id *fftypes.UUID) (org *fftypes.Organization, err error) + // GetIdentityByID - Get an identity by ID + GetIdentityByID(ctx context.Context, id *fftypes.UUID) (org *fftypes.Identity, err error) - // GetOrganizations - Get organizations - GetOrganizations(ctx context.Context, filter Filter) (org []*fftypes.Organization, res *FilterResult, err error) + // GetIdentities - Get identities + GetIdentities(ctx context.Context, filter Filter) (org []*fftypes.Identity, res *FilterResult, err error) } -type iNodeCollection interface { - // UpsertNode - Upsert a node - UpsertNode(ctx context.Context, data *fftypes.Node, allowExisting bool) (err error) +type iVerifiersCollection interface { + // UpsertVerifier - Upsert a verifier + UpsertVerifier(ctx context.Context, data *fftypes.Verifier, optimization UpsertOptimization) (err error) - // UpdateNode - Update node - UpdateNode(ctx context.Context, id *fftypes.UUID, update Update) (err error) + // UpdateVerifier - Update verifier + UpdateVerifier(ctx context.Context, hash *fftypes.Bytes32, update Update) (err error) - // GetNode - Get a node by ID - GetNode(ctx context.Context, owner, name string) (node *fftypes.Node, err error) + // GetVerifierByValue - Get a verifier by value + GetVerifierByValue(ctx context.Context, vType fftypes.VerifierType, namespace, value string) (org *fftypes.Verifier, err error) - // GetNodeByID- Get a node by ID - GetNodeByID(ctx context.Context, id *fftypes.UUID) (node *fftypes.Node, err error) + // GetVerifierByHash - Get a verifier by its hash + GetVerifierByHash(ctx context.Context, hash *fftypes.Bytes32) (org *fftypes.Verifier, err error) - // GetNodes - Get nodes - GetNodes(ctx context.Context, filter Filter) (node []*fftypes.Node, res *FilterResult, err error) + // GetVerifiers - Get verifiers + GetVerifiers(ctx
context.Context, filter Filter) (org []*fftypes.Verifier, res *FilterResult, err error) } type iGroupCollection interface { @@ -530,8 +530,8 @@ type PersistenceInterface interface { iOperationCollection iSubscriptionCollection iEventCollection - iOrganizationsCollection - iNodeCollection + iIdentitiesCollection + iVerifiersCollection iGroupCollection iNonceCollection iNextPinCollection @@ -590,6 +590,7 @@ const ( CollectionFFIEvents UUIDCollectionNS = "ffievents" CollectionContractAPIs UUIDCollectionNS = "contractapis" CollectionContractSubscriptions UUIDCollectionNS = "contractsubscriptions" + CollectionIdentities UUIDCollectionNS = "identities" ) // HashCollectionNS is a collection where the primary key is a hash, such that it can @@ -598,7 +599,8 @@ const ( type HashCollectionNS CollectionName const ( - CollectionGroups HashCollectionNS = "groups" + CollectionGroups HashCollectionNS = "groups" + CollectionVerifiers HashCollectionNS = "verifiers" ) // UUIDCollection is like UUIDCollectionNS, but for objects that do not reside within a namespace @@ -606,8 +608,6 @@ type UUIDCollection CollectionName const ( CollectionNamespaces UUIDCollection = "namespaces" - CollectionNodes UUIDCollection = "nodes" - CollectionOrganizations UUIDCollection = "organizations" CollectionTokenTransfers UUIDCollection = "tokentransfers" CollectionTokenApprovals UUIDCollection = "tokenapprovals" ) @@ -782,13 +782,14 @@ var SubscriptionQueryFactory = &queryFields{ // EventQueryFactory filter fields for data events var EventQueryFactory = &queryFields{ - "id": &UUIDField{}, - "type": &StringField{}, - "namespace": &StringField{}, - "reference": &UUIDField{}, - "tx": &UUIDField{}, - "sequence": &Int64Field{}, - "created": &TimeField{}, + "id": &UUIDField{}, + "type": &StringField{}, + "namespace": &StringField{}, + "reference": &UUIDField{}, + "correlator": &UUIDField{}, + "tx": &UUIDField{}, + "sequence": &Int64Field{}, + "created": &TimeField{}, } // PinQueryFactory filter fields for parked contexts @@ -802,27 +803,31 @@ var PinQueryFactory = &queryFields{ "created": &TimeField{}, } -// OrganizationQueryFactory filter fields for organizations -var OrganizationQueryFactory = &queryFields{ - "id": &UUIDField{}, - "message": &UUIDField{}, - "parent": &StringField{}, - "identity": &StringField{}, - "description": &StringField{}, - "profile": &JSONField{}, - "created": &TimeField{}, -} - -// NodeQueryFactory filter fields for nodes -var NodeQueryFactory = &queryFields{ - "id": &UUIDField{}, - "message": &UUIDField{}, - "owner": &StringField{}, - "name": &StringField{}, - "description": &StringField{}, - "dx.peer": &StringField{}, - "dx.endpoint": &JSONField{}, - "created": &TimeField{}, +// IdentityQueryFactory filter fields for identities +var IdentityQueryFactory = &queryFields{ + "id": &UUIDField{}, + "did": &StringField{}, + "parent": &UUIDField{}, + "messages.claim": &UUIDField{}, + "messages.verification": &UUIDField{}, + "messages.update": &UUIDField{}, + "type": &StringField{}, + "namespace": &StringField{}, + "name": &StringField{}, + "description": &StringField{}, + "profile": &JSONField{}, + "created": &TimeField{}, + "updated": &TimeField{}, +} + +// VerifierQueryFactory filter fields for identities +var VerifierQueryFactory = &queryFields{ + "hash": &Bytes32Field{}, + "identity": &UUIDField{}, + "type": &StringField{}, + "namespace": &StringField{}, + "value": &StringField{}, + "created": &TimeField{}, } // GroupQueryFactory filter fields for nodes diff --git a/pkg/dataexchange/plugin.go 
b/pkg/dataexchange/plugin.go index aedf4fde84..de1166aff7 100644 --- a/pkg/dataexchange/plugin.go +++ b/pkg/dataexchange/plugin.go @@ -60,7 +60,7 @@ type Plugin interface { InitPrefix(prefix config.Prefix) // Init initializes the plugin, with configuration - Init(ctx context.Context, prefix config.Prefix, nodes []fftypes.DXInfo, callbacks Callbacks) error + Init(ctx context.Context, prefix config.Prefix, nodes []fftypes.JSONObject, callbacks Callbacks) error // Data exchange interface must not deliver any events until start is called Start() error @@ -69,10 +69,10 @@ type Plugin interface { Capabilities() *Capabilities // GetEndpointInfo returns the information about the local endpoint - GetEndpointInfo(ctx context.Context) (peer fftypes.DXInfo, err error) + GetEndpointInfo(ctx context.Context) (peer fftypes.JSONObject, err error) // AddPeer translates the configuration published by another peer, into a reference string that is used between DX and FireFly to refer to the peer - AddPeer(ctx context.Context, peer fftypes.DXInfo) (err error) + AddPeer(ctx context.Context, peer fftypes.JSONObject) (err error) // UploadBLOB streams a blob to storage, and returns the hash to confirm the hash calculated in Core matches the hash calculated in the plugin UploadBLOB(ctx context.Context, ns string, id fftypes.UUID, content io.Reader) (payloadRef string, hash *fftypes.Bytes32, size int64, err error) diff --git a/pkg/fftypes/batch.go b/pkg/fftypes/batch.go index e78517cad6..db5bbc62a0 100644 --- a/pkg/fftypes/batch.go +++ b/pkg/fftypes/batch.go @@ -30,7 +30,7 @@ type Batch struct { Namespace string `json:"namespace"` Type MessageType `json:"type"` Node *UUID `json:"node,omitempty"` - Identity + SignerRef Group *Bytes32 `jdon:"group,omitempty"` Hash *Bytes32 `json:"hash"` Created *FFTime `json:"created"` diff --git a/pkg/fftypes/constants.go b/pkg/fftypes/constants.go index 817134d560..b15d29f6c5 100644 --- a/pkg/fftypes/constants.go +++ b/pkg/fftypes/constants.go @@ -1,4 +1,4 @@ -// Copyright © 2021 Kaleido, Inc. +// Copyright © 2022 Kaleido, Inc. 
// // SPDX-License-Identifier: Apache-2.0 // @@ -22,31 +22,38 @@ const ( SystemNamespace = "ff_system" ) -type SystemTag string - const ( - // SystemTagDefineDatatype is the topic for messages that broadcast data definitions - SystemTagDefineDatatype SystemTag = "ff_define_datatype" + // SystemTagDefineDatatype is the tag for messages that broadcast data definitions + SystemTagDefineDatatype = "ff_define_datatype" + + // SystemTagDefineNamespace is the tag for messages that broadcast namespace definitions + SystemTagDefineNamespace = "ff_define_namespace" + + // DeprecatedSystemTagDefineOrganization is the tag for messages that broadcast organization definitions + DeprecatedSystemTagDefineOrganization = "ff_define_organization" + + // DeprecatedSystemTagDefineNode is the tag for messages that broadcast node definitions + DeprecatedSystemTagDefineNode = "ff_define_node" - // SystemTagDefineNamespace is the topic for messages that broadcast namespace definitions - SystemTagDefineNamespace SystemTag = "ff_define_namespace" + // SystemTagDefineGroup is the tag for messages that send the definition of a group, to all parties in that group + SystemTagDefineGroup = "ff_define_group" - // SystemTagDefineOrganization is the topic for messages that broadcast organization definitions - SystemTagDefineOrganization SystemTag = "ff_define_organization" + // SystemTagDefinePool is the tag for messages that broadcast data definitions + SystemTagDefinePool = "ff_define_pool" - // SystemTagDefineNode is the topic for messages that broadcast node definitions - SystemTagDefineNode SystemTag = "ff_define_node" + // SystemTagDefineFFI is the tag for messages that broadcast contract FFIs + SystemTagDefineFFI = "ff_define_ffi" - // SystemTagDefineGroup is the topic for messages that send the definition of a group, to all parties in that group - SystemTagDefineGroup SystemTag = "ff_define_group" + // SystemTagDefineContractAPI is the tag for messages that broadcast contract APIs + SystemTagDefineContractAPI = "ff_define_contract_api" - // SystemTagDefinePool is the topic for messages that broadcast data definitions - SystemTagDefinePool SystemTag = "ff_define_pool" + // SystemTagIdentityClaim is the tag for messages that broadcast an identity claim + SystemTagIdentityClaim = "ff_identity_claim" - // SystemTagDefineFFI is the topic for messages that broadcast contract FFIs - SystemTagDefineFFI SystemTag = "ff_define_ffi" + // SystemTagIdentityVerification is the tag for messages that broadcast an identity verification + SystemTagIdentityVerification = "ff_identity_verification" - // SystemTagDefineContractAPI is the topic for messages that broadcast contract APIs - SystemTagDefineContractAPI SystemTag = "ff_define_contract_api" + // SystemTagIdentityUpdate is the tag for messages that broadcast an identity update + SystemTagIdentityUpdate = "ff_identity_update" ) diff --git a/pkg/fftypes/event.go b/pkg/fftypes/event.go index b47b30b267..bdf7a1e6f6 100644 --- a/pkg/fftypes/event.go +++ b/pkg/fftypes/event.go @@ -31,8 +31,10 @@ var ( EventTypeNamespaceConfirmed EventType = ffEnum("eventtype", "namespace_confirmed") // EventTypeDatatypeConfirmed occurs when a new datatype is ready for use (on the namespace of the datatype) EventTypeDatatypeConfirmed EventType = ffEnum("eventtype", "datatype_confirmed") - // EventTypeGroupConfirmed occurs when a new group is ready to use (on the namespace of the group, on all group participants) - EventTypeGroupConfirmed EventType = ffEnum("eventtype", "group_confirmed") + // 
EventTypeIdentityConfirmed occurs when a new identity has been confirmed, as a result of a signed claim broadcast, and any associated claim verification + EventTypeIdentityConfirmed EventType = ffEnum("eventtype", "identity_confirmed") + // EventTypeIdentityUpdated occurs when an existing identity is updated by the owner of that identity + EventTypeIdentityUpdated EventType = ffEnum("eventtype", "identity_updated") // EventTypePoolConfirmed occurs when a new token pool is ready for use EventTypePoolConfirmed EventType = ffEnum("eventtype", "token_pool_confirmed") // EventTypeTransferConfirmed occurs when a token transfer has been confirmed @@ -58,6 +60,7 @@ type Event struct { Type EventType `json:"type" ffenum:"eventtype"` Namespace string `json:"namespace"` Reference *UUID `json:"reference"` + Correlator *UUID `json:"correlator,omitempty"` Transaction *UUID `json:"tx,omitempty"` Created *FFTime `json:"created"` } diff --git a/pkg/fftypes/group.go b/pkg/fftypes/group.go index 1b9ebcbf13..e17f5dfc10 100644 --- a/pkg/fftypes/group.go +++ b/pkg/fftypes/group.go @@ -1,4 +1,4 @@ -// Copyright © 2021 Kaleido, Inc. +// Copyright © 2022 Kaleido, Inc. // // SPDX-License-Identifier: Apache-2.0 // @@ -88,7 +88,7 @@ func (group *Group) Validate(ctx context.Context, existing bool) (err error) { } key := fmt.Sprintf("%s:%s", r.Node, r.Identity) if dupCheck[key] { - return i18n.NewError(ctx, i18n.MsgDuplicateMember, i) + return i18n.NewError(ctx, i18n.MsgDuplicateMember, i, key) } dupCheck[key] = true } diff --git a/pkg/fftypes/identity.go b/pkg/fftypes/identity.go index f5f8e1fe38..592b70e879 100644 --- a/pkg/fftypes/identity.go +++ b/pkg/fftypes/identity.go @@ -1,4 +1,4 @@ -// Copyright © 2021 Kaleido, Inc. +// Copyright © 2022 Kaleido, Inc. // // SPDX-License-Identifier: Apache-2.0 // @@ -16,9 +16,217 @@ package fftypes -// Identity is the nested structure representing an identity, that might comprise a resolvable -// by FireFly identity DID, a blockchain signing key, or both.
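As an illustrative aside (not part of this changeset), the new identity model introduced in pkg/fftypes/identity.go below gives every identity a DID and a deterministic broadcast topic. The standalone sketch below summarizes the DID forms produced by the GenerateDID switch further down in this file, and shows that Topic() is simply the hex-encoded SHA-256 of the DID string, so every node independently derives the same ordering context; the expected value is the one asserted in identity_test.go:

// Illustrative sketch of DID forms and topic derivation (not part of the diff)
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

func main() {
	// DID forms generated per identity type (see GenerateDID below):
	//   org    -> did:firefly:org/<name>            (ff_system namespace only)
	//   node   -> did:firefly:node/<name>           (ff_system namespace only, parent required)
	//   custom -> did:firefly:ns/<namespace>/<name> (non-system namespace, parent required)
	did := "did:firefly:org/org1"

	// Topic() is the hex SHA-256 of the DID, used to serialize the claim, verification and
	// update messages for one identity onto a single ordered context
	h := sha256.Sum256([]byte(did))
	fmt.Println(hex.EncodeToString(h[:]))
	// identity_test.go asserts this value:
	// 7ea456fa05fc63778e7c4cb22d0498d73f184b2778c11fd2ba31b5980f8490b9
}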
+import ( + "context" + "crypto/sha256" + "fmt" + + "github.com/hyperledger/firefly/internal/i18n" + "github.com/hyperledger/firefly/internal/log" +) + +// IdentityType is the type of an identity +type IdentityType = FFEnum + +var ( + // IdentityTypeOrg is an organization + IdentityTypeOrg IdentityType = ffEnum("identitytype", "org") + // IdentityTypeNode is a node + IdentityTypeNode IdentityType = ffEnum("identitytype", "node") + // IdentityTypeCustom is a user defined identity within a namespace + IdentityTypeCustom IdentityType = ffEnum("identitytype", "custom") +) + +const ( + DIDPrefix = "did:" + FireFlyDIDPrefix = "did:firefly:" + FireFlyOrgDIDPrefix = "did:firefly:org/" + FireFlyNodeDIDPrefix = "did:firefly:node/" + FireFlyCustomDIDPrefix = "did:firefly:ns/" +) + +type IdentityMessages struct { + Claim *UUID `json:"claim"` + Verification *UUID `json:"verification"` + Update *UUID `json:"update"` +} + +// IdentityBase are the immutable fields of an identity that determine what the identity itself is +type IdentityBase struct { + ID *UUID `json:"id"` + DID string `json:"did"` + Type IdentityType `json:"type" ffenum:"identitytype"` + Parent *UUID `json:"parent,omitempty"` + Namespace string `json:"namespace"` + Name string `json:"name,omitempty"` +} + +// IdentityProfile are the field of a profile that can be updated over time +type IdentityProfile struct { + Description string `json:"description,omitempty"` + Profile JSONObject `json:"profile,omitempty"` +} + +// Identity is the persisted structure backing all identities, including orgs, nodes and custom identities type Identity struct { + IdentityBase + IdentityProfile + Messages IdentityMessages `json:"messages,omitempty"` + Created *FFTime `json:"created,omitempty"` + Updated *FFTime `json:"updated,omitempty"` +} + +// IdentityCreateDTO is the input structure to submit to register an identity. +// The blockchain key that will be used to establish the claim for the identity +// needs to be provided. +type IdentityCreateDTO struct { + Name string `json:"name"` + Type IdentityType `json:"type,omitempty"` + Parent *UUID `json:"parent,omitempty"` + Key string `json:"key,omitempty"` + IdentityProfile +} + +// IdentityUpdateDTO is the input structure to submit to update an identityprofile. +// The same key in the claim will be used for the update. +type IdentityUpdateDTO struct { + IdentityProfile +} + +// SignerRef is the nested structure representing the identity that signed a message. +// It might comprise a resolvable by FireFly identity DID, a blockchain signing key, or both. +type SignerRef struct { Author string `json:"author,omitempty"` Key string `json:"key,omitempty"` } + +// IdentityClaim is the data payload used in a message to broadcast an intent to publish a new identity. +// Most claims (except root orgs, where different requirements apply) require a separate IdentityVerification +// from the parent identity to be published (on the same topic) before the identity is considered valid +// and is stored as a confirmed identity. +type IdentityClaim struct { + Identity *Identity `json:"identity"` +} + +// IdentityVerification is the data payload used in message to broadcast a verification of a child identity. +// Must refer to the UUID and Hash of the IdentityClaim message, and must contain the same base identity data. 
+type IdentityVerification struct { + Claim MessageRef `json:"claim"` + Identity IdentityBase `json:"identity"` +} + +// IdentityUpdate is the data payload used in message to broadcast an update to an identity profile. +// The broadcast must be on the same identity as the currently established identity claim message for the identity, +// and it must contain the same identity data. +// The profile is replaced in its entirety. +type IdentityUpdate struct { + Identity IdentityBase `json:"identity"` + Updates IdentityProfile `json:"updates,omitempty"` +} + +func (ic *IdentityClaim) Topic() string { + return ic.Identity.Topic() +} + +func (ic *IdentityClaim) SetBroadcastMessage(msgID *UUID) { + ic.Identity.Messages.Claim = msgID +} + +func (iv *IdentityVerification) Topic() string { + return iv.Identity.Topic() +} + +func (iv *IdentityVerification) SetBroadcastMessage(msgID *UUID) { + // nop-op here, the definition handler of the claim is the one that is responsible for updating + // the verification message ID on the Identity. +} + +func (iu *IdentityUpdate) Topic() string { + return iu.Identity.Topic() +} + +func (iu *IdentityUpdate) SetBroadcastMessage(msgID *UUID) { + // nop-op here, as the IdentityUpdate doesn't have a reference to the original Identity to set this. +} + +func (i *IdentityBase) Topic() string { + h := sha256.New() + h.Write([]byte(i.DID)) + return HashResult(h).String() +} + +func (i *IdentityBase) Validate(ctx context.Context) (err error) { + if i.ID == nil { + return i18n.NewError(ctx, i18n.MsgNilID) + } + if err = ValidateFFNameFieldNoUUID(ctx, i.Namespace, "namespace"); err != nil { + return err + } + if err = ValidateFFNameFieldNoUUID(ctx, i.Name, "name"); err != nil { + return err + } + if requiredDID, err := i.GenerateDID(ctx); err != nil { + return err + } else if i.DID != requiredDID { + return i18n.NewError(ctx, i18n.MsgInvalidDIDForType, i.DID, i.Type, i.Namespace, i.Name) + } + return nil +} + +func (i *IdentityBase) GenerateDID(ctx context.Context) (string, error) { + switch i.Type { + case IdentityTypeCustom: + if i.Namespace == SystemNamespace { + return "", i18n.NewError(ctx, i18n.MsgCustomIdentitySystemNS, SystemNamespace) + } + if i.Parent == nil { + return "", i18n.NewError(ctx, i18n.MsgNilParentIdentity, i.Type) + } + return fmt.Sprintf("%s%s/%s", FireFlyCustomDIDPrefix, i.Namespace, i.Name), nil + case IdentityTypeNode: + if i.Namespace != SystemNamespace { + return "", i18n.NewError(ctx, i18n.MsgSystemIdentityCustomNS, SystemNamespace) + } + if i.Parent == nil { + return "", i18n.NewError(ctx, i18n.MsgNilParentIdentity, i.Type) + } + return fmt.Sprintf("%s%s", FireFlyNodeDIDPrefix, i.Name), nil + case IdentityTypeOrg: + if i.Namespace != SystemNamespace { + return "", i18n.NewError(ctx, i18n.MsgSystemIdentityCustomNS, SystemNamespace) + } + return fmt.Sprintf("%s%s", FireFlyOrgDIDPrefix, i.Name), nil + default: + return "", i18n.NewError(ctx, i18n.MsgUnknownIdentityType, i.Type) + } +} + +func (i *IdentityBase) Equals(ctx context.Context, i2 *IdentityBase) bool { + if err := i.Validate(ctx); err != nil { + log.L(ctx).Warnf("Comparing invalid identity (source) %s (%v): %s", i.DID, i.ID, err) + return false + } + if err := i2.Validate(ctx); err != nil { + log.L(ctx).Warnf("Comparing invalid identity (target) %s (%v): %s", i.DID, i2.ID, err) + return false + } + return i.ID.Equals(i2.ID) && + i.DID == i2.DID && + i.Type == i2.Type && + i.Parent.Equals(i2.Parent) && + i.Namespace == i2.Namespace && + i.Name == i2.Name +} + +func (identity *Identity) 
Validate(ctx context.Context) (err error) { + if identity == nil { + return i18n.NewError(ctx, i18n.MsgNilOrNullObject) + } + if err = identity.IdentityBase.Validate(ctx); err != nil { + return err + } + if err = ValidateLength(ctx, identity.Description, "description", 4096); err != nil { + return err + } + return nil +} diff --git a/pkg/fftypes/identity_test.go b/pkg/fftypes/identity_test.go new file mode 100644 index 0000000000..65819efb61 --- /dev/null +++ b/pkg/fftypes/identity_test.go @@ -0,0 +1,237 @@ +// Copyright © 2021 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.identity/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package fftypes + +import ( + "context" + "fmt" + "testing" + + "github.com/stretchr/testify/assert" +) + +func testOrg() *Identity { + return &Identity{ + IdentityBase: IdentityBase{ + ID: NewUUID(), + DID: "did:firefly:org/org1", + Type: IdentityTypeOrg, + Namespace: SystemNamespace, + Name: "org1", + }, + IdentityProfile: IdentityProfile{ + Description: "desc", + Profile: JSONObject{ + "some": "profiledata", + }, + }, + } +} + +func testNode() *Identity { + return &Identity{ + IdentityBase: IdentityBase{ + ID: NewUUID(), + DID: "did:firefly:node/node1", + Parent: NewUUID(), + Type: IdentityTypeNode, + Namespace: SystemNamespace, + Name: "node1", + }, + IdentityProfile: IdentityProfile{ + Description: "desc", + Profile: JSONObject{ + "some": "profiledata", + }, + }, + } +} + +func testCustom(ns, name string) *Identity { + return &Identity{ + IdentityBase: IdentityBase{ + ID: NewUUID(), + DID: fmt.Sprintf("did:firefly:ns/%s/%s", ns, name), + Parent: NewUUID(), + Type: IdentityTypeCustom, + Namespace: ns, + Name: name, + }, + IdentityProfile: IdentityProfile{ + Description: "desc", + Profile: JSONObject{ + "some": "profiledata", + }, + }, + } +} + +func TestIdentityValidationOrgs(t *testing.T) { + + ctx := context.Background() + assert.Regexp(t, "FF10368", (*Identity)(nil).Validate(ctx)) + + o := testOrg() + assert.NoError(t, o.Validate(ctx)) + + o = testOrg() + o.ID = nil + assert.Regexp(t, "FF10203", o.Validate(ctx)) + + o = testOrg() + o.Namespace = "!namespace" + assert.Regexp(t, "FF10131", o.Validate(ctx)) + + o = testOrg() + o.Name = "!name" + assert.Regexp(t, "FF10131", o.Validate(ctx)) + + o = testOrg() + o.Type = IdentityType("wrong") + assert.Regexp(t, "FF10362", o.Validate(ctx)) + + o = testOrg() + o.Description = string(make([]byte, 4097)) + assert.Regexp(t, "FF10188", o.Validate(ctx)) + + o = testOrg() + o.DID = "did:firefly:node/node1" + assert.Regexp(t, "FF10363", o.Validate(ctx)) + + o = testOrg() + o.Namespace = "nonsystem" + assert.Regexp(t, "FF10361", o.Validate(ctx)) + +} + +func TestIdentityValidationNodes(t *testing.T) { + + ctx := context.Background() + n := testNode() + assert.NoError(t, n.Validate(ctx)) + + n = testNode() + n.Parent = nil + assert.Regexp(t, "FF10360", n.Validate(ctx)) + + n = testNode() + n.DID = "did:firefly:org/org1" + assert.Regexp(t, "FF10363", n.Validate(ctx)) + + n = testNode() + n.Namespace = 
"nonsystem" + assert.Regexp(t, "FF10361", n.Validate(ctx)) + +} + +func TestIdentityValidationCustom(t *testing.T) { + + ctx := context.Background() + c := testCustom("ns1", "custom1") + assert.NoError(t, c.Validate(ctx)) + + c = testCustom("ns1", "custom1") + c.Parent = nil + assert.Regexp(t, "FF10360", c.Validate(ctx)) + + c = testCustom("ns1", "custom1") + c.DID = "did:firefly:ns/ns2/custom1" + assert.Regexp(t, "FF10363", c.Validate(ctx)) + + c = testCustom("ns1", "custom1") + c.Namespace = SystemNamespace + assert.Regexp(t, "FF10359", c.Validate(ctx)) + +} + +func TestIdentityCompare(t *testing.T) { + + ctx := context.Background() + + getMatching := func() (*IdentityBase, *IdentityBase) { + i1 := testCustom("ns1", "custom1") + i2 := testCustom("ns1", "custom1") + *i1.ID = *i2.ID + *i1.Parent = *i2.Parent + return &i1.IdentityBase, &i2.IdentityBase + } + + i1, i2 := getMatching() + assert.True(t, i1.Equals(ctx, i2)) + + i1, i2 = getMatching() + i1.ID = NewUUID() + assert.False(t, i1.Equals(ctx, i2)) + + i1, i2 = getMatching() + i1.Parent = NewUUID() + assert.False(t, i1.Equals(ctx, i2)) + i1.Parent = nil + assert.False(t, i1.Equals(ctx, i2)) + + i1, i2 = getMatching() + i1.DID = "bad" + assert.False(t, i1.Equals(ctx, i2)) + + i1, i2 = getMatching() + i2.DID = "bad" + assert.False(t, i1.Equals(ctx, i2)) + + i1, i2 = getMatching() + i2 = &testCustom("ns1", "custom2").IdentityBase + *i2.ID = *i1.ID + i1.Parent = nil + i2.Parent = nil + assert.False(t, i1.Equals(ctx, i2)) + + i1, i2 = getMatching() + i2 = &testCustom("ns2", "custom1").IdentityBase + *i2.ID = *i1.ID + i1.Parent = nil + i2.Parent = nil + assert.False(t, i1.Equals(ctx, i2)) +} + +func TestDefinitionObjects(t *testing.T) { + + o := testOrg() + assert.Equal(t, "7ea456fa05fc63778e7c4cb22d0498d73f184b2778c11fd2ba31b5980f8490b9", o.IdentityBase.Topic()) + assert.Equal(t, o.Topic(), o.IdentityBase.Topic()) + + ic := IdentityClaim{ + Identity: o, + } + assert.Equal(t, o.Topic(), ic.Topic()) + claimMsg := NewUUID() + ic.SetBroadcastMessage(claimMsg) + assert.Equal(t, *claimMsg, *o.Messages.Claim) + + iv := IdentityVerification{ + Identity: o.IdentityBase, + } + assert.Equal(t, o.Topic(), iv.Topic()) + verificationMsg := NewUUID() + iv.SetBroadcastMessage(verificationMsg) + + var iu Definition = &IdentityUpdate{ + Identity: o.IdentityBase, + } + assert.Equal(t, o.Topic(), iu.Topic()) + updateMsg := NewUUID() + iu.SetBroadcastMessage(updateMsg) + +} diff --git a/pkg/fftypes/jsonany.go b/pkg/fftypes/jsonany.go index 5986c71958..f8137f0848 100644 --- a/pkg/fftypes/jsonany.go +++ b/pkg/fftypes/jsonany.go @@ -19,6 +19,7 @@ package fftypes import ( "context" "crypto/sha256" + "database/sql/driver" "encoding/json" "github.com/hyperledger/firefly/internal/i18n" @@ -135,11 +136,18 @@ func (h *JSONAny) JSONObjectNowarn() JSONObject { return jo } +// Value ensures we write null to the DB for null values +func (h *JSONAny) Value() (driver.Value, error) { + if h.IsNil() { + return nil, nil + } + return string(*h), nil +} + // Scan implements sql.Scanner func (h *JSONAny) Scan(src interface{}) error { switch src := src.(type) { case nil: - *h = NullString return nil case []byte: return h.UnmarshalJSON(src) diff --git a/pkg/fftypes/jsonany_test.go b/pkg/fftypes/jsonany_test.go index 8077695402..8faf3e482f 100644 --- a/pkg/fftypes/jsonany_test.go +++ b/pkg/fftypes/jsonany_test.go @@ -108,7 +108,7 @@ func TestScan(t *testing.T) { var h JSONAny assert.Equal(t, int64(0), h.Length()) assert.NoError(t, h.Scan(nil)) - assert.Equal(t, []byte(NullString), 
[]byte(h)) + assert.Empty(t, h) assert.NoError(t, h.Scan(`{"some": "stuff"}`)) assert.Equal(t, "stuff", h.JSONObject().GetString("some")) @@ -130,3 +130,25 @@ func TestScan(t *testing.T) { assert.Equal(t, int64(2), JSONAnyPtr("{}").Length()) } + +func TestValue(t *testing.T) { + + var h *JSONAny + v, err := h.Value() + assert.NoError(t, err) + assert.Nil(t, v) + err = h.Scan(v) + assert.NoError(t, err) + assert.Nil(t, h) + + h = JSONAnyPtr("") + v, err = h.Value() + assert.NoError(t, err) + assert.Nil(t, v) + + h = JSONAnyPtr("{}") + v, err = h.Value() + assert.NoError(t, err) + assert.Equal(t, "{}", v) + +} diff --git a/pkg/fftypes/jsonobject.go b/pkg/fftypes/jsonobject.go index df5f1d444a..45be31ac3f 100644 --- a/pkg/fftypes/jsonobject.go +++ b/pkg/fftypes/jsonobject.go @@ -185,6 +185,9 @@ func (jd JSONObject) GetStringArrayOk(key string) ([]string, bool) { // Value implements sql.Valuer func (jd JSONObject) Value() (driver.Value, error) { + if jd == nil { + return nil, nil + } b, err := json.Marshal(&jd) if err != nil { return nil, err diff --git a/pkg/fftypes/jsonobject_test.go b/pkg/fftypes/jsonobject_test.go index 983427566b..91bba78bad 100644 --- a/pkg/fftypes/jsonobject_test.go +++ b/pkg/fftypes/jsonobject_test.go @@ -100,7 +100,7 @@ func TestJSONObjectScan(t *testing.T) { sv, err = ((JSONObject)(nil)).Value() assert.NoError(t, err) - assert.Equal(t, NullString, sv) + assert.Nil(t, sv) var badData JSONObject = map[string]interface{}{"bad": map[bool]bool{false: true}} _, err = badData.Value() diff --git a/pkg/fftypes/message.go b/pkg/fftypes/message.go index 84f771d8cc..50d00034a1 100644 --- a/pkg/fftypes/message.go +++ b/pkg/fftypes/message.go @@ -33,7 +33,7 @@ const ( type MessageType = FFEnum var ( - // MessageTypeDefinition is a message broadcasting a definition of a system type, pre-defined by firefly (namespaces, members, data definitions, etc.) + // MessageTypeDefinition is a message broadcasting a definition of a system type, pre-defined by firefly (namespaces, identities, data definitions, etc.) MessageTypeDefinition MessageType = ffEnum("messagetype", "definition") // MessageTypeBroadcast is a broadcast message, meaning it is intended to be visible by all parties in the network MessageTypeBroadcast MessageType = ffEnum("messagetype", "broadcast") @@ -72,7 +72,7 @@ type MessageHeader struct { CID *UUID `json:"cid,omitempty"` Type MessageType `json:"type" ffenum:"messagetype"` TxType TransactionType `json:"txtype,omitempty"` - Identity + SignerRef Created *FFTime `json:"created,omitempty"` Namespace string `json:"namespace,omitempty"` Group *Bytes32 `json:"group,omitempty"` diff --git a/pkg/fftypes/message_test.go b/pkg/fftypes/message_test.go index c08b7b2ae8..24cda7e7b2 100644 --- a/pkg/fftypes/message_test.go +++ b/pkg/fftypes/message_test.go @@ -209,7 +209,7 @@ func TestSealKnownMessage(t *testing.T) { CID: cid, Type: MessageTypePrivate, TxType: TransactionTypeBatchPin, - Identity: Identity{ + SignerRef: SignerRef{ Author: "0x12345", }, Namespace: "ns1", diff --git a/pkg/fftypes/node.go b/pkg/fftypes/node.go index 4be938c80b..e7758ed764 100644 --- a/pkg/fftypes/node.go +++ b/pkg/fftypes/node.go @@ -1,4 +1,4 @@ -// Copyright © 2021 Kaleido, Inc. +// Copyright © 2022 Kaleido, Inc. // // SPDX-License-Identifier: Apache-2.0 // @@ -16,51 +16,63 @@ package fftypes -import ( - "context" +import "context" - "github.com/hyperledger/firefly/internal/i18n" -) +// DeprecatedNode is the data structure we used to use prior to FIR-9. 
+// Now we use the common Identity structure throughout +type DeprecatedNode struct { + ID *UUID `json:"id"` + Message *UUID `json:"message,omitempty"` + Owner string `json:"owner,omitempty"` + Name string `json:"name,omitempty"` + Description string `json:"description,omitempty"` + DX DeprecatedDXInfo `json:"dx"` + Created *FFTime `json:"created,omitempty"` -// Node is a FireFly node within the network -type Node struct { - ID *UUID `json:"id"` - Message *UUID `json:"message,omitempty"` - Owner string `json:"owner,omitempty"` - Name string `json:"name,omitempty"` - Description string `json:"description,omitempty"` - DX DXInfo `json:"dx"` - Created *FFTime `json:"created,omitempty"` + identityClaim *IdentityClaim } -// DXInfo is the data exchange information -type DXInfo struct { +type DeprecatedDXInfo struct { Peer string `json:"peer,omitempty"` Endpoint JSONObject `json:"endpoint,omitempty"` } -func (n *Node) Validate(ctx context.Context, existing bool) (err error) { - if err = ValidateFFNameFieldNoUUID(ctx, n.Name, "name"); err != nil { - return err +// Migrated creates and maintains a migrated IdentityClaim object, which +// is used when processing an old-style node broadcast received when +// joining an existing network +func (node *DeprecatedNode) Migrated() *IdentityClaim { + if node.identityClaim != nil { + return node.identityClaim } - if err = ValidateLength(ctx, n.Description, "description", 4096); err != nil { - return err + node.identityClaim = &IdentityClaim{ + Identity: &Identity{ + IdentityBase: IdentityBase{ + ID: node.ID, + Type: IdentityTypeNode, + Namespace: SystemNamespace, + Name: node.Name, + Parent: nil, // Must be set post migrate + }, + IdentityProfile: IdentityProfile{ + Description: node.Description, + Profile: node.DX.Endpoint, + }, + }, } - if n.Owner == "" { - return i18n.NewError(ctx, i18n.MsgOwnerMissing) - } - if existing { - if n.ID == nil { - return i18n.NewError(ctx, i18n.MsgNilID) - } - } - return nil + return node.identityClaim +} + +func (node *DeprecatedNode) AddMigratedParent(parentID *UUID) *IdentityClaim { + ic := node.Migrated() + ic.Identity.Parent = parentID + node.identityClaim.Identity.DID, _ = node.identityClaim.Identity.GenerateDID(context.Background()) + return ic } -func (n *Node) Topic() string { - return OrgTopic +func (node *DeprecatedNode) Topic() string { + return node.Migrated().Topic() } -func (n *Node) SetBroadcastMessage(msgID *UUID) { - n.Message = msgID +func (node *DeprecatedNode) SetBroadcastMessage(msgID *UUID) { + node.Migrated().SetBroadcastMessage(msgID) } diff --git a/pkg/fftypes/node_status.go b/pkg/fftypes/node_status.go index 1986fc45ce..cf90b0739f 100644 --- a/pkg/fftypes/node_status.go +++ b/pkg/fftypes/node_status.go @@ -1,4 +1,4 @@ -// Copyright © 2021 Kaleido, Inc. +// Copyright © 2022 Kaleido, Inc. // // SPDX-License-Identifier: Apache-2.0 // @@ -34,7 +34,7 @@ type NodeStatusNode struct { type NodeStatusOrg struct { Name string `json:"name"` Registered bool `json:"registered"` - Identity string `json:"identity,omitempty"` + DID string `json:"did,omitempty"` ID *UUID `json:"id,omitempty"` } diff --git a/pkg/fftypes/node_test.go b/pkg/fftypes/node_test.go index b4abf5fe0c..1ac3f93e12 100644 --- a/pkg/fftypes/node_test.go +++ b/pkg/fftypes/node_test.go @@ -1,4 +1,4 @@ -// Copyright © 2021 Kaleido, Inc. +// Copyright © 2022 Kaleido, Inc.
// // SPDX-License-Identifier: Apache-2.0 // @@ -17,38 +17,52 @@ package fftypes import ( - "context" "testing" "github.com/stretchr/testify/assert" ) -func TestNodeValidation(t *testing.T) { +func TestNodeigration(t *testing.T) { - n := &Node{ - Name: "!name", + node := DeprecatedNode{ + ID: NewUUID(), + Name: "node1", + Description: "Node 1", + DX: DeprecatedDXInfo{ + Peer: "ignored", + Endpoint: JSONObject{ + "id": "peer1", + }, + }, } - assert.Regexp(t, "FF10131.*name", n.Validate(context.Background(), false)) + parentID := NewUUID() + assert.Equal(t, &IdentityClaim{ + Identity: &Identity{ + IdentityBase: IdentityBase{ + ID: node.ID, + Type: IdentityTypeNode, + DID: "did:firefly:node/node1", + Namespace: SystemNamespace, + Name: "node1", + Parent: parentID, + }, + IdentityProfile: IdentityProfile{ + Description: "Node 1", + Profile: JSONObject{ + "id": "peer1", + }, + }, + }, + }, node.AddMigratedParent(parentID)) - n = &Node{ - Name: "ok", - Description: string(make([]byte, 4097)), - } - assert.Regexp(t, "FF10188.*description", n.Validate(context.Background(), false)) + assert.Equal(t, "14c4157d50d35470b15a6576affa62adea1b191e8238f2273a099d1ef73fb335", node.Topic()) - n = &Node{ - Name: "ok", - Description: "ok", + msg := &Message{ + Header: MessageHeader{ + ID: NewUUID(), + }, } - assert.Regexp(t, "FF10211", n.Validate(context.Background(), false)) - - n.Owner = "0x12345" - assert.NoError(t, n.Validate(context.Background(), false)) - - assert.Regexp(t, "FF10203", n.Validate(context.Background(), true)) + node.SetBroadcastMessage(msg.Header.ID) + assert.Equal(t, msg.Header.ID, node.Migrated().Identity.Messages.Claim) - var def Definition = n - assert.Equal(t, "ff_organizations", def.Topic()) - def.SetBroadcastMessage(NewUUID()) - assert.NotNil(t, n.Message) } diff --git a/pkg/fftypes/organization.go b/pkg/fftypes/organization.go index db771a11b8..dbfa26c269 100644 --- a/pkg/fftypes/organization.go +++ b/pkg/fftypes/organization.go @@ -1,4 +1,4 @@ -// Copyright © 2021 Kaleido, Inc. +// Copyright © 2022 Kaleido, Inc. // // SPDX-License-Identifier: Apache-2.0 // @@ -16,20 +16,11 @@ package fftypes -import ( - "context" - "fmt" +import "context" - "github.com/hyperledger/firefly/internal/i18n" -) - -const ( - FireflyOrgDIDPrefix = "did:firefly:org/" - OrgTopic = "ff_organizations" -) - -// Organization is a top-level identity in the network -type Organization struct { +// DeprecatedOrganization is the data structure we used to use prior to FIR-9. 
+// Now we use the common Identity structure throughout +type DeprecatedOrganization struct { ID *UUID `json:"id"` Message *UUID `json:"message,omitempty"` Parent string `json:"parent,omitempty"` @@ -38,34 +29,40 @@ type Organization struct { Description string `json:"description,omitempty"` Profile JSONObject `json:"profile,omitempty"` Created *FFTime `json:"created,omitempty"` + + identityClaim *IdentityClaim } -func (org *Organization) Validate(ctx context.Context, existing bool) (err error) { - if err = ValidateFFNameFieldNoUUID(ctx, org.Name, "name"); err != nil { - return err - } - if err = ValidateLength(ctx, org.Description, "description", 4096); err != nil { - return err +// Migrate creates and maintains a migrated IdentityClaim object, which +// is used when processing an old-style organization broadcast received when +// joining an existing network +func (org *DeprecatedOrganization) Migrated() *IdentityClaim { + if org.identityClaim != nil { + return org.identityClaim } - if existing { - if org.ID == nil { - return i18n.NewError(ctx, i18n.MsgNilID) - } + org.identityClaim = &IdentityClaim{ + Identity: &Identity{ + IdentityBase: IdentityBase{ + ID: org.ID, + Type: IdentityTypeOrg, + Namespace: SystemNamespace, + Name: org.Name, + Parent: nil, // No support for child identity migration (see FIR-9 for details) + }, + IdentityProfile: IdentityProfile{ + Description: org.Description, + Profile: org.Profile, + }, + }, } - return nil + org.identityClaim.Identity.DID, _ = org.identityClaim.Identity.GenerateDID(context.Background()) + return org.identityClaim } -func (org *Organization) Topic() string { - return OrgTopic +func (org *DeprecatedOrganization) Topic() string { + return org.Migrated().Topic() } -func (org *Organization) SetBroadcastMessage(msgID *UUID) { - org.Message = msgID -} - -func (org *Organization) GetDID() string { - if org == nil { - return "" - } - return fmt.Sprintf("%s%s", FireflyOrgDIDPrefix, org.ID) +func (org *DeprecatedOrganization) SetBroadcastMessage(msgID *UUID) { + org.Migrated().SetBroadcastMessage(msgID) } diff --git a/pkg/fftypes/organization_test.go b/pkg/fftypes/organization_test.go index 8da4c2e491..d95c45ee32 100644 --- a/pkg/fftypes/organization_test.go +++ b/pkg/fftypes/organization_test.go @@ -1,4 +1,4 @@ -// Copyright © 2021 Kaleido, Inc. +// Copyright © 2022 Kaleido, Inc. 
// // SPDX-License-Identifier: Apache-2.0 // @@ -17,49 +17,47 @@ package fftypes import ( - "context" - "fmt" "testing" "github.com/stretchr/testify/assert" ) -func TestOrganizationValidation(t *testing.T) { +func TestOrgMigration(t *testing.T) { - org := &Organization{ - Name: "!name", + org := DeprecatedOrganization{ + ID: NewUUID(), + Name: "org1", + Description: "Org 1", + Profile: JSONObject{ + "test": "profile", + }, } - assert.Regexp(t, "FF10131.*name", org.Validate(context.Background(), false)) - - org = &Organization{ - Name: "ok", - Description: string(make([]byte, 4097)), - } - assert.Regexp(t, "FF10188.*description", org.Validate(context.Background(), false)) - - org = &Organization{ - Name: "ok", - Description: "ok", - Identity: "ok", - } - assert.NoError(t, org.Validate(context.Background(), false)) - - assert.Regexp(t, "FF10203", org.Validate(context.Background(), true)) - - var def Definition = org - assert.Equal(t, "ff_organizations", def.Topic()) - def.SetBroadcastMessage(NewUUID()) - assert.NotNil(t, org.Message) -} - -func TestGetDID(t *testing.T) { - - var org *Organization - assert.Equal(t, "", org.GetDID()) - - org = &Organization{ - ID: NewUUID(), + assert.Equal(t, &IdentityClaim{ + Identity: &Identity{ + IdentityBase: IdentityBase{ + ID: org.ID, + Type: IdentityTypeOrg, + DID: "did:firefly:org/org1", + Namespace: SystemNamespace, + Name: "org1", + }, + IdentityProfile: IdentityProfile{ + Description: "Org 1", + Profile: JSONObject{ + "test": "profile", + }, + }, + }, + }, org.Migrated()) + + assert.Equal(t, "7ea456fa05fc63778e7c4cb22d0498d73f184b2778c11fd2ba31b5980f8490b9", org.Topic()) + + msg := &Message{ + Header: MessageHeader{ + ID: NewUUID(), + }, } - assert.Equal(t, fmt.Sprintf("did:firefly:org/%s", org.ID), org.GetDID()) + org.SetBroadcastMessage(msg.Header.ID) + assert.Equal(t, msg.Header.ID, org.Migrated().Identity.Messages.Claim) } diff --git a/pkg/fftypes/pin.go b/pkg/fftypes/pin.go index 540220f9f6..f58a5f2475 100644 --- a/pkg/fftypes/pin.go +++ b/pkg/fftypes/pin.go @@ -1,4 +1,4 @@ -// Copyright © 2021 Kaleido, Inc. +// Copyright © 2022 Kaleido, Inc. // // SPDX-License-Identifier: Apache-2.0 // @@ -43,6 +43,7 @@ type Pin struct { Batch *UUID `json:"batch,omitempty"` Index int64 `json:"index,omitempty"` Dispatched bool `json:"dispatched,omitempty"` + Signer string `json:"signer,omitempty"` Created *FFTime `json:"created,omitempty"` } diff --git a/pkg/fftypes/verifier.go b/pkg/fftypes/verifier.go new file mode 100644 index 0000000000..ab320e013c --- /dev/null +++ b/pkg/fftypes/verifier.go @@ -0,0 +1,57 @@ +// Copyright © 2022 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package fftypes + +import "crypto/sha256" + +// VerifierType is the type of an identity berifier. 
Where possible we use established DID verifier type strings +type VerifierType = FFEnum + +var ( + // VerifierTypeEthAddress is an Ethereum (secp256k1) address string + VerifierTypeEthAddress VerifierType = ffEnum("verifiertype", "ethereum_address") + // VerifierTypeMSPIdentity is the MSP id (X509 distinguished name) of an issued signing certificate / keypair + VerifierTypeMSPIdentity VerifierType = ffEnum("verifiertype", "fabric_msp_id") + // VerifierTypeFFDXPeerID is the peer identifier that FireFly Data Exchange verifies (using plugin specific tech) when receiving data + VerifierTypeFFDXPeerID VerifierType = ffEnum("verifiertype", "dx_peer_id") +) + +// VerifierRef is just the type + value (public key identifier etc.) from the verifier +type VerifierRef struct { + Type VerifierType `json:"type" ffenum:"verifiertype"` + Value string `json:"value"` +} + +// Verifier is an identity verification system that has been established for this identity, such as a blockchain signing key identifier +type Verifier struct { + Hash *Bytes32 `json:"hash"` // Used to ensure the same ID is generated on each node, but not critical for verification. In v0.13 migration was set to the ID of the parent. + Identity *UUID `json:"identity,omitempty"` + Namespace string `json:"namespace,omitempty"` + VerifierRef + Created *FFTime `json:"created,omitempty"` +} + +// Seal updates the hash to be deterministically generated from the namespace+type+value, such that +// it will be the same on every node, and unique. +func (v *Verifier) Seal() *Verifier { + h := sha256.New() + h.Write([]byte(v.Namespace)) + h.Write([]byte(v.Type)) + h.Write([]byte(v.Value)) + v.Hash = HashResult(h) + return v +} diff --git a/pkg/fftypes/verifier_test.go b/pkg/fftypes/verifier_test.go new file mode 100644 index 0000000000..7212e9a02e --- /dev/null +++ b/pkg/fftypes/verifier_test.go @@ -0,0 +1,39 @@ +// Copyright © 2021 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
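As an illustrative aside (not part of this changeset), Seal() above derives the verifier hash deterministically as the SHA-256 of namespace, then verifier type, then value, so every node computes the same hash independently. A minimal standalone sketch, using the input and expected output from TestVerifierSeal below:

// Illustrative sketch of the deterministic verifier hash (not part of the diff)
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

func main() {
	h := sha256.New()
	h.Write([]byte("ns1"))                                        // namespace
	h.Write([]byte("ethereum_address"))                           // VerifierTypeEthAddress enum value
	h.Write([]byte("0xdfceac9b26ac099d7e4df958c22939878c19c948")) // verifier value (signing key)
	fmt.Println(hex.EncodeToString(h.Sum(nil)))
	// TestVerifierSeal asserts this value:
	// c7742ed06a6c36dece56d9c6d65d4ee6ba0db2a643e7f8efc75ec4e7ca31d45d
}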
+ +package fftypes + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestVerifierSeal(t *testing.T) { + + v := &Verifier{ + Identity: NewUUID(), // does not contribute to hash + Namespace: "ns1", + VerifierRef: VerifierRef{ + Type: VerifierTypeEthAddress, + Value: "0xdfceac9b26ac099d7e4df958c22939878c19c948", + }, + Created: Now(), + } + v.Seal() + assert.Equal(t, "c7742ed06a6c36dece56d9c6d65d4ee6ba0db2a643e7f8efc75ec4e7ca31d45d", v.Hash.String()) + +} diff --git a/pkg/wsclient/wsclient.go b/pkg/wsclient/wsclient.go index 08071a837c..6aba1aeb16 100644 --- a/pkg/wsclient/wsclient.go +++ b/pkg/wsclient/wsclient.go @@ -304,12 +304,15 @@ func (w *wsClient) sendLoop(receiverDone chan struct{}) { disconnecting = true } case <-timeoutContext.Done(): + wsconn := w.wsconn if err := w.heartbeatCheck(); err != nil { l.Errorf("WS %s closing: %s", w.url, err) disconnecting = true - } else if err := w.wsconn.WriteMessage(websocket.PingMessage, []byte{}); err != nil { - l.Errorf("WS %s heartbeat send failed: %s", w.url, err) - disconnecting = true + } else if wsconn != nil { + if err := wsconn.WriteMessage(websocket.PingMessage, []byte{}); err != nil { + l.Errorf("WS %s heartbeat send failed: %s", w.url, err) + disconnecting = true + } } case <-receiverDone: l.Debugf("WS %s send loop exiting", w.url) diff --git a/test/e2e/e2e_test.go b/test/e2e/e2e_test.go index d5d6cca0c1..e1411adc86 100644 --- a/test/e2e/e2e_test.go +++ b/test/e2e/e2e_test.go @@ -41,10 +41,13 @@ type testState struct { t *testing.T client1 *resty.Client client2 *resty.Client + ethNode *resty.Client ws1 *websocket.Conn ws2 *websocket.Conn - org1 *fftypes.Organization - org2 *fftypes.Organization + org1 *fftypes.Identity + org1key *fftypes.Verifier + org2 *fftypes.Identity + org2key *fftypes.Verifier done func() } @@ -191,6 +194,14 @@ func beforeE2ETest(t *testing.T) *testState { ts.client1.SetBaseURL(fmt.Sprintf("%s://%s%s/api/v1", httpProtocolClient1, stack.Members[0].FireflyHostname, member0WithPort)) ts.client2.SetBaseURL(fmt.Sprintf("%s://%s%s/api/v1", httpProtocolClient2, stack.Members[1].FireflyHostname, member1WithPort)) + t.Logf("Blockchain provider: %s", stack.BlockchainProvider) + if stack.BlockchainProvider == "geth" { + ethNodeURL := fmt.Sprintf("%s://%s:%d", httpProtocolClient1, stack.Members[0].FireflyHostname, stack.ExposedBlockchainPort) + t.Logf("Ethereum node URL: %s", ethNodeURL) + ts.ethNode = NewResty(t) + ts.ethNode.SetBaseURL(ethNodeURL) + } + if stack.Members[0].Username != "" && stack.Members[0].Password != "" { t.Log("Setting auth for user 1") ts.client1.SetBasicAuth(stack.Members[0].Username, stack.Members[0].Password) @@ -233,8 +244,12 @@ func beforeE2ETest(t *testing.T) *testState { t.Logf("Waiting for 2 orgs to appear. 
Currently have: node1=%d node2=%d", len(orgsC1), len(orgsC2)) time.Sleep(3 * time.Second) } + ts.org1key = GetIdentityBlockchainKeys(t, ts.client1, ts.org1.ID, 200)[0] + ts.org2key = GetIdentityBlockchainKeys(t, ts.client2, ts.org2.ID, 200)[0] + t.Logf("Org1: ID=%s DID=%s Key=%s", ts.org1.DID, ts.org1.ID, ts.org1key.Value) + t.Logf("Org2: ID=%s DID=%s Key=%s", ts.org2.DID, ts.org2.ID, ts.org2key.Value) - eventNames := "message_confirmed|token_pool_confirmed|token_transfer_confirmed|blockchain_event|token_approval_confirmed" + eventNames := "message_confirmed|token_pool_confirmed|token_transfer_confirmed|blockchain_event|token_approval_confirmed|identity_confirmed" queryString := fmt.Sprintf("namespace=default&ephemeral&autoack&filter.events=%s&changeevents=.*", eventNames) wsUrl1 := url.URL{ @@ -271,17 +286,19 @@ func beforeE2ETest(t *testing.T) *testState { return ts } -func wsReader(conn *websocket.Conn) (chan *fftypes.EventDelivery, chan *fftypes.ChangeEvent) { +func wsReader(conn *websocket.Conn, dbChanges bool) (chan *fftypes.EventDelivery, chan *fftypes.ChangeEvent) { events := make(chan *fftypes.EventDelivery, 100) - changeEvents := make(chan *fftypes.ChangeEvent, 100) + var changeEvents chan *fftypes.ChangeEvent + if dbChanges { + changeEvents = make(chan *fftypes.ChangeEvent, 100) + } go func() { for { _, b, err := conn.ReadMessage() if err != nil { - fmt.Printf("Websocket %s closing, error: %s", conn.RemoteAddr(), err) + fmt.Printf("Websocket %s closing, error: %s\n", conn.RemoteAddr(), err) return } - fmt.Printf("Websocket %s receive: %s", conn.RemoteAddr(), b) var wsa fftypes.WSClientActionBase err = json.Unmarshal(b, &wsa) if err != nil { @@ -295,7 +312,11 @@ func wsReader(conn *websocket.Conn) (chan *fftypes.EventDelivery, chan *fftypes. panic(fmt.Errorf("Invalid JSON received on WebSocket: %s", err)) } if err == nil { - changeEvents <- wscn.ChangeEvent + // Throw away DB changes if the caller doesn't want them + fmt.Printf("Websocket %s change event: %s/%s/%s\n", conn.RemoteAddr(), wscn.ChangeEvent.Namespace, wscn.ChangeEvent.Collection, wscn.ChangeEvent.Type) + if dbChanges { + changeEvents <- wscn.ChangeEvent + } } default: var ed fftypes.EventDelivery @@ -304,6 +325,7 @@ func wsReader(conn *websocket.Conn) (chan *fftypes.EventDelivery, chan *fftypes. 
panic(fmt.Errorf("Invalid JSON received on WebSocket: %s", err)) } if err == nil { + fmt.Printf("Websocket %s event: %s/%s/%s -> %s (tx=%s)\n", conn.RemoteAddr(), ed.Namespace, ed.Type, ed.ID, ed.Reference, ed.Transaction) events <- &ed } } @@ -332,6 +354,17 @@ func waitForMessageConfirmed(t *testing.T, c chan *fftypes.EventDelivery, msgTyp } } +func waitForIdentityConfirmed(t *testing.T, c chan *fftypes.EventDelivery) *fftypes.EventDelivery { + for { + ed := <-c + if ed.Type == fftypes.EventTypeIdentityConfirmed { + t.Logf("Detected '%s' event for identity '%s'", ed.Type, ed.Reference) + return ed + } + t.Logf("Ignored event '%s'", ed.ID) + } +} + func waitForContractEvent(t *testing.T, client *resty.Client, c chan *fftypes.EventDelivery, match map[string]interface{}) map[string]interface{} { for { eventDelivery := <-c diff --git a/test/e2e/ethereum_contract_test.go b/test/e2e/ethereum_contract_test.go index 1d9a6f0d50..aa3e2175b5 100644 --- a/test/e2e/ethereum_contract_test.go +++ b/test/e2e/ethereum_contract_test.go @@ -170,7 +170,7 @@ func (suite *EthereumContractTestSuite) SetupSuite() { suite.ethClient = NewResty(suite.T()) suite.ethClient.SetBaseURL(fmt.Sprintf("http://localhost:%d", stack.Members[0].ExposedConnectorPort)) - suite.ethIdentity = suite.testState.org1.Identity + suite.ethIdentity = suite.testState.org1key.Value abiResult := uploadABI(suite.T(), suite.ethClient, abi) contractResult := deployABI(suite.T(), suite.ethClient, suite.ethIdentity, abiResult.ID) @@ -192,7 +192,7 @@ func (suite *EthereumContractTestSuite) BeforeTest(suiteName, testName string) { func (suite *EthereumContractTestSuite) TestE2EContractEvents() { defer suite.testState.done() - received1, changes1 := wsReader(suite.testState.ws1) + received1, changes1 := wsReader(suite.testState.ws1, true) sub := CreateContractSubscription(suite.T(), suite.testState.client1, simpleStorageFFIChanged(), &fftypes.JSONObject{ "address": suite.contractAddress, @@ -218,7 +218,7 @@ func (suite *EthereumContractTestSuite) TestE2EContractEvents() { }, "output": map[string]interface{}{ "_value": "1", - "_from": suite.testState.org1.Identity, + "_from": suite.testState.org1key.Value, }, "subscription": sub.ID.String(), } @@ -230,7 +230,7 @@ func (suite *EthereumContractTestSuite) TestE2EContractEvents() { func (suite *EthereumContractTestSuite) TestDirectInvokeMethod() { defer suite.testState.done() - received1, changes1 := wsReader(suite.testState.ws1) + received1, changes1 := wsReader(suite.testState.ws1, true) sub := CreateContractSubscription(suite.T(), suite.testState.client1, simpleStorageFFIChanged(), &fftypes.JSONObject{ "address": suite.contractAddress, @@ -264,7 +264,7 @@ func (suite *EthereumContractTestSuite) TestDirectInvokeMethod() { }, "output": map[string]interface{}{ "_value": "2", - "_from": suite.testState.org1.Identity, + "_from": suite.testState.org1key.Value, }, "subscription": sub.ID.String(), } @@ -286,7 +286,7 @@ func (suite *EthereumContractTestSuite) TestDirectInvokeMethod() { func (suite *EthereumContractTestSuite) TestFFIInvokeMethod() { defer suite.testState.done() - received1, changes1 := wsReader(suite.testState.ws1) + received1, changes1 := wsReader(suite.testState.ws1, true) sub := CreateContractSubscription(suite.T(), suite.testState.client1, simpleStorageFFIChanged(), &fftypes.JSONObject{ "address": suite.contractAddress, @@ -321,7 +321,7 @@ func (suite *EthereumContractTestSuite) TestFFIInvokeMethod() { }, "output": map[string]interface{}{ "_value": "3", - "_from": 
suite.testState.org1.Identity, + "_from": suite.testState.org1key.Value, }, "subscription": sub.ID.String(), } diff --git a/test/e2e/ethereum_test.go b/test/e2e/ethereum_test.go index fd2ddeefb4..5e54dc617b 100644 --- a/test/e2e/ethereum_test.go +++ b/test/e2e/ethereum_test.go @@ -23,6 +23,7 @@ import ( ) func TestEthereumE2ESuite(t *testing.T) { + suite.Run(t, new(IdentityTestSuite)) suite.Run(t, new(OnChainOffChainTestSuite)) suite.Run(t, new(TokensTestSuite)) suite.Run(t, new(EthereumContractTestSuite)) diff --git a/test/e2e/fabric_contract_test.go b/test/e2e/fabric_contract_test.go index facdfc4457..81a7f035c0 100644 --- a/test/e2e/fabric_contract_test.go +++ b/test/e2e/fabric_contract_test.go @@ -140,7 +140,7 @@ func (suite *FabricContractTestSuite) BeforeTest(suiteName, testName string) { func (suite *FabricContractTestSuite) TestE2EContractEvents() { defer suite.testState.done() - received1, changes1 := wsReader(suite.testState.ws1) + received1, changes1 := wsReader(suite.testState.ws1, true) sub := CreateContractSubscription(suite.T(), suite.testState.client1, assetCreatedEvent, &fftypes.JSONObject{ "channel": "firefly", diff --git a/test/e2e/identity_test.go b/test/e2e/identity_test.go new file mode 100644 index 0000000000..103dc6e299 --- /dev/null +++ b/test/e2e/identity_test.go @@ -0,0 +1,138 @@ +// Copyright © 2021 Kaleido, Inc. +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package e2e + +import ( + "context" + "fmt" + "time" + + "github.com/hyperledger/firefly/pkg/fftypes" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/stretchr/testify/suite" +) + +type IdentityTestSuite struct { + suite.Suite + testState *testState +} + +func (suite *IdentityTestSuite) BeforeTest(suiteName, testName string) { + suite.testState = beforeE2ETest(suite.T()) +} + +func (suite *IdentityTestSuite) TestCustomChildIdentityBroadcasts() { + defer suite.testState.done() + + ctx := context.Background() + received1, _ := wsReader(suite.testState.ws1, false) + received2, _ := wsReader(suite.testState.ws2, false) + + // Create some keys + totalIdentities := 3 + keys := make([]string, totalIdentities) + for i := 0; i < totalIdentities; i++ { + keys[i] = CreateEthAccount(suite.T(), suite.testState.ethNode) + } + + ts := time.Now().Unix() + for i := 0; i < totalIdentities; i++ { + ClaimCustomIdentity(suite.T(), + suite.testState.client1, + keys[i], + fmt.Sprintf("custom_%d_%d", ts, i), + fmt.Sprintf("Description %d", i), + fftypes.JSONObject{"profile": i}, + suite.testState.org1.ID, + false) + } + + identityIDs := make(map[fftypes.UUID]bool) + for i := 0; i < totalIdentities; i++ { + ed := waitForIdentityConfirmed(suite.T(), received1) + identityIDs[*ed.Reference] = true + ed = waitForIdentityConfirmed(suite.T(), received2) + identityIDs[*ed.Reference] = true + } + assert.Len(suite.T(), identityIDs, totalIdentities) + + identities := make(map[string]*fftypes.Identity) + for identityID := range identityIDs { + identityNode1 := GetIdentity(suite.T(), suite.testState.client1, &identityID) + identityNode2 := GetIdentity(suite.T(), suite.testState.client1, &identityID) + assert.True(suite.T(), identityNode1.IdentityBase.Equals(ctx, &identityNode2.IdentityBase)) + identities[identityNode1.DID] = identityNode1 + } + + // Send a broadcast from each custom identity + for did := range identities { + resp, err := BroadcastMessageAsIdentity(suite.T(), suite.testState.client1, did, "identitytest", &fftypes.DataRefOrValue{ + Value: fftypes.JSONAnyPtr(`{"some": "data"}`), + }, false) + require.NoError(suite.T(), err) + assert.Equal(suite.T(), 202, resp.StatusCode()) + } + for range identities { + waitForMessageConfirmed(suite.T(), received1, fftypes.MessageTypeBroadcast) + waitForMessageConfirmed(suite.T(), received2, fftypes.MessageTypeBroadcast) + } + +} + +func (suite *IdentityTestSuite) TestCustomChildIdentityPrivate() { + defer suite.testState.done() + + received1, _ := wsReader(suite.testState.ws1, false) + received2, _ := wsReader(suite.testState.ws2, false) + + // Create an identity on both sides + org1Key := CreateEthAccount(suite.T(), suite.testState.ethNode) + org2Key := CreateEthAccount(suite.T(), suite.testState.ethNode) + + ts := time.Now().Unix() + custom1 := ClaimCustomIdentity(suite.T(), + suite.testState.client1, + org1Key, + fmt.Sprintf("custom_%d_org1priv", ts), + fmt.Sprintf("Description org1priv"), + nil, + suite.testState.org1.ID, + true) + custom2 := ClaimCustomIdentity(suite.T(), + suite.testState.client2, + org2Key, + fmt.Sprintf("custom_%d_org2priv", ts), + fmt.Sprintf("Description org2priv"), + nil, + suite.testState.org2.ID, + true) + for i := 0; i < 2; i++ { + waitForIdentityConfirmed(suite.T(), received1) + waitForIdentityConfirmed(suite.T(), received2) + } + + resp, err := PrivateMessageWithKey(suite.testState, suite.testState.client1, org1Key, "topic1", &fftypes.DataRefOrValue{ + Value: fftypes.JSONAnyPtr(`"test private custom 
identity"`), + }, []string{custom1.DID, custom2.DID}, "tag1", fftypes.TransactionTypeBatchPin, true) + require.NoError(suite.T(), err) + assert.Equal(suite.T(), 200, resp.StatusCode()) + + waitForMessageConfirmed(suite.T(), received1, fftypes.MessageTypePrivate) + waitForMessageConfirmed(suite.T(), received2, fftypes.MessageTypePrivate) + +} diff --git a/test/e2e/onchain_offchain_test.go b/test/e2e/onchain_offchain_test.go index 91c80092c3..51199d621f 100644 --- a/test/e2e/onchain_offchain_test.go +++ b/test/e2e/onchain_offchain_test.go @@ -46,8 +46,8 @@ func (suite *OnChainOffChainTestSuite) BeforeTest(suiteName, testName string) { func (suite *OnChainOffChainTestSuite) TestE2EBroadcast() { defer suite.testState.done() - received1, changes1 := wsReader(suite.testState.ws1) - received2, changes2 := wsReader(suite.testState.ws2) + received1, changes1 := wsReader(suite.testState.ws1, true) + received2, changes2 := wsReader(suite.testState.ws2, true) // Broadcast some messages, that should get batched, across two topics totalMessages := 10 @@ -62,7 +62,7 @@ func (suite *OnChainOffChainTestSuite) TestE2EBroadcast() { expectedData[topic] = append(expectedData[topic], data) - resp, err := BroadcastMessage(suite.testState.client1, topic, data, false) + resp, err := BroadcastMessage(suite.T(), suite.testState.client1, topic, data, false) require.NoError(suite.T(), err) assert.Equal(suite.T(), 202, resp.StatusCode()) } @@ -90,8 +90,8 @@ func (suite *OnChainOffChainTestSuite) TestE2EBroadcast() { func (suite *OnChainOffChainTestSuite) TestStrongDatatypesBroadcast() { defer suite.testState.done() - received1, changes1 := wsReader(suite.testState.ws1) - received2, changes2 := wsReader(suite.testState.ws2) + received1, changes1 := wsReader(suite.testState.ws1, true) + received2, changes2 := wsReader(suite.testState.ws2, true) var resp *resty.Response value := fftypes.JSONAnyPtr(`"Hello"`) @@ -106,7 +106,7 @@ func (suite *OnChainOffChainTestSuite) TestStrongDatatypesBroadcast() { } // Should be rejected as datatype not known - resp, err := BroadcastMessage(suite.testState.client1, "topic1", &data, true) + resp, err := BroadcastMessage(suite.T(), suite.testState.client1, "topic1", &data, true) require.NoError(suite.T(), err) assert.Equal(suite.T(), 400, resp.StatusCode()) assert.Contains(suite.T(), resp.String(), "FF10195") // datatype not found @@ -118,7 +118,7 @@ func (suite *OnChainOffChainTestSuite) TestStrongDatatypesBroadcast() { } dt = CreateDatatype(suite.T(), suite.testState.client1, dt, true) - resp, err = BroadcastMessage(suite.testState.client1, "topic1", &data, true) + resp, err = BroadcastMessage(suite.T(), suite.testState.client1, "topic1", &data, true) require.NoError(suite.T(), err) assert.Equal(suite.T(), 400, resp.StatusCode()) assert.Contains(suite.T(), resp.String(), "FF10198") // does not conform @@ -128,7 +128,7 @@ func (suite *OnChainOffChainTestSuite) TestStrongDatatypesBroadcast() { "name": "mywidget" }`) - resp, err = BroadcastMessage(suite.testState.client1, "topic1", &data, true) + resp, err = BroadcastMessage(suite.T(), suite.testState.client1, "topic1", &data, true) require.NoError(suite.T(), err) assert.Equal(suite.T(), 200, resp.StatusCode()) @@ -141,8 +141,8 @@ func (suite *OnChainOffChainTestSuite) TestStrongDatatypesBroadcast() { func (suite *OnChainOffChainTestSuite) TestStrongDatatypesPrivate() { defer suite.testState.done() - received1, changes1 := wsReader(suite.testState.ws1) - received2, changes2 := wsReader(suite.testState.ws2) + received1, changes1 := 
+	received1, changes1 := wsReader(suite.testState.ws1, true)
+	received2, changes2 := wsReader(suite.testState.ws2, true)
 
 	var resp *resty.Response
 	value := fftypes.JSONAnyPtr(`{"foo":"bar"}`)
@@ -202,8 +202,8 @@ func (suite *OnChainOffChainTestSuite) TestE2EPrivate() {
 
 	defer suite.testState.done()
 
-	received1, _ := wsReader(suite.testState.ws1)
-	received2, _ := wsReader(suite.testState.ws2)
+	received1, _ := wsReader(suite.testState.ws1, false)
+	received2, _ := wsReader(suite.testState.ws2, false)
 
 	// Send 10 messages, that should get batched, across two topics
 	totalMessages := 10
@@ -247,8 +247,8 @@ func (suite *OnChainOffChainTestSuite) TestE2EPrivate() {
 func (suite *OnChainOffChainTestSuite) TestE2EBroadcastBlob() {
 	defer suite.testState.done()
 
-	received1, _ := wsReader(suite.testState.ws1)
-	received2, _ := wsReader(suite.testState.ws2)
+	received1, _ := wsReader(suite.testState.ws1, false)
+	received2, _ := wsReader(suite.testState.ws2, false)
 
 	var resp *resty.Response
 
@@ -273,8 +273,8 @@ func (suite *OnChainOffChainTestSuite) TestE2EBroadcastBlob() {
 func (suite *OnChainOffChainTestSuite) TestE2EPrivateBlobDatatypeTagged() {
 	defer suite.testState.done()
 
-	received1, _ := wsReader(suite.testState.ws1)
-	received2, _ := wsReader(suite.testState.ws2)
+	received1, _ := wsReader(suite.testState.ws1, false)
+	received2, _ := wsReader(suite.testState.ws2, false)
 
 	var resp *resty.Response
 
@@ -303,8 +303,8 @@ func (suite *OnChainOffChainTestSuite) TestE2EPrivateBlobDatatypeTagged() {
 func (suite *OnChainOffChainTestSuite) TestE2EWebhookExchange() {
 	defer suite.testState.done()
 
-	received1, _ := wsReader(suite.testState.ws1)
-	received2, _ := wsReader(suite.testState.ws2)
+	received1, _ := wsReader(suite.testState.ws1, false)
+	received2, _ := wsReader(suite.testState.ws2, false)
 
 	subJSON := `{
 		"transport": "webhooks",
diff --git a/test/e2e/restclient_test.go b/test/e2e/restclient_test.go
index 1a9d93bbfb..91dc1833f0 100644
--- a/test/e2e/restclient_test.go
+++ b/test/e2e/restclient_test.go
@@ -45,6 +45,8 @@ var (
 	urlGetEvents             = "/namespaces/default/events"
 	urlSubscriptions         = "/namespaces/default/subscriptions"
 	urlDatatypes             = "/namespaces/default/datatypes"
+	urlIdentities            = "/namespaces/default/identities"
+	urlIdentity              = "/namespaces/default/identities/%s"
 	urlTokenPools            = "/namespaces/default/tokens/pools"
 	urlTokenMint             = "/namespaces/default/tokens/mint"
 	urlTokenBurn             = "/namespaces/default/tokens/burn"
@@ -58,6 +60,7 @@ var (
 	urlContractSubscriptions = "/namespaces/default/contracts/subscriptions"
 	urlBlockchainEvents      = "/namespaces/default/blockchainevents"
 	urlGetOrganizations      = "/network/organizations"
+	urlGetOrgKeys            = "/namespaces/ff_system/identities/%s/verifiers"
 )
 
 func NewResty(t *testing.T) *resty.Client {
@@ -136,7 +139,7 @@ func GetBlob(t *testing.T, client *resty.Client, data *fftypes.Data, expectedSta
 	return blob
 }
 
-func GetOrgs(t *testing.T, client *resty.Client, expectedStatus int) (orgs []*fftypes.Organization) {
+func GetOrgs(t *testing.T, client *resty.Client, expectedStatus int) (orgs []*fftypes.Identity) {
 	path := urlGetOrganizations
 	resp, err := client.R().
 		SetQueryParam("sort", "created").
@@ -147,6 +150,17 @@ func GetOrgs(t *testing.T, client *resty.Client, expectedStatus int) (orgs []*ff
 	return orgs
 }
 
+func GetIdentityBlockchainKeys(t *testing.T, client *resty.Client, identityID *fftypes.UUID, expectedStatus int) (verifiers []*fftypes.Verifier) {
+	path := fmt.Sprintf(urlGetOrgKeys, identityID)
+	resp, err := client.R().
+ SetQueryParam("type", fmt.Sprintf("!=%s", fftypes.VerifierTypeFFDXPeerID)). + SetResult(&verifiers). + Get(path) + require.NoError(t, err) + require.Equal(t, expectedStatus, resp.StatusCode(), "GET %s [%d]: %s", path, resp.StatusCode(), resp.String()) + return verifiers +} + func CreateSubscription(t *testing.T, client *resty.Client, input interface{}, expectedStatus int) *fftypes.Subscription { path := urlSubscriptions var sub fftypes.Subscription @@ -182,18 +196,82 @@ func DeleteSubscription(t *testing.T, client *resty.Client, id *fftypes.UUID) { require.Equal(t, 204, resp.StatusCode(), "DELETE %s [%d]: %s", path, resp.StatusCode(), resp.String()) } -func BroadcastMessage(client *resty.Client, topic string, data *fftypes.DataRefOrValue, confirm bool) (*resty.Response, error) { - return client.R(). +func BroadcastMessage(t *testing.T, client *resty.Client, topic string, data *fftypes.DataRefOrValue, confirm bool) (*resty.Response, error) { + return BroadcastMessageAsIdentity(t, client, "", topic, data, confirm) +} + +func BroadcastMessageAsIdentity(t *testing.T, client *resty.Client, did, topic string, data *fftypes.DataRefOrValue, confirm bool) (*resty.Response, error) { + var msg fftypes.Message + res, err := client.R(). SetBody(fftypes.MessageInOut{ Message: fftypes.Message{ Header: fftypes.MessageHeader{ Topics: fftypes.FFStringArray{topic}, + SignerRef: fftypes.SignerRef{ + Author: did, + }, }, }, InlineData: fftypes.InlineData{data}, }). SetQueryParam("confirm", strconv.FormatBool(confirm)). + SetResult(&msg). Post(urlBroadcastMessage) + t.Logf("Sent broadcast msg: %s", msg.Header.ID) + return res, err +} + +func CreateEthAccount(t *testing.T, client *resty.Client) string { + createPayload := map[string]interface{}{"jsonrpc": "2.0", "id": 0, "method": "personal_newAccount", "params": []interface{}{""}} + var resBody struct { + Result string `json:"result"` + } + res, err := client.R(). + SetBody(createPayload). + SetResult(&resBody). + Post("/") + assert.NoError(t, err) + assert.Equal(t, 200, res.StatusCode()) + newKey := resBody.Result + t.Logf("New key: %s", newKey) + unlockPayload := map[string]interface{}{"jsonrpc": "2.0", "id": 0, "method": "personal_unlockAccount", "params": []interface{}{newKey, "", 0}} + res, err = client.R(). + SetBody(unlockPayload). + Post("/") + assert.NoError(t, err) + assert.Equal(t, 200, res.StatusCode()) + return newKey +} + +func ClaimCustomIdentity(t *testing.T, client *resty.Client, key, name, desc string, profile fftypes.JSONObject, parent *fftypes.UUID, confirm bool) *fftypes.Identity { + var identity fftypes.Identity + res, err := client.R(). + SetBody(fftypes.IdentityCreateDTO{ + Name: name, + Type: fftypes.IdentityTypeCustom, + Parent: parent, + Key: key, + IdentityProfile: fftypes.IdentityProfile{ + Description: desc, + Profile: profile, + }, + }). + SetQueryParam("confirm", strconv.FormatBool(confirm)). + SetResult(&identity). + Post(urlIdentities) + assert.NoError(t, err) + assert.True(t, res.IsSuccess()) + return &identity +} + +func GetIdentity(t *testing.T, client *resty.Client, id *fftypes.UUID) *fftypes.Identity { + var identity fftypes.Identity + res, err := client.R(). + SetResult(&identity). 
+		Get(fmt.Sprintf(urlIdentity, id))
+	assert.NoError(t, err)
+	assert.True(t, res.IsSuccess())
+	return &identity
 }
 
 func CreateBlob(t *testing.T, client *resty.Client, dt *fftypes.DatatypeRef) *fftypes.Data {
@@ -284,6 +362,10 @@ func PrivateBlobMessageDatatypeTagged(ts *testState, client *resty.Client, topic
 }
 
 func PrivateMessage(ts *testState, client *resty.Client, topic string, data *fftypes.DataRefOrValue, orgNames []string, tag string, txType fftypes.TransactionType, confirm bool) (*resty.Response, error) {
+	return PrivateMessageWithKey(ts, client, "", topic, data, orgNames, tag, txType, confirm)
+}
+
+func PrivateMessageWithKey(ts *testState, client *resty.Client, key, topic string, data *fftypes.DataRefOrValue, orgNames []string, tag string, txType fftypes.TransactionType, confirm bool) (*resty.Response, error) {
 	members := make([]fftypes.MemberInput, len(orgNames))
 	for i, oName := range orgNames {
 		// We let FireFly resolve the friendly name of the org to the identity
@@ -297,6 +379,9 @@ func PrivateMessage(ts *testState, client *resty.Client, topic string, data *fft
 				Tag:    tag,
 				TxType: txType,
 				Topics: fftypes.FFStringArray{topic},
+				SignerRef: fftypes.SignerRef{
+					Key: key,
+				},
 			},
 		},
 		InlineData: fftypes.InlineData{data},
@@ -305,10 +390,13 @@ func PrivateMessage(ts *testState, client *resty.Client, topic string, data *fft
 			Name: fmt.Sprintf("test_%d", ts.startTime.UnixNano()),
 		},
 	}
-	return client.R().
+	res, err := client.R().
 		SetBody(msg).
 		SetQueryParam("confirm", strconv.FormatBool(confirm)).
+		SetResult(&msg.Message).
 		Post(urlPrivateMessage)
+	ts.t.Logf("Sent private message %s to %+v", msg.Header.ID, msg.Group.Members)
+	return res, err
 }
 
 func RequestReply(ts *testState, client *resty.Client, data *fftypes.DataRefOrValue, orgNames []string, tag string, txType fftypes.TransactionType) *fftypes.MessageInOut {
diff --git a/test/e2e/stack.go b/test/e2e/stack.go
index cb177a876d..dfac7b25e9 100644
--- a/test/e2e/stack.go
+++ b/test/e2e/stack.go
@@ -22,8 +22,10 @@ import (
 )
 
 type Stack struct {
-	Name    string    `json:"name,omitempty"`
-	Members []*Member `json:"members,omitempty"`
+	Name                  string    `json:"name,omitempty"`
+	ExposedBlockchainPort int       `json:"exposedGethPort,omitempty"`
+	BlockchainProvider    string    `json:"blockchainProvider"`
+	Members               []*Member `json:"members,omitempty"`
 }
 
 type Member struct {
diff --git a/test/e2e/tokens_test.go b/test/e2e/tokens_test.go
index a07120e8b8..3e42b7db7a 100644
--- a/test/e2e/tokens_test.go
+++ b/test/e2e/tokens_test.go
@@ -37,8 +37,8 @@ func (suite *TokensTestSuite) BeforeTest(suiteName, testName string) {
 func (suite *TokensTestSuite) TestE2EFungibleTokensAsync() {
 	defer suite.testState.done()
 
-	received1, _ := wsReader(suite.testState.ws1)
-	received2, _ := wsReader(suite.testState.ws2)
+	received1, _ := wsReader(suite.testState.ws1, false)
+	received2, _ := wsReader(suite.testState.ws2, false)
 
 	pools := GetTokenPools(suite.T(), suite.testState.client1, time.Unix(0, 0))
 	poolName := fmt.Sprintf("pool%d", len(pools))
@@ -71,8 +71,8 @@ func (suite *TokensTestSuite) TestE2EFungibleTokensAsync() {
 
 	approval := &fftypes.TokenApprovalInput{
 		TokenApproval: fftypes.TokenApproval{
-			Key:      suite.testState.org1.Identity,
-			Operator: suite.testState.org2.Identity,
+			Key:      suite.testState.org1key.Value,
+			Operator: suite.testState.org2key.Value,
 			Approved: true,
 		},
 	}
@@ -97,7 +97,7 @@ func (suite *TokensTestSuite) TestE2EFungibleTokensAsync() {
 	assert.Equal(suite.T(), fftypes.TokenTransferTypeMint, transfers[0].Type)
 	assert.Equal(suite.T(), int64(1), transfers[0].Amount.Int().Int64())
 	validateAccountBalances(suite.T(), suite.testState.client1, poolID, "", map[string]int64{
-		suite.testState.org1.Identity: 1,
+		suite.testState.org1key.Value: 1,
 	})
 
 	waitForEvent(suite.T(), received2, fftypes.EventTypeTransferConfirmed, nil)
@@ -107,15 +107,15 @@ func (suite *TokensTestSuite) TestE2EFungibleTokensAsync() {
 	assert.Equal(suite.T(), fftypes.TokenTransferTypeMint, transfers[0].Type)
 	assert.Equal(suite.T(), int64(1), transfers[0].Amount.Int().Int64())
 	validateAccountBalances(suite.T(), suite.testState.client2, poolID, "", map[string]int64{
-		suite.testState.org1.Identity: 1,
+		suite.testState.org1key.Value: 1,
 	})
 
 	transfer = &fftypes.TokenTransferInput{
 		TokenTransfer: fftypes.TokenTransfer{
-			To:     suite.testState.org2.Identity,
+			To:     suite.testState.org2key.Value,
 			Amount: *fftypes.NewFFBigInt(1),
-			From:   suite.testState.org1.Identity,
-			Key:    suite.testState.org2.Identity,
+			From:   suite.testState.org1key.Value,
+			Key:    suite.testState.org2key.Value,
 		},
 		Pool: poolName,
 		Message: &fftypes.MessageInOut{
@@ -138,8 +138,8 @@ func (suite *TokensTestSuite) TestE2EFungibleTokensAsync() {
 	assert.Equal(suite.T(), 1, len(data))
 	assert.Equal(suite.T(), `"token approval - payment for data"`, data[0].Value.String())
 	validateAccountBalances(suite.T(), suite.testState.client1, poolID, "", map[string]int64{
-		suite.testState.org1.Identity: 0,
-		suite.testState.org2.Identity: 1,
+		suite.testState.org1key.Value: 0,
+		suite.testState.org2key.Value: 1,
 	})
 
 	waitForEvent(suite.T(), received2, fftypes.EventTypeMessageConfirmed, transferOut.Message)
@@ -149,8 +149,8 @@ func (suite *TokensTestSuite) TestE2EFungibleTokensAsync() {
 	assert.Equal(suite.T(), fftypes.TokenTransferTypeTransfer, transfers[0].Type)
 	assert.Equal(suite.T(), int64(1), transfers[0].Amount.Int().Int64())
 	validateAccountBalances(suite.T(), suite.testState.client2, poolID, "", map[string]int64{
-		suite.testState.org1.Identity: 0,
-		suite.testState.org2.Identity: 1,
+		suite.testState.org1key.Value: 0,
+		suite.testState.org2key.Value: 1,
 	})
 
 	transfer = &fftypes.TokenTransferInput{
@@ -167,8 +167,8 @@ func (suite *TokensTestSuite) TestE2EFungibleTokensAsync() {
 	assert.Equal(suite.T(), "", transfers[0].TokenIndex)
 	assert.Equal(suite.T(), int64(1), transfers[0].Amount.Int().Int64())
 	validateAccountBalances(suite.T(), suite.testState.client2, poolID, "", map[string]int64{
-		suite.testState.org1.Identity: 0,
-		suite.testState.org2.Identity: 0,
+		suite.testState.org1key.Value: 0,
+		suite.testState.org2key.Value: 0,
 	})
 
 	waitForEvent(suite.T(), received1, fftypes.EventTypeTransferConfirmed, nil)
@@ -179,30 +179,30 @@ func (suite *TokensTestSuite) TestE2EFungibleTokensAsync() {
 	assert.Equal(suite.T(), "", transfers[0].TokenIndex)
 	assert.Equal(suite.T(), int64(1), transfers[0].Amount.Int().Int64())
 	validateAccountBalances(suite.T(), suite.testState.client1, poolID, "", map[string]int64{
-		suite.testState.org1.Identity: 0,
-		suite.testState.org2.Identity: 0,
+		suite.testState.org1key.Value: 0,
+		suite.testState.org2key.Value: 0,
 	})
 
 	accounts := GetTokenAccounts(suite.T(), suite.testState.client1, poolID)
 	assert.Equal(suite.T(), 2, len(accounts))
-	assert.Equal(suite.T(), suite.testState.org2.Identity, accounts[0].Key)
-	assert.Equal(suite.T(), suite.testState.org1.Identity, accounts[1].Key)
+	assert.Equal(suite.T(), suite.testState.org2key.Value, accounts[0].Key)
+	assert.Equal(suite.T(), suite.testState.org1key.Value, accounts[1].Key)
 
 	accounts = GetTokenAccounts(suite.T(), suite.testState.client2, poolID)
 	assert.Equal(suite.T(), 2, len(accounts))
-	assert.Equal(suite.T(), suite.testState.org2.Identity, accounts[0].Key)
-	assert.Equal(suite.T(), suite.testState.org1.Identity, accounts[1].Key)
+	assert.Equal(suite.T(), suite.testState.org2key.Value, accounts[0].Key)
+	assert.Equal(suite.T(), suite.testState.org1key.Value, accounts[1].Key)
 
-	accountPools := GetTokenAccountPools(suite.T(), suite.testState.client1, suite.testState.org1.Identity)
+	accountPools := GetTokenAccountPools(suite.T(), suite.testState.client1, suite.testState.org1key.Value)
 	assert.Equal(suite.T(), *poolID, *accountPools[0].Pool)
-	accountPools = GetTokenAccountPools(suite.T(), suite.testState.client2, suite.testState.org2.Identity)
+	accountPools = GetTokenAccountPools(suite.T(), suite.testState.client2, suite.testState.org2key.Value)
 	assert.Equal(suite.T(), *poolID, *accountPools[0].Pool)
 }
 
 func (suite *TokensTestSuite) TestE2ENonFungibleTokensSync() {
 	defer suite.testState.done()
 
-	received1, _ := wsReader(suite.testState.ws1)
-	received2, _ := wsReader(suite.testState.ws2)
+	received1, _ := wsReader(suite.testState.ws1, false)
+	received2, _ := wsReader(suite.testState.ws2, false)
 
 	pools := GetTokenPools(suite.T(), suite.testState.client1, time.Unix(0, 0))
 	poolName := fmt.Sprintf("pool%d", len(pools))
@@ -231,8 +231,8 @@ func (suite *TokensTestSuite) TestE2ENonFungibleTokensSync() {
 
 	approval := &fftypes.TokenApprovalInput{
 		TokenApproval: fftypes.TokenApproval{
-			Key:      suite.testState.org1.Identity,
-			Operator: suite.testState.org2.Identity,
+			Key:      suite.testState.org1key.Value,
+			Operator: suite.testState.org2key.Value,
 			Approved: true,
 		},
 		Pool: poolName,
@@ -254,7 +254,7 @@ func (suite *TokensTestSuite) TestE2ENonFungibleTokensSync() {
 	assert.Equal(suite.T(), "1", transferOut.TokenIndex)
 	assert.Equal(suite.T(), int64(1), transferOut.Amount.Int().Int64())
 	validateAccountBalances(suite.T(), suite.testState.client1, poolID, "1", map[string]int64{
-		suite.testState.org1.Identity: 1,
+		suite.testState.org1key.Value: 1,
 	})
 
 	waitForEvent(suite.T(), received1, fftypes.EventTypeTransferConfirmed, transferOut.LocalID)
@@ -265,16 +265,16 @@ func (suite *TokensTestSuite) TestE2ENonFungibleTokensSync() {
 	assert.Equal(suite.T(), "1", transfers[0].TokenIndex)
 	assert.Equal(suite.T(), int64(1), transfers[0].Amount.Int().Int64())
 	validateAccountBalances(suite.T(), suite.testState.client2, poolID, "1", map[string]int64{
-		suite.testState.org1.Identity: 1,
+		suite.testState.org1key.Value: 1,
 	})
 
 	transfer = &fftypes.TokenTransferInput{
 		TokenTransfer: fftypes.TokenTransfer{
 			TokenIndex: "1",
-			To:         suite.testState.org2.Identity,
+			To:         suite.testState.org2key.Value,
 			Amount:     *fftypes.NewFFBigInt(1),
-			From:       suite.testState.org1.Identity,
-			Key:        suite.testState.org2.Identity,
+			From:       suite.testState.org1key.Value,
+			Key:        suite.testState.org2key.Value,
 		},
 		Pool: poolName,
 		Message: &fftypes.MessageInOut{
@@ -293,8 +293,8 @@ func (suite *TokensTestSuite) TestE2ENonFungibleTokensSync() {
 	assert.Equal(suite.T(), 1, len(data))
 	assert.Equal(suite.T(), `"ownership change"`, data[0].Value.String())
 	validateAccountBalances(suite.T(), suite.testState.client1, poolID, "1", map[string]int64{
-		suite.testState.org1.Identity: 0,
-		suite.testState.org2.Identity: 1,
+		suite.testState.org1key.Value: 0,
+		suite.testState.org2key.Value: 1,
 	})
 
 	waitForEvent(suite.T(), received1, fftypes.EventTypeMessageConfirmed, transferOut.Message)
@@ -305,8 +305,8 @@ func (suite *TokensTestSuite) TestE2ENonFungibleTokensSync() {
 	assert.Equal(suite.T(), "1", transfers[0].TokenIndex)
 	assert.Equal(suite.T(), int64(1), transfers[0].Amount.Int().Int64())
 	validateAccountBalances(suite.T(), suite.testState.client2, poolID, "1", map[string]int64{
-		suite.testState.org1.Identity: 0,
-		suite.testState.org2.Identity: 1,
+		suite.testState.org1key.Value: 0,
+		suite.testState.org2key.Value: 1,
 	})
 
 	transfer = &fftypes.TokenTransferInput{
@@ -321,8 +321,8 @@ func (suite *TokensTestSuite) TestE2ENonFungibleTokensSync() {
 	assert.Equal(suite.T(), "1", transferOut.TokenIndex)
 	assert.Equal(suite.T(), int64(1), transferOut.Amount.Int().Int64())
 	validateAccountBalances(suite.T(), suite.testState.client2, poolID, "1", map[string]int64{
-		suite.testState.org1.Identity: 0,
-		suite.testState.org2.Identity: 0,
+		suite.testState.org1key.Value: 0,
+		suite.testState.org2key.Value: 0,
 	})
 
 	waitForEvent(suite.T(), received2, fftypes.EventTypeTransferConfirmed, transferOut.LocalID)
@@ -333,21 +333,21 @@ func (suite *TokensTestSuite) TestE2ENonFungibleTokensSync() {
 	assert.Equal(suite.T(), "1", transfers[0].TokenIndex)
 	assert.Equal(suite.T(), int64(1), transfers[0].Amount.Int().Int64())
 	validateAccountBalances(suite.T(), suite.testState.client1, poolID, "1", map[string]int64{
-		suite.testState.org1.Identity: 0,
-		suite.testState.org2.Identity: 0,
+		suite.testState.org1key.Value: 0,
+		suite.testState.org2key.Value: 0,
 	})
 
 	accounts := GetTokenAccounts(suite.T(), suite.testState.client1, poolID)
 	assert.Equal(suite.T(), 2, len(accounts))
-	assert.Equal(suite.T(), suite.testState.org2.Identity, accounts[0].Key)
-	assert.Equal(suite.T(), suite.testState.org1.Identity, accounts[1].Key)
+	assert.Equal(suite.T(), suite.testState.org2key.Value, accounts[0].Key)
+	assert.Equal(suite.T(), suite.testState.org1key.Value, accounts[1].Key)
 
 	accounts = GetTokenAccounts(suite.T(), suite.testState.client2, poolID)
 	assert.Equal(suite.T(), 2, len(accounts))
-	assert.Equal(suite.T(), suite.testState.org2.Identity, accounts[0].Key)
-	assert.Equal(suite.T(), suite.testState.org1.Identity, accounts[1].Key)
+	assert.Equal(suite.T(), suite.testState.org2key.Value, accounts[0].Key)
+	assert.Equal(suite.T(), suite.testState.org1key.Value, accounts[1].Key)
 
-	accountPools := GetTokenAccountPools(suite.T(), suite.testState.client1, suite.testState.org1.Identity)
+	accountPools := GetTokenAccountPools(suite.T(), suite.testState.client1, suite.testState.org1key.Value)
 	assert.Equal(suite.T(), *poolID, *accountPools[0].Pool)
-	accountPools = GetTokenAccountPools(suite.T(), suite.testState.client2, suite.testState.org2.Identity)
+	accountPools = GetTokenAccountPools(suite.T(), suite.testState.client2, suite.testState.org2key.Value)
 	assert.Equal(suite.T(), *poolID, *accountPools[0].Pool)
 }