
Commit

Merge pull request #2611 from dbluhm/feature/did-peer-1-resolve
feat: support resolving did:peer:1 received in did exchange
swcurran committed Dec 7, 2023
2 parents c677185 + e498c26 commit 2857a2a
Showing 10 changed files with 317 additions and 339 deletions.
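As context for the diffs below: resolving a did:peer:1 received during DID exchange yields a plain document dict rather than a legacy DIDDoc, so `BaseConnectionManager.store_did_document` and `fetch_did_document` now work with dicts. A minimal sketch of the resulting round trip, assuming a `BaseConnectionManager` already bound to a profile (the helper name and the resolved document are illustrative, not part of this change):

```python
from aries_cloudagent.connections.base_manager import BaseConnectionManager


async def store_and_reload(manager: BaseConnectionManager, resolved: dict) -> dict:
    """Persist a resolved DID Document dict and read it back.

    `resolved` is assumed to be the document dict produced by the resolver for a
    qualified, non-sov DID such as a did:peer:1; its "id" field carries the DID.
    """
    # With this change a plain dict is accepted alongside the legacy DIDDoc class;
    # the manager takes the DID from the document's "id" field before storing it.
    await manager.store_did_document(resolved)

    # fetch_did_document now returns the stored document as a dict plus the
    # underlying storage record. (did:sov DIDs are stored unqualified, so this
    # direct round trip assumes a non-sov DID.)
    doc, _record = await manager.fetch_did_document(resolved["id"])
    return doc
```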
51 changes: 29 additions & 22 deletions aries_cloudagent/connections/base_manager.py
@@ -3,6 +3,7 @@
For Connection, DIDExchange and OutOfBand Manager.
"""

import json
import logging
from typing import List, Optional, Sequence, Text, Tuple, Union

@@ -132,35 +133,40 @@ async def create_did_document(

return did_doc

async def store_did_document(self, did_doc: DIDDoc):
async def store_did_document(self, value: Union[DIDDoc, dict]):
"""Store a DID document.
Args:
did_doc: The `DIDDoc` instance to persist
value: The `DIDDoc` instance to persist
"""
assert did_doc.did
if isinstance(value, DIDDoc):
did = value.did
doc = value.to_json()
else:
did = value["id"]
doc = json.dumps(value)

# Special case: we used to store did:sov dids as unqualified.
# For backwards compatibility, we'll strip off the prefix.
if did.startswith("did:sov:"):
did = did[8:]

self._logger.debug("Storing DID document for %s: %s", did, doc)

try:
stored_doc, record = await self.fetch_did_document(did_doc.did)
stored_doc, record = await self.fetch_did_document(did)
except StorageNotFoundError:
record = StorageRecord(
self.RECORD_TYPE_DID_DOC,
did_doc.to_json(),
{"did": did_doc.did},
)
record = StorageRecord(self.RECORD_TYPE_DID_DOC, doc, {"did": did})
async with self._profile.session() as session:
storage: BaseStorage = session.inject(BaseStorage)
await storage.add_record(record)
else:
async with self._profile.session() as session:
storage: BaseStorage = session.inject(BaseStorage)
await storage.update_record(
record, did_doc.to_json(), {"did": did_doc.did}
)
await self.remove_keys_for_did(did_doc.did)
for key in did_doc.pubkey.values():
if key.controller == did_doc.did:
await self.add_key_for_did(did_doc.did, key.value)
await storage.update_record(record, doc, {"did": did})

await self.remove_keys_for_did(did)
await self.record_did(did)

async def add_key_for_did(self, did: str, key: str):
"""Store a verkey for lookup against a DID.
@@ -219,12 +225,12 @@ async def resolve_didcomm_services(
doc: ResolvedDocument = pydid.deserialize_document(doc_dict, strict=True)
except ResolverError as error:
raise BaseConnectionManagerError(
"Failed to resolve public DID in invitation"
"Failed to resolve DID services"
) from error

if not doc.service:
raise BaseConnectionManagerError(
"Cannot connect via public DID that has no associated services"
"Cannot connect via DID that has no associated services"
)

didcomm_services = sorted(
@@ -580,7 +586,7 @@ async def get_connection_targets(

def diddoc_connection_targets(
self,
doc: DIDDoc,
doc: Optional[Union[DIDDoc, dict]],
sender_verkey: str,
their_label: Optional[str] = None,
) -> Sequence[ConnectionTarget]:
@@ -591,6 +597,8 @@ def diddoc_connection_targets(
sender_verkey: The verkey we are using
their_label: The connection label they are using
"""
if isinstance(doc, dict):
doc = DIDDoc.deserialize(doc)
if not doc:
raise BaseConnectionManagerError("No DIDDoc provided for connection target")
if not doc.did:
@@ -617,17 +625,16 @@
)
return targets

async def fetch_did_document(self, did: str) -> Tuple[DIDDoc, StorageRecord]:
async def fetch_did_document(self, did: str) -> Tuple[dict, StorageRecord]:
"""Retrieve a DID Document for a given DID.
Args:
did: The DID to search for
"""
# legacy documents for unqualified dids
async with self._profile.session() as session:
storage = session.inject(BaseStorage)
record = await storage.find_record(self.RECORD_TYPE_DID_DOC, {"did": did})
return DIDDoc.from_json(record.value), record
return json.loads(record.value), record

async def find_connection(
self,
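`diddoc_connection_targets` now also accepts the dict form returned by `fetch_did_document`, deserializing it into a legacy DIDDoc internally. A hedged sketch of that pairing (the helper name is illustrative; it assumes the stored document is in the legacy format that `DIDDoc.deserialize` understands):

```python
from typing import Sequence

from aries_cloudagent.connections.base_manager import BaseConnectionManager
from aries_cloudagent.connections.models.connection_target import ConnectionTarget


async def targets_for_stored_did(
    manager: BaseConnectionManager, did: str, sender_verkey: str
) -> Sequence[ConnectionTarget]:
    """Fetch a stored DID Document and build connection targets from it."""
    # fetch_did_document yields (dict, StorageRecord); only the document is needed.
    doc, _record = await manager.fetch_did_document(did)

    # diddoc_connection_targets accepts either that dict or a DIDDoc instance and,
    # per the change above, deserializes a dict with DIDDoc.deserialize, so the
    # stored document must be legacy-formatted for this call to succeed.
    return manager.diddoc_connection_targets(doc, sender_verkey)
```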
15 changes: 9 additions & 6 deletions aries_cloudagent/connections/models/conn_record.py
@@ -9,8 +9,8 @@
from ...core.profile import ProfileSession
from ...messaging.models.base_record import BaseRecord, BaseRecordSchema
from ...messaging.valid import (
INDY_DID_EXAMPLE,
INDY_DID_VALIDATE,
GENERIC_DID_EXAMPLE,
GENERIC_DID_VALIDATE,
INDY_RAW_PUBLIC_KEY_EXAMPLE,
INDY_RAW_PUBLIC_KEY_VALIDATE,
UUID4_EXAMPLE,
@@ -653,15 +653,18 @@ class Meta:
)
my_did = fields.Str(
required=False,
validate=INDY_DID_VALIDATE,
metadata={"description": "Our DID for connection", "example": INDY_DID_EXAMPLE},
validate=GENERIC_DID_VALIDATE,
metadata={
"description": "Our DID for connection",
"example": GENERIC_DID_EXAMPLE,
},
)
their_did = fields.Str(
required=False,
validate=INDY_DID_VALIDATE,
validate=GENERIC_DID_VALIDATE,
metadata={
"description": "Their DID for connection",
"example": INDY_DID_EXAMPLE,
"example": GENERIC_DID_EXAMPLE,
},
)
their_label = fields.Str(
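The schema change above relaxes `my_did`/`their_did` validation from Indy-only to generic DIDs, which is what lets a qualified DID like did:peer:1 appear on a connection record. A small sketch of checking a value against the same validator the schema now uses; it assumes `GENERIC_DID_VALIDATE` behaves as a callable marshmallow validator, as its use in `fields.Str(validate=...)` suggests, and the helper name and sample values are illustrative only:

```python
from marshmallow import ValidationError

from aries_cloudagent.messaging.valid import GENERIC_DID_VALIDATE


def is_acceptable_connection_did(did: str) -> bool:
    """Return True if `did` would pass the validation ConnRecordSchema now applies."""
    try:
        # GENERIC_DID_VALIDATE replaces INDY_DID_VALIDATE on my_did/their_did,
        # admitting qualified DIDs (e.g. did:peer:1 values) as well as legacy ones.
        GENERIC_DID_VALIDATE(did)
        return True
    except ValidationError:
        return False


# Illustrative values only:
# is_acceptable_connection_did("did:peer:1zQmExampleOnly")  -> expected True
# is_acceptable_connection_did("not a did")                 -> expected False
```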
77 changes: 11 additions & 66 deletions aries_cloudagent/connections/tests/test_base_manager.py
@@ -40,9 +40,7 @@
from ...resolver.default.key import KeyDIDResolver
from ...resolver.default.legacy_peer import LegacyPeerDIDResolver
from ...resolver.did_resolver import DIDResolver
from ...storage.base import BaseStorage
from ...storage.error import StorageNotFoundError
from ...storage.record import StorageRecord
from ...transport.inbound.receipt import MessageReceipt
from ...utils.multiformats import multibase, multicodec
from ...wallet.base import DIDInfo
@@ -243,70 +241,6 @@ async def test_did_key_storage(self):
assert did == self.test_target_did
await self.manager.remove_keys_for_did(self.test_target_did)

async def test_store_did_document_with_routing_keys(self):
"""Regression test for ensuring agents with the same mediator can connect."""

# Replicate old behavior where routing keys could be stored multiple times
routing_key = "cK7fwfjpakMuv8QKVv2y6qouZddVw4TxZNQPUs2fFTd"
async with self.profile.session() as session:
for _ in range(3):
record = StorageRecord(
self.manager.RECORD_TYPE_DID_KEY,
routing_key,
{"did": "bogus", "key": routing_key},
)
storage = session.inject(BaseStorage)
await storage.add_record(record)

# The DIDDoc class will turn the routing key into a publicKey entry.
# This is NOT the correct behavior for normalizing DID Documents.
# Unfortunately, it's been doing it for a long time; to accommodate
# stored records, we need to make sure we can handle duplicate records
# where they shouldn't actually be.
# These records were never used or else we would have seen errors raised
# by find_did_for_key complaining of duplicate records.
doc_with_routing_keys = DIDDoc.deserialize(
{
"@context": "https://w3id.org/did/v1",
"publicKey": [
{
"id": "YQwDgq9vdAbB3fk1tkeXmg#1",
"controller": "YQwDgq9vdAbB3fk1tkeXmg",
"type": "Ed25519VerificationKey2018",
"publicKeyBase58": "J81x9zdJa8CGSbTYpoYQaNrV6yv13M1Lgz4tmkNPKwZn",
},
{
"id": "YQwDgq9vdAbB3fk1tkeXmg#1",
"controller": "YQwDgq9vdAbB3fk1tkeXmg",
"type": "Ed25519VerificationKey2018",
"publicKeyBase58": routing_key,
},
],
"service": [
{
"id": "YQwDgq9vdAbB3fk1tkeXmg#IndyAgentService",
"serviceEndpoint": "https://aries-mediator-agent.vonx.io",
"type": "IndyAgent",
"priority": 0,
"recipientKeys": [
"J81x9zdJa8CGSbTYpoYQaNrV6yv13M1Lgz4tmkNPKwZn"
],
"routingKeys": [routing_key],
}
],
"authentication": [
{
"publicKey": "YQwDgq9vdAbB3fk1tkeXmg#1",
"type": "Ed25519SignatureAuthentication2018",
}
],
"id": "YQwDgq9vdAbB3fk1tkeXmg",
}
)
with self.assertLogs(level="WARNING") as context:
await self.manager.store_did_document(doc_with_routing_keys)
assert context.output and "Key already associated with DID" in context.output[0]

async def test_fetch_connection_targets_no_my_did(self):
mock_conn = mock.MagicMock()
mock_conn.my_did = None
@@ -1793,3 +1727,14 @@ async def test_get_endpoints(self):
"localhost:8020",
"10.20.30.40:5060",
)

async def test_diddoc_connection_targets_diddoc(self):
did_doc = self.make_did_doc(
self.test_target_did,
self.test_target_verkey,
)
targets = self.manager.diddoc_connection_targets(
did_doc,
self.test_verkey,
)
assert isinstance(targets[0], ConnectionTarget)
(Diffs for the remaining 7 changed files are not shown.)
