From 575c6a9a54d8c3a0c3e34f9ba8d6196cdabce2b8 Mon Sep 17 00:00:00 2001 From: LucasG0 Date: Tue, 26 Aug 2025 13:18:04 +0200 Subject: [PATCH 1/4] Fix filters parameter --- infrahub_sdk/client.py | 10 +++------ infrahub_sdk/node/node.py | 40 +++++++++++++++++++--------------- tests/integration/test_node.py | 33 ++++++++++++++++++++++++++++ 3 files changed, 58 insertions(+), 25 deletions(-) diff --git a/infrahub_sdk/client.py b/infrahub_sdk/client.py index 16c1c73a..ccf164c7 100644 --- a/infrahub_sdk/client.py +++ b/infrahub_sdk/client.py @@ -539,7 +539,6 @@ async def _process_nodes_and_relationships( response: dict[str, Any], schema_kind: str, branch: str, - prefetch_relationships: bool, timeout: int | None = None, ) -> ProcessRelationsNode: """Processes InfrahubNode and their Relationships from the GraphQL query response. @@ -548,7 +547,6 @@ async def _process_nodes_and_relationships( response (dict[str, Any]): The response from the GraphQL query. schema_kind (str): The kind of schema being queried. branch (str): The branch name. - prefetch_relationships (bool): Flag to indicate whether to prefetch relationship data. timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds. 
Returns: @@ -564,10 +562,9 @@ async def _process_nodes_and_relationships( node = await InfrahubNode.from_graphql(client=self, branch=branch, data=item, timeout=timeout) nodes.append(node) - if prefetch_relationships: - await node._process_relationships( - node_data=item, branch=branch, related_nodes=related_nodes, timeout=timeout - ) + await node._process_relationships( + node_data=item, branch=branch, related_nodes=related_nodes, timeout=timeout + ) return ProcessRelationsNode(nodes=nodes, related_nodes=related_nodes) @@ -814,7 +811,6 @@ async def process_page(page_offset: int, page_number: int) -> tuple[dict, Proces response=response, schema_kind=schema.kind, branch=branch, - prefetch_relationships=prefetch_relationships, timeout=timeout, ) return response, process_result diff --git a/infrahub_sdk/node/node.py b/infrahub_sdk/node/node.py index 242281b5..459f5384 100644 --- a/infrahub_sdk/node/node.py +++ b/infrahub_sdk/node/node.py @@ -730,10 +730,11 @@ async def generate_query_data_node( continue rel_schema = self._schema.get_relationship(name=rel_name) - if not rel_schema or (not inherited and rel_schema.inherited): continue + # Don't fetch many attribute/parent relationships unless they are specified in `include` + # TODO Why wouldn't we we fetch them if prefetch_relationships is True? 
if ( rel_schema.cardinality == RelationshipCardinality.MANY # type: ignore[union-attr] and rel_schema.kind not in [RelationshipKind.ATTRIBUTE, RelationshipKind.PARENT] # type: ignore[union-attr] @@ -741,8 +742,8 @@ async def generate_query_data_node( ): continue - peer_data: dict[str, Any] = {} - if rel_schema and prefetch_relationships: + should_fetch_relationship = prefetch_relationships or (include is not None and rel_name in include) + if rel_schema and should_fetch_relationship: peer_schema = await self._client.schema.get(kind=rel_schema.peer, branch=self._branch) peer_node = InfrahubNode(client=self._client, schema=peer_schema, branch=self._branch) peer_data = await peer_node.generate_query_data_node( @@ -751,23 +752,26 @@ async def generate_query_data_node( property=property, ) - if rel_schema and rel_schema.cardinality == "one": - rel_data = RelatedNode._generate_query_data(peer_data=peer_data, property=property) - # Nodes involved in a hierarchy are required to inherit from a common ancestor node, and graphql - # tries to resolve attributes in this ancestor instead of actual node. To avoid - # invalid queries issues when attribute is missing in the common ancestor, we use a fragment - # to explicit actual node kind we are querying. - if rel_schema.kind == RelationshipKind.HIERARCHY: - data_node = rel_data["node"] - rel_data["node"] = {} - rel_data["node"][f"...on {rel_schema.peer}"] = data_node - elif rel_schema and rel_schema.cardinality == "many": - rel_data = RelationshipManager._generate_query_data(peer_data=peer_data, property=property) + # TODO is there a reason why we fetch here even with prefetch_relationships == False? + if rel_schema.cardinality == "one": + rel_data = RelatedNode._generate_query_data(peer_data=peer_data, property=property) + # Nodes involved in a hierarchy are required to inherit from a common ancestor node, and graphql + # tries to resolve attributes in this ancestor instead of actual node. 
To avoid + # invalid queries issues when attribute is missing in the common ancestor, we use a fragment + # to explicit actual node kind we are querying. + if rel_schema.kind == RelationshipKind.HIERARCHY: + data_node = rel_data["node"] + rel_data["node"] = {} + rel_data["node"][f"...on {rel_schema.peer}"] = data_node + elif rel_schema.cardinality == "many": + rel_data = RelationshipManager._generate_query_data(peer_data=peer_data, property=property) + else: + raise ValueError(f"Unknown relationship cardinality {rel_schema.cardinality}") - data[rel_name] = rel_data + data[rel_name] = rel_data - if insert_alias: - data[rel_name]["@alias"] = f"__alias__{self._schema.kind}__{rel_name}" + if insert_alias: + data[rel_name]["@alias"] = f"__alias__{self._schema.kind}__{rel_name}" return data diff --git a/tests/integration/test_node.py b/tests/integration/test_node.py index 3716dc1a..e329f53a 100644 --- a/tests/integration/test_node.py +++ b/tests/integration/test_node.py @@ -85,6 +85,39 @@ async def test_node_create_with_relationships_using_related_node( assert node_after.owner.peer.id == person_joe.id assert node_after.owner.peer.typename == "TestingPerson" + async def test_node_filters_include( + self, + default_branch: str, + client: InfrahubClient, + initial_schema: None, + manufacturer_mercedes, + person_joe, + tag_red, + ) -> None: + car = await client.create( + kind=TESTING_CAR, + name="Tiguan2", + color="Black", + manufacturer=manufacturer_mercedes, + owner=person_joe, + tags=[tag_red], + ) + await car.save(allow_upsert=True) + assert car.id is not None + + node_after = await client.get(kind=TESTING_CAR, id=car.id) + + with pytest.raises(ValueError): + # match=r"Node must have at least one identifier (ID or HFID) to query it." 
+ _ = node_after.owner.peer + + assert len(node_after.tags.peers) == 0 + + # Test both one and many relationships + node_after = await client.get(kind=TESTING_CAR, id=car.id, include=["tags", "owner"]) + assert [tag.id for tag in node_after.tags.peers] == [tag_red.id] + assert node_after.owner.peer.id == person_joe.id, f"{person_joe.id=}" + async def test_node_update_with_original_data( self, default_branch: str, From 1cadd6f816cd65e68da54ac6c99e938cf92c0a0b Mon Sep 17 00:00:00 2001 From: LucasG0 Date: Tue, 26 Aug 2025 13:33:30 +0200 Subject: [PATCH 2/4] remove nested include/exclude --- infrahub_sdk/client.py | 16 +++++++++--- infrahub_sdk/node/node.py | 48 ++++++++++++++++++---------------- tests/integration/test_node.py | 5 ++-- 3 files changed, 41 insertions(+), 28 deletions(-) diff --git a/infrahub_sdk/client.py b/infrahub_sdk/client.py index ccf164c7..642bb32e 100644 --- a/infrahub_sdk/client.py +++ b/infrahub_sdk/client.py @@ -539,6 +539,8 @@ async def _process_nodes_and_relationships( response: dict[str, Any], schema_kind: str, branch: str, + prefetch_relationships: bool, + include: list[str] | None, timeout: int | None = None, ) -> ProcessRelationsNode: """Processes InfrahubNode and their Relationships from the GraphQL query response. @@ -547,6 +549,7 @@ async def _process_nodes_and_relationships( response (dict[str, Any]): The response from the GraphQL query. schema_kind (str): The kind of schema being queried. branch (str): The branch name. + prefetch_relationships (bool): Flag to indicate whether to prefetch relationship data. timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds. 
Returns: @@ -562,9 +565,14 @@ async def _process_nodes_and_relationships( node = await InfrahubNode.from_graphql(client=self, branch=branch, data=item, timeout=timeout) nodes.append(node) - await node._process_relationships( - node_data=item, branch=branch, related_nodes=related_nodes, timeout=timeout - ) + if prefetch_relationships or include is not None: + await node._process_relationships( + node_data=item, + branch=branch, + related_nodes=related_nodes, + timeout=timeout, + include=include, + ) return ProcessRelationsNode(nodes=nodes, related_nodes=related_nodes) @@ -811,7 +819,9 @@ async def process_page(page_offset: int, page_number: int) -> tuple[dict, Proces response=response, schema_kind=schema.kind, branch=branch, + prefetch_relationships=prefetch_relationships, timeout=timeout, + include=include, ) return response, process_result diff --git a/infrahub_sdk/node/node.py b/infrahub_sdk/node/node.py index 459f5384..ac387c9c 100644 --- a/infrahub_sdk/node/node.py +++ b/infrahub_sdk/node/node.py @@ -730,11 +730,10 @@ async def generate_query_data_node( continue rel_schema = self._schema.get_relationship(name=rel_name) + if not rel_schema or (not inherited and rel_schema.inherited): continue - # Don't fetch many attribute/parent relationships unless they are specified in `include` - # TODO Why wouldn't we we fetch them if prefetch_relationships is True? 
if ( rel_schema.cardinality == RelationshipCardinality.MANY # type: ignore[union-attr] and rel_schema.kind not in [RelationshipKind.ATTRIBUTE, RelationshipKind.PARENT] # type: ignore[union-attr] @@ -742,36 +741,32 @@ async def generate_query_data_node( ): continue + peer_data: dict[str, Any] = {} should_fetch_relationship = prefetch_relationships or (include is not None and rel_name in include) if rel_schema and should_fetch_relationship: peer_schema = await self._client.schema.get(kind=rel_schema.peer, branch=self._branch) peer_node = InfrahubNode(client=self._client, schema=peer_schema, branch=self._branch) peer_data = await peer_node.generate_query_data_node( - include=include, - exclude=exclude, property=property, ) - # TODO is there a reason why we fetch here even with prefetch_relationships == False? - if rel_schema.cardinality == "one": - rel_data = RelatedNode._generate_query_data(peer_data=peer_data, property=property) - # Nodes involved in a hierarchy are required to inherit from a common ancestor node, and graphql - # tries to resolve attributes in this ancestor instead of actual node. To avoid - # invalid queries issues when attribute is missing in the common ancestor, we use a fragment - # to explicit actual node kind we are querying. - if rel_schema.kind == RelationshipKind.HIERARCHY: - data_node = rel_data["node"] - rel_data["node"] = {} - rel_data["node"][f"...on {rel_schema.peer}"] = data_node - elif rel_schema.cardinality == "many": - rel_data = RelationshipManager._generate_query_data(peer_data=peer_data, property=property) - else: - raise ValueError(f"Unknown relationship cardinality {rel_schema.cardinality}") + if rel_schema and rel_schema.cardinality == "one": + rel_data = RelatedNode._generate_query_data(peer_data=peer_data, property=property) + # Nodes involved in a hierarchy are required to inherit from a common ancestor node, and graphql + # tries to resolve attributes in this ancestor instead of actual node. 
To avoid + # invalid queries issues when attribute is missing in the common ancestor, we use a fragment + # to explicit actual node kind we are querying. + if rel_schema.kind == RelationshipKind.HIERARCHY: + data_node = rel_data["node"] + rel_data["node"] = {} + rel_data["node"][f"...on {rel_schema.peer}"] = data_node + elif rel_schema and rel_schema.cardinality == "many": + rel_data = RelationshipManager._generate_query_data(peer_data=peer_data, property=property) - data[rel_name] = rel_data + data[rel_name] = rel_data - if insert_alias: - data[rel_name]["@alias"] = f"__alias__{self._schema.kind}__{rel_name}" + if insert_alias: + data[rel_name]["@alias"] = f"__alias__{self._schema.kind}__{rel_name}" return data @@ -890,7 +885,12 @@ async def update( await self._process_mutation_result(mutation_name=mutation_name, response=response, timeout=timeout) async def _process_relationships( - self, node_data: dict[str, Any], branch: str, related_nodes: list[InfrahubNode], timeout: int | None = None + self, + node_data: dict[str, Any], + branch: str, + related_nodes: list[InfrahubNode], + timeout: int | None = None, + include: list[str] | None = None, ) -> None: """Processes the Relationships of a InfrahubNode and add Related Nodes to a list. @@ -901,6 +901,8 @@ async def _process_relationships( timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds. 
""" for rel_name in self._relationships: + if include is not None and rel_name not in include: + continue rel = getattr(self, rel_name) if rel and isinstance(rel, RelatedNode): relation = node_data["node"].get(rel_name, None) diff --git a/tests/integration/test_node.py b/tests/integration/test_node.py index e329f53a..652e7203 100644 --- a/tests/integration/test_node.py +++ b/tests/integration/test_node.py @@ -105,10 +105,11 @@ async def test_node_filters_include( await car.save(allow_upsert=True) assert car.id is not None + # Clear store, as when we call `owner.peer`, we actually rely on the peer having being stored in store. + client.store._branches = {} node_after = await client.get(kind=TESTING_CAR, id=car.id) - with pytest.raises(ValueError): - # match=r"Node must have at least one identifier (ID or HFID) to query it." + with pytest.raises(NodeNotFoundError, match=f"Unable to find the node '{person_joe.id}' in the store"): _ = node_after.owner.peer assert len(node_after.tags.peers) == 0 From f344e760b949d2235e6f75a01d41dd139676aaa6 Mon Sep 17 00:00:00 2001 From: LucasG0 Date: Thu, 28 Aug 2025 17:26:54 +0200 Subject: [PATCH 3/4] fix test include --- infrahub_sdk/client.py | 8 ++++++-- infrahub_sdk/node/node.py | 6 ++---- tests/unit/sdk/conftest.py | 22 ++++++++++++++++++++++ tests/unit/sdk/test_node.py | 23 +++++++++++++++++++---- 4 files changed, 49 insertions(+), 10 deletions(-) diff --git a/infrahub_sdk/client.py b/infrahub_sdk/client.py index 642bb32e..f9a380bd 100644 --- a/infrahub_sdk/client.py +++ b/infrahub_sdk/client.py @@ -558,6 +558,9 @@ async def _process_nodes_and_relationships( - 'related_nodes': A list of InfrahubNode objects representing the related nodes """ + # Ideally, include and relationships wouldn't be parameters of this method, they should only + # be used to build the request for the server, and this method would build node according to the response. 
+ nodes: list[InfrahubNode] = [] related_nodes: list[InfrahubNode] = [] @@ -571,7 +574,6 @@ async def _process_nodes_and_relationships( branch=branch, related_nodes=related_nodes, timeout=timeout, - include=include, ) return ProcessRelationsNode(nodes=nodes, related_nodes=related_nodes) @@ -1837,6 +1839,7 @@ def _process_nodes_and_relationships( schema_kind: str, branch: str, prefetch_relationships: bool, + include: list[str] | None, timeout: int | None = None, ) -> ProcessRelationsNodeSync: """Processes InfrahubNodeSync and their Relationships from the GraphQL query response. @@ -1861,7 +1864,7 @@ def _process_nodes_and_relationships( node = InfrahubNodeSync.from_graphql(client=self, branch=branch, data=item, timeout=timeout) nodes.append(node) - if prefetch_relationships: + if prefetch_relationships or include is not None: node._process_relationships(node_data=item, branch=branch, related_nodes=related_nodes, timeout=timeout) return ProcessRelationsNodeSync(nodes=nodes, related_nodes=related_nodes) @@ -1986,6 +1989,7 @@ def process_page(page_offset: int, page_number: int) -> tuple[dict, ProcessRelat branch=branch, prefetch_relationships=prefetch_relationships, timeout=timeout, + include=include, ) return response, process_result diff --git a/infrahub_sdk/node/node.py b/infrahub_sdk/node/node.py index ac387c9c..84d774e6 100644 --- a/infrahub_sdk/node/node.py +++ b/infrahub_sdk/node/node.py @@ -890,7 +890,6 @@ async def _process_relationships( branch: str, related_nodes: list[InfrahubNode], timeout: int | None = None, - include: list[str] | None = None, ) -> None: """Processes the Relationships of a InfrahubNode and add Related Nodes to a list. @@ -901,8 +900,6 @@ async def _process_relationships( timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds. 
""" for rel_name in self._relationships: - if include is not None and rel_name not in include: - continue rel = getattr(self, rel_name) if rel and isinstance(rel, RelatedNode): relation = node_data["node"].get(rel_name, None) @@ -1369,7 +1366,8 @@ def generate_query_data_node( continue peer_data: dict[str, Any] = {} - if rel_schema and prefetch_relationships: + should_fetch_relationship = prefetch_relationships or (include is not None and rel_name in include) + if rel_schema and should_fetch_relationship: peer_schema = self._client.schema.get(kind=rel_schema.peer, branch=self._branch) peer_node = InfrahubNodeSync(client=self._client, schema=peer_schema, branch=self._branch) peer_data = peer_node.generate_query_data_node(include=include, exclude=exclude, property=property) diff --git a/tests/unit/sdk/conftest.py b/tests/unit/sdk/conftest.py index 6943900e..a9c6c667 100644 --- a/tests/unit/sdk/conftest.py +++ b/tests/unit/sdk/conftest.py @@ -33,6 +33,11 @@ async def client() -> InfrahubClient: return InfrahubClient(config=Config(address="http://mock", insert_tracker=True, pagination_size=3)) +@pytest.fixture +async def client_sync() -> InfrahubClientSync: + return InfrahubClientSync(config=Config(address="http://mock", insert_tracker=True, pagination_size=3)) + + @pytest.fixture async def clients() -> BothClients: both = BothClients( @@ -2641,3 +2646,20 @@ async def mock_query_tasks_05(httpx_mock: HTTPXMock) -> HTTPXMock: is_reusable=True, ) return httpx_mock + + +async def set_builtin_tag_schema_cache(client) -> None: + # Set tag schema in cache to avoid needed to request the server. 
+ builtin_tag_schema = { + "version": "1.0", + "nodes": [ + { + "name": "Tag", + "namespace": "Builtin", + "default_filter": "name__value", + "display_label": "name__value", + "branch": "aware", + } + ], + } + client.schema.set_cache(builtin_tag_schema) diff --git a/tests/unit/sdk/test_node.py b/tests/unit/sdk/test_node.py index 5dd7730b..a0baa077 100644 --- a/tests/unit/sdk/test_node.py +++ b/tests/unit/sdk/test_node.py @@ -17,6 +17,7 @@ from infrahub_sdk.node.constants import SAFE_VALUE from infrahub_sdk.node.related_node import RelatedNode, RelatedNodeSync from infrahub_sdk.schema import GenericSchema, NodeSchemaAPI +from tests.unit.sdk.conftest import set_builtin_tag_schema_cache if TYPE_CHECKING: from infrahub_sdk.client import InfrahubClient, InfrahubClientSync @@ -1055,12 +1056,19 @@ async def test_query_data_generic_fragment(clients, mock_schema_query_02, client @pytest.mark.parametrize("client_type", client_types) -async def test_query_data_include_property(client, location_schema: NodeSchemaAPI, client_type) -> None: +async def test_query_data_include_property( + client, + client_sync, + location_schema: NodeSchemaAPI, + client_type, +) -> None: if client_type == "standard": + await set_builtin_tag_schema_cache(client) node = InfrahubNode(client=client, schema=location_schema) data = await node.generate_query_data(include=["tags"], property=True) else: - node = InfrahubNodeSync(client=client, schema=location_schema) + await set_builtin_tag_schema_cache(client_sync) + node = InfrahubNodeSync(client=client_sync, schema=location_schema) data = node.generate_query_data(include=["tags"], property=True) assert data == { @@ -1178,12 +1186,19 @@ async def test_query_data_include_property(client, location_schema: NodeSchemaAP @pytest.mark.parametrize("client_type", client_types) -async def test_query_data_include(client, location_schema: NodeSchemaAPI, client_type) -> None: +async def test_query_data_include( + client, + client_sync, + location_schema: 
NodeSchemaAPI, + client_type, +) -> None: if client_type == "standard": + await set_builtin_tag_schema_cache(client) node = InfrahubNode(client=client, schema=location_schema) data = await node.generate_query_data(include=["tags"]) else: - node = InfrahubNodeSync(client=client, schema=location_schema) + await set_builtin_tag_schema_cache(client_sync) + node = InfrahubNodeSync(client=client_sync, schema=location_schema) data = node.generate_query_data(include=["tags"]) assert data == { From 25e8f9fc67698c6fb3db9627df24d281a16f4509 Mon Sep 17 00:00:00 2001 From: LucasG0 Date: Wed, 3 Sep 2025 10:51:14 +0200 Subject: [PATCH 4/4] build relationships only if include contains rel name --- infrahub_sdk/client.py | 7 ++----- tests/unit/sdk/conftest.py | 17 ----------------- tests/unit/sdk/test_node.py | 18 +++++++++++++++++- 3 files changed, 19 insertions(+), 23 deletions(-) diff --git a/infrahub_sdk/client.py b/infrahub_sdk/client.py index f9a380bd..b301f3b0 100644 --- a/infrahub_sdk/client.py +++ b/infrahub_sdk/client.py @@ -558,9 +558,6 @@ async def _process_nodes_and_relationships( - 'related_nodes': A list of InfrahubNode objects representing the related nodes """ - # Ideally, include and relationships wouldn't be parameters of this method, they should only - # be used to build the request for the server, and this method would build node according to the response. 
- nodes: list[InfrahubNode] = [] related_nodes: list[InfrahubNode] = [] @@ -568,7 +565,7 @@ async def _process_nodes_and_relationships( node = await InfrahubNode.from_graphql(client=self, branch=branch, data=item, timeout=timeout) nodes.append(node) - if prefetch_relationships or include is not None: + if prefetch_relationships or (include and any(rel in include for rel in node._relationships)): await node._process_relationships( node_data=item, branch=branch, @@ -1864,7 +1861,7 @@ def _process_nodes_and_relationships( node = InfrahubNodeSync.from_graphql(client=self, branch=branch, data=item, timeout=timeout) nodes.append(node) - if prefetch_relationships or include is not None: + if prefetch_relationships or (include and any(rel in include for rel in node._relationships)): node._process_relationships(node_data=item, branch=branch, related_nodes=related_nodes, timeout=timeout) return ProcessRelationsNodeSync(nodes=nodes, related_nodes=related_nodes) diff --git a/tests/unit/sdk/conftest.py b/tests/unit/sdk/conftest.py index a9c6c667..5f0d7c2a 100644 --- a/tests/unit/sdk/conftest.py +++ b/tests/unit/sdk/conftest.py @@ -2646,20 +2646,3 @@ async def mock_query_tasks_05(httpx_mock: HTTPXMock) -> HTTPXMock: is_reusable=True, ) return httpx_mock - - -async def set_builtin_tag_schema_cache(client) -> None: - # Set tag schema in cache to avoid needed to request the server. 
- builtin_tag_schema = { - "version": "1.0", - "nodes": [ - { - "name": "Tag", - "namespace": "Builtin", - "default_filter": "name__value", - "display_label": "name__value", - "branch": "aware", - } - ], - } - client.schema.set_cache(builtin_tag_schema) diff --git a/tests/unit/sdk/test_node.py b/tests/unit/sdk/test_node.py index a0baa077..3cae8b56 100644 --- a/tests/unit/sdk/test_node.py +++ b/tests/unit/sdk/test_node.py @@ -17,7 +17,6 @@ from infrahub_sdk.node.constants import SAFE_VALUE from infrahub_sdk.node.related_node import RelatedNode, RelatedNodeSync from infrahub_sdk.schema import GenericSchema, NodeSchemaAPI -from tests.unit.sdk.conftest import set_builtin_tag_schema_cache if TYPE_CHECKING: from infrahub_sdk.client import InfrahubClient, InfrahubClientSync @@ -54,6 +53,23 @@ ] +async def set_builtin_tag_schema_cache(client) -> None: + # Set tag schema in cache to avoid needing to request the server. + builtin_tag_schema = { + "version": "1.0", + "nodes": [ + { + "name": "Tag", + "namespace": "Builtin", + "default_filter": "name__value", + "display_label": "name__value", + "branch": "aware", + } + ], + } + client.schema.set_cache(builtin_tag_schema) + + async def test_method_sanity() -> None: """Validate that there is at least one public method and that both clients look the same.""" assert async_node_methods