From f4f084b1bccaa94dbdd64d55cf8f3ac089674c0c Mon Sep 17 00:00:00 2001 From: Dave Bechberger Date: Wed, 22 Dec 2021 13:25:38 -0900 Subject: [PATCH 01/32] Added first draft of potential interface for Neptune --- awswrangler/neptune/__init__.py | 17 ++++ awswrangler/neptune/client.py | 10 ++ awswrangler/neptune/neptune.py | 164 ++++++++++++++++++++++++++++++++ 3 files changed, 191 insertions(+) create mode 100644 awswrangler/neptune/__init__.py create mode 100644 awswrangler/neptune/client.py create mode 100644 awswrangler/neptune/neptune.py diff --git a/awswrangler/neptune/__init__.py b/awswrangler/neptune/__init__.py new file mode 100644 index 000000000..205e70b59 --- /dev/null +++ b/awswrangler/neptune/__init__.py @@ -0,0 +1,17 @@ +"""Utilities Module for Amazon OpenSearch.""" + +from awswrangler.opensearch._read import search, search_by_sql +from awswrangler.opensearch._utils import connect +from awswrangler.opensearch._write import create_index, delete_index, index_csv, index_df, index_documents, index_json + +__all__ = [ + "connect", + "create_index", + "delete_index", + "index_csv", + "index_documents", + "index_df", + "index_json", + "search", + "search_by_sql", +] diff --git a/awswrangler/neptune/client.py b/awswrangler/neptune/client.py new file mode 100644 index 000000000..a5d1dbe83 --- /dev/null +++ b/awswrangler/neptune/client.py @@ -0,0 +1,10 @@ +from boto3 import Session + +DEFAULT_PORT = 8182 +DEFAULT_REGION = 'us-east-1' + +class NeptuneClient(): + def __init__(self, host: str, port: int = DEFAULT_PORT, ssl: bool = True, region: str = DEFAULT_REGION): + self.host = host + self.port = port + self.ssl = ssl diff --git a/awswrangler/neptune/neptune.py b/awswrangler/neptune/neptune.py new file mode 100644 index 000000000..a4ff9fed2 --- /dev/null +++ b/awswrangler/neptune/neptune.py @@ -0,0 +1,164 @@ +from awswrangler.neptune import client +from typing import Any +import pandas as pd + +def read_gremlin( + client: client, + traversal: str +) -> pd.DataFrame: + """Return results of a Gremlin traversal as pandas dataframe. + + Parameters + ---------- + client : neptune.Client + instance of the neptune client to use + traversal : str + The gremlin traversal to execute + + Returns + ------- + Union[pandas.DataFrame, Iterator[pandas.DataFrame]] + Results as Pandas DataFrame + + Examples + -------- + Run a Gremlin Query + + >>> import awswrangler as wr + >>> client = wr.neptune.Client(host='NEPTUNE-ENDPOINT') + >>> df = wr.neptune.gremlin.read(client, "g.V().limit(5).valueMap()") + + + """ + raise NotImplementedError + +def read_opencypher( + client: client, + traversal: str +) -> pd.DataFrame: + """Return results of a openCypher traversal as pandas dataframe. + + Parameters + ---------- + client : neptune.Client + instance of the neptune client to use + traversal : str + The gremlin traversal to execute + + Returns + ------- + Union[pandas.DataFrame, Iterator[pandas.DataFrame]] + Results as Pandas DataFrame + + Examples + -------- + Run an openCypher query + + >>> import awswrangler as wr + >>> client = wr.neptune.Client(host='NEPTUNE-ENDPOINT') + >>> df = wr.neptune.gremlin.read(client, "MATCH (n) RETURN n LIMIT 5") + + + """ + raise NotImplementedError + +def read_sparql( + client: client, + traversal: str +) -> pd.DataFrame: + """Return results of a SPARQL query as pandas dataframe. 
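+
+    A SELECT query is expected to map each solution to a DataFrame row and each
+    projected variable to a column, e.g. ``SELECT ?s ?p ?o WHERE { ?s ?p ?o }``
+    giving columns ``s``, ``p`` and ``o``. This draft only sketches the
+    interface, so that shape is an assumption rather than implemented behaviour.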
+ + Parameters + ---------- + client : neptune.Client + instance of the neptune client to use + traversal : str + The gremlin traversal to execute + + Returns + ------- + Union[pandas.DataFrame, Iterator[pandas.DataFrame]] + Results as Pandas DataFrame + + Examples + -------- + Run a SPARQL query + + >>> import awswrangler as wr + >>> client = wr.neptune.Client(host='NEPTUNE-ENDPOINT') + >>> df = wr.neptune.sparql.read(client, "PREFIX foaf: + SELECT ?name + WHERE { + ?person foaf:name ?name . + }") + """ + raise NotImplementedError + +def to_graph( + client: client, + df: pd.DataFrame +) -> None: + """Write records of triples stored in a DataFrame into Amazon Neptune. + + Parameters + ---------- + client : neptune.Client + instance of the neptune client to use + df : pandas.DataFrame + Pandas DataFrame https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html + + Returns + ------- + None + None. + + Examples + -------- + Writing to Amazon Neptune + + >>> import awswrangler as wr + >>> client = wr.neptune.Client(host='NEPTUNE-ENDPOINT') + >>> wr.neptune.sparql.to_graph( + ... df=df + ... ) + """ + raise NotImplementedError + +def to_graph( + client: client, + df: pd.DataFrame +) -> None: + """Write records stored in a DataFrame into Amazon Neptune. + + If using property graphs then DataFrames for vertices and edges must be written as separete + data frames. DataFrames for vertices must have a ~label column with the label and a ~id column for the vertex id. + If the ~id column does not exist, the specified id does not exists, or is empty then a new vertex will be added. + If no ~label column exists an exception will be thrown. + DataFrames for edges must have a ~id, ~label, ~to, and ~from column. If the ~id column does not exist, + the specified id does not exists, or is empty then a new edge will be added. If no ~label, ~to, or ~from column exists an exception will be thrown. + + If using RDF then the DataFrame must consist of triples with column names of s, p, and o. + + Parameters + ---------- + client : neptune.Client + instance of the neptune client to use + df : pandas.DataFrame + Pandas DataFrame https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html + + Returns + ------- + None + None. + + Examples + -------- + Writing to Amazon Neptune + + >>> import awswrangler as wr + >>> client = wr.neptune.Client(host='NEPTUNE-ENDPOINT') + >>> wr.neptune.gremlin.to_graph( + ... df=df + ... ) + """ + raise NotImplementedError \ No newline at end of file From 5f3a307ae49fcab9160e6e64fe36e5657bb09497 Mon Sep 17 00:00:00 2001 From: Dave Bechberger Date: Wed, 22 Dec 2021 13:59:26 -0900 Subject: [PATCH 02/32] Added first draft of potential interface for Neptune --- awswrangler/neptune/neptune.py | 36 +++------------------------------- 1 file changed, 3 insertions(+), 33 deletions(-) diff --git a/awswrangler/neptune/neptune.py b/awswrangler/neptune/neptune.py index a4ff9fed2..ef940c77f 100644 --- a/awswrangler/neptune/neptune.py +++ b/awswrangler/neptune/neptune.py @@ -94,50 +94,20 @@ def read_sparql( """ raise NotImplementedError -def to_graph( - client: client, - df: pd.DataFrame -) -> None: - """Write records of triples stored in a DataFrame into Amazon Neptune. - - Parameters - ---------- - client : neptune.Client - instance of the neptune client to use - df : pandas.DataFrame - Pandas DataFrame https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html - - Returns - ------- - None - None. 
- - Examples - -------- - Writing to Amazon Neptune - - >>> import awswrangler as wr - >>> client = wr.neptune.Client(host='NEPTUNE-ENDPOINT') - >>> wr.neptune.sparql.to_graph( - ... df=df - ... ) - """ - raise NotImplementedError - def to_graph( client: client, df: pd.DataFrame ) -> None: """Write records stored in a DataFrame into Amazon Neptune. - If using property graphs then DataFrames for vertices and edges must be written as separete - data frames. DataFrames for vertices must have a ~label column with the label and a ~id column for the vertex id. + If writing to a property graph then DataFrames for vertices and edges must be written as separetly. + DataFrames for vertices must have a ~label column with the label and a ~id column for the vertex id. If the ~id column does not exist, the specified id does not exists, or is empty then a new vertex will be added. If no ~label column exists an exception will be thrown. DataFrames for edges must have a ~id, ~label, ~to, and ~from column. If the ~id column does not exist, the specified id does not exists, or is empty then a new edge will be added. If no ~label, ~to, or ~from column exists an exception will be thrown. - If using RDF then the DataFrame must consist of triples with column names of s, p, and o. + If writing to RDF then the DataFrame must consist of triples with column names of s, p, and o. Parameters ---------- From 6db725e8dbbbf0d78ca32be7d89ba9719bd01943 Mon Sep 17 00:00:00 2001 From: Dave Bechberger Date: Thu, 23 Dec 2021 10:08:28 -0900 Subject: [PATCH 03/32] Fixed __init__.py file with the correct functions for Neptune --- awswrangler/neptune/__init__.py | 20 +++++++------------- 1 file changed, 7 insertions(+), 13 deletions(-) diff --git a/awswrangler/neptune/__init__.py b/awswrangler/neptune/__init__.py index 205e70b59..bedb5b924 100644 --- a/awswrangler/neptune/__init__.py +++ b/awswrangler/neptune/__init__.py @@ -1,17 +1,11 @@ -"""Utilities Module for Amazon OpenSearch.""" +"""Utilities Module for Amazon Neptune.""" -from awswrangler.opensearch._read import search, search_by_sql -from awswrangler.opensearch._utils import connect -from awswrangler.opensearch._write import create_index, delete_index, index_csv, index_df, index_documents, index_json +from awswrangler.neptune.neptune import read_gremlin, read_opencypher, read_sparql, to_graph +from awswrangler.neptune.client import NeptuneClient __all__ = [ - "connect", - "create_index", - "delete_index", - "index_csv", - "index_documents", - "index_df", - "index_json", - "search", - "search_by_sql", + "read_gremlin", + "read_opencypher", + "read_sparql", + "to_graph" ] From 2a21a5eba19b6038737a066c6f2cbedd2c732a29 Mon Sep 17 00:00:00 2001 From: Dave Bechberger Date: Tue, 11 Jan 2022 15:07:17 -0900 Subject: [PATCH 04/32] [skip ci] Updated signatures per initial feedback from draft PR --- awswrangler/neptune/client.py | 32 ++++++++++++++++++++++--- awswrangler/neptune/neptune.py | 44 ++++++++++++++++++++++++++++------ 2 files changed, 66 insertions(+), 10 deletions(-) diff --git a/awswrangler/neptune/client.py b/awswrangler/neptune/client.py index a5d1dbe83..ede09e42a 100644 --- a/awswrangler/neptune/client.py +++ b/awswrangler/neptune/client.py @@ -1,10 +1,36 @@ -from boto3 import Session +import boto3 +from awswrangler import exceptions +from typing import Optional DEFAULT_PORT = 8182 -DEFAULT_REGION = 'us-east-1' class NeptuneClient(): - def __init__(self, host: str, port: int = DEFAULT_PORT, ssl: bool = True, region: str = DEFAULT_REGION): + def __init__(self, host: 
str, port: int = DEFAULT_PORT, ssl: bool = True, + boto3_session: Optional[boto3.Session] = None, + region: Optional[str] = None, + ): self.host = host self.port = port self.ssl = ssl + self.boto3_session = self.__ensure_session(session=boto3_session) + if region is None: + region = self.__get_region_from_session() + else: + self.region = region + + + def __get_region_from_session(self) -> str: + """Extract region from session.""" + region: Optional[str] = self.boto3_session.region_name + if region is not None: + return region + raise exceptions.InvalidArgument("There is no region_name defined on boto3, please configure it.") + + def __ensure_session(self, session: boto3.Session = None) -> boto3.Session: + """Ensure that a valid boto3.Session will be returned.""" + if session is not None: + return session + elif boto3.DEFAULT_SESSION: + return boto3.DEFAULT_SESSION + else: + return boto3.Session() \ No newline at end of file diff --git a/awswrangler/neptune/neptune.py b/awswrangler/neptune/neptune.py index ef940c77f..abf9f4de5 100644 --- a/awswrangler/neptune/neptune.py +++ b/awswrangler/neptune/neptune.py @@ -26,7 +26,7 @@ def read_gremlin( >>> import awswrangler as wr >>> client = wr.neptune.Client(host='NEPTUNE-ENDPOINT') - >>> df = wr.neptune.gremlin.read(client, "g.V().limit(5).valueMap()") + >>> df = wr.neptune.read_gremlin(client, "g.V().limit(5).valueMap()") """ @@ -56,7 +56,7 @@ def read_opencypher( >>> import awswrangler as wr >>> client = wr.neptune.Client(host='NEPTUNE-ENDPOINT') - >>> df = wr.neptune.gremlin.read(client, "MATCH (n) RETURN n LIMIT 5") + >>> df = wr.neptune.read_opencypher(client, "MATCH (n) RETURN n LIMIT 5") """ @@ -86,7 +86,7 @@ def read_sparql( >>> import awswrangler as wr >>> client = wr.neptune.Client(host='NEPTUNE-ENDPOINT') - >>> df = wr.neptune.sparql.read(client, "PREFIX foaf: + >>> df = wr.neptune.read_sparql(client, "PREFIX foaf: SELECT ?name WHERE { ?person foaf:name ?name . @@ -94,20 +94,50 @@ def read_sparql( """ raise NotImplementedError -def to_graph( +def to_property_graph( client: client, df: pd.DataFrame ) -> None: """Write records stored in a DataFrame into Amazon Neptune. - If writing to a property graph then DataFrames for vertices and edges must be written as separetly. + If writing to a property graph then DataFrames for vertices and edges must be written separately. DataFrames for vertices must have a ~label column with the label and a ~id column for the vertex id. If the ~id column does not exist, the specified id does not exists, or is empty then a new vertex will be added. If no ~label column exists an exception will be thrown. DataFrames for edges must have a ~id, ~label, ~to, and ~from column. If the ~id column does not exist, the specified id does not exists, or is empty then a new edge will be added. If no ~label, ~to, or ~from column exists an exception will be thrown. - If writing to RDF then the DataFrame must consist of triples with column names of s, p, and o. + Parameters + ---------- + client : neptune.Client + instance of the neptune client to use + df : pandas.DataFrame + Pandas DataFrame https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html + + Returns + ------- + None + None. + + Examples + -------- + Writing to Amazon Neptune + + >>> import awswrangler as wr + >>> client = wr.neptune.Client(host='NEPTUNE-ENDPOINT') + >>> wr.neptune.gremlin.to_property_graph( + ... df=df + ... 
) + """ + raise NotImplementedError + +def to_rdf_graph( + client: client, + df: pd.DataFrame +) -> None: + """Write records stored in a DataFrame into Amazon Neptune. + + The DataFrame must consist of triples with column names of s, p, and o. Parameters ---------- @@ -127,7 +157,7 @@ def to_graph( >>> import awswrangler as wr >>> client = wr.neptune.Client(host='NEPTUNE-ENDPOINT') - >>> wr.neptune.gremlin.to_graph( + >>> wr.neptune.gremlin.to_rdf_graph( ... df=df ... ) """ From 41334b6d9ac16d0b82763c555e02ca8531209fe7 Mon Sep 17 00:00:00 2001 From: Dave Bechberger Date: Wed, 19 Jan 2022 16:42:52 -0900 Subject: [PATCH 05/32] [skip ci] WIP - Initial version of oc and gremlin endpoint read queries and result parsing --- awswrangler/__init__.py | 2 + awswrangler/neptune/__init__.py | 8 +- awswrangler/neptune/client.py | 171 ++++++++++- awswrangler/neptune/neptune.py | 64 ++-- poetry.lock | 509 +++++++++++++++++++++++++++++++- pyproject.toml | 1 + 6 files changed, 711 insertions(+), 44 deletions(-) diff --git a/awswrangler/__init__.py b/awswrangler/__init__.py index c3d5afe1f..17ca698d8 100644 --- a/awswrangler/__init__.py +++ b/awswrangler/__init__.py @@ -18,6 +18,7 @@ exceptions, lakeformation, mysql, + neptune, opensearch, postgresql, quicksight, @@ -47,6 +48,7 @@ "redshift", "lakeformation", "mysql", + "neptune", "postgresql", "secretsmanager", "sqlserver", diff --git a/awswrangler/neptune/__init__.py b/awswrangler/neptune/__init__.py index bedb5b924..15f181d4a 100644 --- a/awswrangler/neptune/__init__.py +++ b/awswrangler/neptune/__init__.py @@ -1,11 +1,13 @@ """Utilities Module for Amazon Neptune.""" -from awswrangler.neptune.neptune import read_gremlin, read_opencypher, read_sparql, to_graph -from awswrangler.neptune.client import NeptuneClient +from awswrangler.neptune.neptune import read_gremlin, read_opencypher, read_sparql, to_property_graph, to_rdf_graph +from awswrangler.neptune.client import connect __all__ = [ "read_gremlin", "read_opencypher", "read_sparql", - "to_graph" + "to_property_graph", + "to_rdf_graph", + "connect" ] diff --git a/awswrangler/neptune/client.py b/awswrangler/neptune/client.py index ede09e42a..ccfaada6a 100644 --- a/awswrangler/neptune/client.py +++ b/awswrangler/neptune/client.py @@ -1,23 +1,40 @@ -import boto3 from awswrangler import exceptions -from typing import Optional +import boto3 +from botocore.auth import SigV4Auth +from botocore.awsrequest import AWSRequest +import requests +from typing import Dict, Optional, Any +import nest_asyncio +from gremlin_python.driver import client +from gremlin_python.structure.graph import Path +from gremlin_python.structure.graph import Vertex +from gremlin_python.structure.graph import Edge +import logging + +_logger: logging.Logger = logging.getLogger(__name__) DEFAULT_PORT = 8182 +NEPTUNE_SERVICE_NAME = 'neptune-db' + class NeptuneClient(): - def __init__(self, host: str, port: int = DEFAULT_PORT, ssl: bool = True, - boto3_session: Optional[boto3.Session] = None, - region: Optional[str] = None, - ): + def __init__(self, host: str, + port: int = DEFAULT_PORT, + ssl: bool = True, + iam_enabled: bool = False, + boto3_session: Optional[boto3.Session] = None, + region: Optional[str] = None): self.host = host self.port = port - self.ssl = ssl + self._http_protocol = "https" if ssl else "http" + self._ws_protocol = "wss" if ssl else "ws" + self.iam_enabled = iam_enabled self.boto3_session = self.__ensure_session(session=boto3_session) if region is None: - region = self.__get_region_from_session() + self.region = 
self.__get_region_from_session() else: self.region = region - + self._http_session = requests.Session() def __get_region_from_session(self) -> str: """Extract region from session.""" @@ -26,6 +43,7 @@ def __get_region_from_session(self) -> str: return region raise exceptions.InvalidArgument("There is no region_name defined on boto3, please configure it.") + def __ensure_session(self, session: boto3.Session = None) -> boto3.Session: """Ensure that a valid boto3.Session will be returned.""" if session is not None: @@ -33,4 +51,137 @@ def __ensure_session(self, session: boto3.Session = None) -> boto3.Session: elif boto3.DEFAULT_SESSION: return boto3.DEFAULT_SESSION else: - return boto3.Session() \ No newline at end of file + return boto3.Session() + + + def _prepare_request(self, method, url, *, data=None, params=None, headers=None, service=NEPTUNE_SERVICE_NAME) -> requests.PreparedRequest: + request = requests.Request(method=method, url=url, data=data, params=params, headers=headers) + if self.boto3_session is not None: + aws_request = self._get_aws_request(method=method, url=url, data=data, params=params, headers=headers, + service=service) + request.headers = dict(aws_request.headers) + + return request.prepare() + + + def _get_aws_request(self, method, url, *, data=None, params=None, headers=None, service=NEPTUNE_SERVICE_NAME) -> AWSRequest: + req = AWSRequest(method=method, url=url, data=data, params=params, headers=headers) + if self.iam_enabled: + credentials = self.boto3_session.get_credentials() + try: + frozen_creds = credentials.get_frozen_credentials() + except AttributeError: + print("Could not find valid IAM credentials in any the following locations:\n") + print("env, assume-role, assume-role-with-web-identity, sso, shared-credential-file, custom-process, " + "config-file, ec2-credentials-file, boto-config, container-role, iam-role\n") + print("Go to https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html for more " + "details on configuring your IAM credentials.") + return req + SigV4Auth(frozen_creds, service, self.region).add_auth(req) + prepared_iam_req = req.prepare() + return prepared_iam_req + else: + return req + + + def read_opencypher(self, query: str, headers:Dict[str, Any] = None) -> Dict[str, Any]: + if headers is None: + headers = {} + + if 'content-type' not in headers: + headers['content-type'] = 'application/x-www-form-urlencoded' + + url = f'{self._http_protocol}://{self.host}:{self.port}/openCypher' + data = { + 'query': query + } + + req = self._prepare_request('POST', url, data=data, headers=headers) + res = self._http_session.send(req) + + return res.json() + + + def read_gremlin(self, query, headers:Dict[str, Any] = None) -> Dict[str, Any]: + try: + nest_asyncio.apply() + uri = f'{self._http_protocol}://{self.host}:{self.port}/gremlin' + request = self._prepare_request('GET', uri) + ws_url = f'{self._ws_protocol}://{self.host}:{self.port}/gremlin' + c = client.Client(ws_url, 'g', headers=dict(request.headers)) + result = c.submit(query) + future_results = result.all() + results = future_results.result() + c.close() + return self.gremlin_results_to_dict(results) + except Exception as e: + c.close() + raise e + + + def status(self): + url = f'{self._http_protocol}://{self.host}:{self.port}/status' + req = self._prepare_request('GET', url, data='') + res = self._http_session.send(req) + if res.status_code == 200: + return res.json() + else: + _logger.error("Error connecting to Amazon Neptune cluster. 
Please verify your connection details")
+            raise ConnectionError(res.status_code)
+
+
+    def gremlin_results_to_dict(self, result) -> Any:
+        # Accept a dict by itself or a list with a dict at position [0]; any
+        # other shape is unsupported and is signalled by returning False.
+        if isinstance(result, list):
+            if isinstance(result[0], dict):
+                tmp = result[0]
+            else:
+                tmp = dict(zip(result, result))
+        elif isinstance(result, dict):
+            tmp = result
+        else:
+            return False
+
+        # Even though we know we have a dict now, we still have work to do. It is
+        # quite likely that, due to the way TinkerPop works, the dict values are
+        # wrapped in a list of length one. In order to render the data, we first
+        # need to unroll those lists. If the length of a list is greater than one,
+        # we currently fail gracefully, as we don't know what to do with a result
+        # of the form {"k": [1, 2]}.
+
+        # If the value is a simple type like str, int etc. we render it as is. If
+        # the value is a Vertex, for now we use the ID of the vertex as the value
+        # that is sent to the plot as the y-axis value. This yields a plot, but not
+        # always a useful one!
+
+        # It is also possible that the key is a Vertex or an Edge. For a Vertex we
+        # can just use the str() representation. For an Edge, using the ID probably
+        # makes more sense.
+        d = dict()
+
+        for (k, v) in tmp.items():
+            # If the key is a Vertex or an Edge do special processing
+            if isinstance(k, Vertex):
+                k = str(k)
+            elif isinstance(k, Edge):
+                k = k.id
+
+            # If the value is a list do special processing
+            if isinstance(v, list):
+                if len(v) == 1:
+                    d[k] = v[0]
+                else:
+                    return False
+            else:
+                d[k] = v
+
+            # If the value is a Vertex or Edge do special processing
+            if isinstance(d[k], Vertex):
+                d[k] = d[k].__dict__
+            elif isinstance(d[k], Edge):
+                d[k] = d[k].__dict__
+
+        return d
+
+
+def connect(host: str, port: int, iam_enabled: bool = False, ssl: bool = True, **kwargs: Any) -> NeptuneClient:
+    # Pass ssl and iam_enabled by keyword: NeptuneClient declares ssl before
+    # iam_enabled, so passing them positionally here would swap the two flags.
+    return NeptuneClient(host, port, ssl=ssl, iam_enabled=iam_enabled, **kwargs)
diff --git a/awswrangler/neptune/neptune.py b/awswrangler/neptune/neptune.py
index abf9f4de5..3f5eb0073 100644
--- a/awswrangler/neptune/neptune.py
+++ b/awswrangler/neptune/neptune.py
@@ -1,10 +1,11 @@
-from awswrangler.neptune import client
-from typing import Any
+from awswrangler.neptune.client import NeptuneClient
+from typing import Any, Dict
 import pandas as pd
 
+
 def read_gremlin(
-    client: client,
-    traversal: str
+    client: NeptuneClient,
+    query: str
 ) -> pd.DataFrame:
     """Return results of a Gremlin traversal as pandas dataframe.
 
     Parameters
     ----------
-    client : neptune.Client
+    client : NeptuneClient
         instance of the neptune client to use
-    traversal : str
-        The gremlin traversal to execute
+    query : str
+        The Gremlin traversal to execute
 
     Returns
     -------
     Union[pandas.DataFrame, Iterator[pandas.DataFrame]]
         Results as Pandas DataFrame
 
     Examples
     --------
     Run a Gremlin Query
 
     >>> import awswrangler as wr
-    >>> client = wr.neptune.Client(host='NEPTUNE-ENDPOINT')
-    >>> df = wr.neptune.read_gremlin(client, "g.V().limit(5).valueMap()")
+    >>> client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=False, iam_enabled=False)
+    >>> df = wr.neptune.read_gremlin(client, "g.V().limit(1)")
+    """
+    results = client.read_gremlin(query)
+    df = pd.DataFrame.from_dict(results, orient='index')
+    return df
-
-
-    """
-    raise NotImplementedError
 
+
 def read_opencypher(
-    client: client,
-    traversal: str
+    client: NeptuneClient,
+    query: str
 ) -> pd.DataFrame:
-    """Return results of a openCypher traversal as pandas dataframe.
+    """Return results of an openCypher traversal as a pandas DataFrame.
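+
+    The openCypher HTTPS endpoint replies with JSON of the form
+    ``{"results": [...]}``, one entry per matched record; how each record maps
+    onto DataFrame columns is still open in this draft. A sketch of the assumed
+    conversion (hypothetical response shown):
+
+    >>> resp = {'results': [{'count(n)': 123}]}
+    >>> pd.DataFrame.from_dict(resp['results'])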
Parameters ---------- - client : neptune.Client + client : NeptuneClient instance of the neptune client to use - traversal : str - The gremlin traversal to execute + query : str + The openCypher query to execute Returns ------- @@ -55,25 +59,26 @@ def read_opencypher( Run an openCypher query >>> import awswrangler as wr - >>> client = wr.neptune.Client(host='NEPTUNE-ENDPOINT') - >>> df = wr.neptune.read_opencypher(client, "MATCH (n) RETURN n LIMIT 5") - - + >>> client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=True, iam_enabled=False) + >>> resp = wr.neptune.read_opencypher(client, "MATCH (n) RETURN n LIMIT 1") """ - raise NotImplementedError + resp = client.read_opencypher(query) + df = pd.DataFrame.from_dict(resp) + return df + def read_sparql( - client: client, - traversal: str + client: NeptuneClient, + query: str ) -> pd.DataFrame: """Return results of a SPARQL query as pandas dataframe. Parameters ---------- - client : neptune.Client + client : NeptuneClient instance of the neptune client to use - traversal : str - The gremlin traversal to execute + query : str + The SPARQL traversal to execute Returns ------- @@ -94,8 +99,9 @@ def read_sparql( """ raise NotImplementedError + def to_property_graph( - client: client, + client: NeptuneClient, df: pd.DataFrame ) -> None: """Write records stored in a DataFrame into Amazon Neptune. @@ -109,7 +115,7 @@ def to_property_graph( Parameters ---------- - client : neptune.Client + client : NeptuneClient instance of the neptune client to use df : pandas.DataFrame Pandas DataFrame https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html @@ -131,8 +137,9 @@ def to_property_graph( """ raise NotImplementedError + def to_rdf_graph( - client: client, + client: NeptuneClient, df: pd.DataFrame ) -> None: """Write records stored in a DataFrame into Amazon Neptune. @@ -141,7 +148,7 @@ def to_rdf_graph( Parameters ---------- - client : neptune.Client + client : NeptuneClient instance of the neptune client to use df : pandas.DataFrame Pandas DataFrame https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html @@ -161,4 +168,5 @@ def to_rdf_graph( ... df=df ... 
) """ - raise NotImplementedError \ No newline at end of file + raise NotImplementedError + diff --git a/poetry.lock b/poetry.lock index ec44d8a88..4b7da69a4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,3 +1,45 @@ +[[package]] +name = "aenum" +version = "2.2.6" +description = "Advanced Enumerations (compatible with Python's stdlib Enum), NamedTuples, and NamedConstants" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "aiohttp" +version = "3.8.1" +description = "Async http client/server framework (asyncio)" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = ">=4.0.0a3,<5.0" +asynctest = {version = "0.13.0", markers = "python_version < \"3.8\""} +attrs = ">=17.3.0" +charset-normalizer = ">=2.0,<3.0" +frozenlist = ">=1.1.1" +idna-ssl = {version = ">=1.0", markers = "python_version < \"3.7\""} +multidict = ">=4.5,<7.0" +typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["aiodns", "brotli", "cchardet"] + +[[package]] +name = "aiosignal" +version = "1.2.0" +description = "aiosignal: a list of registered asynchronous callbacks" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +frozenlist = ">=1.1.0" + [[package]] name = "alabaster" version = "0.7.12" @@ -97,6 +139,25 @@ category = "dev" optional = false python-versions = ">=3.5" +[[package]] +name = "async-timeout" +version = "4.0.2" +description = "Timeout context manager for asyncio programs" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = {version = ">=3.6.5", markers = "python_version < \"3.8\""} + +[[package]] +name = "asynctest" +version = "0.13.0" +description = "Enhance the standard unittest package with features for testing asyncio libraries" +category = "main" +optional = false +python-versions = ">=3.5" + [[package]] name = "atomicwrites" version = "1.4.0" @@ -109,7 +170,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" name = "attrs" version = "21.4.0" description = "Classes Without Boilerplate" -category = "dev" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" @@ -435,6 +496,14 @@ mccabe = ">=0.6.0,<0.7.0" pycodestyle = ">=2.8.0,<2.9.0" pyflakes = ">=2.4.0,<2.5.0" +[[package]] +name = "frozenlist" +version = "1.2.0" +description = "A list-like structure which implements collections.abc.MutableSequence" +category = "main" +optional = false +python-versions = ">=3.6" + [[package]] name = "fsspec" version = "2021.11.1" @@ -465,6 +534,21 @@ sftp = ["paramiko"] smb = ["smbprotocol"] ssh = ["paramiko"] +[[package]] +name = "gremlinpython" +version = "3.5.2" +description = "Gremlin-Python for Apache TinkerPop" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +aenum = ">=1.4.5,<3.0.0" +aiohttp = ">=3.8.0,<=3.8.1" +isodate = ">=0.6.0,<1.0.0" +nest-asyncio = "*" +six = ">=1.10.0,<2.0.0" + [[package]] name = "idna" version = "3.3" @@ -473,6 +557,17 @@ category = "main" optional = false python-versions = ">=3.5" +[[package]] +name = "idna-ssl" +version = "1.1.0" +description = "Patch ssl.match_hostname for Unicode(idna) domains support" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +idna = ">=2.0" + [[package]] name = "imagesize" version = "1.3.0" @@ -592,6 +687,17 @@ category = "dev" optional = 
false python-versions = "*" +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +six = "*" + [[package]] name = "isort" version = "5.10.1" @@ -886,6 +992,14 @@ server = ["PyYAML (>=5.1)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "ecdsa ssm = ["PyYAML (>=5.1)", "dataclasses"] xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] +[[package]] +name = "multidict" +version = "5.2.0" +description = "multidict implementation" +category = "main" +optional = false +python-versions = ">=3.6" + [[package]] name = "mypy" version = "0.910" @@ -1027,7 +1141,7 @@ sphinx = ">=1.8" name = "nest-asyncio" version = "1.5.4" description = "Patch asyncio to allow nested event loops" -category = "dev" +category = "main" optional = false python-versions = ">=3.5" @@ -2006,7 +2120,7 @@ python-versions = "*" name = "typing-extensions" version = "4.0.1" description = "Backported and Experimental Type Hints for Python 3.6+" -category = "dev" +category = "main" optional = false python-versions = ">=3.6" @@ -2123,6 +2237,19 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +[[package]] +name = "yarl" +version = "1.7.2" +description = "Yet another URL library" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" +typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} + [[package]] name = "zipp" version = "3.6.0" @@ -2141,9 +2268,96 @@ sqlserver = ["pyodbc"] [metadata] lock-version = "1.1" python-versions = ">=3.6.2, <3.11" +<<<<<<< HEAD content-hash = "87068395350b46ec4bfdae0a07271120ed2ab4af8377581825e487c085a82915" +======= +content-hash = "21cf29b2b21a5f5a820403d164a764354b043846cbd9d79e753d2a00828d08d8" +>>>>>>> 08f1cde ([skip ci] WIP - Initial version of oc and gremlin endpoint read queries and result parsing) [metadata.files] +aenum = [ + {file = "aenum-2.2.6-py2-none-any.whl", hash = "sha256:aaebe735508d9cbc72cd6adfb59660a5e676dfbeb6fb24fb090041e7ddb8d3b3"}, + {file = "aenum-2.2.6-py3-none-any.whl", hash = "sha256:f9d20f7302ce3dc3639b3f75c3b3e146f3b22409a6b4513c1f0bd6dbdfcbd8c1"}, + {file = "aenum-2.2.6.tar.gz", hash = "sha256:260225470b49429f5893a195a8b99c73a8d182be42bf90c37c93e7b20e44eaae"}, +] +aiohttp = [ + {file = "aiohttp-3.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1ed0b6477896559f17b9eaeb6d38e07f7f9ffe40b9f0f9627ae8b9926ae260a8"}, + {file = "aiohttp-3.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7dadf3c307b31e0e61689cbf9e06be7a867c563d5a63ce9dca578f956609abf8"}, + {file = "aiohttp-3.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a79004bb58748f31ae1cbe9fa891054baaa46fb106c2dc7af9f8e3304dc30316"}, + {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12de6add4038df8f72fac606dff775791a60f113a725c960f2bab01d8b8e6b15"}, + {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f0d5f33feb5f69ddd57a4a4bd3d56c719a141080b445cbf18f238973c5c9923"}, + {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eaba923151d9deea315be1f3e2b31cc39a6d1d2f682f942905951f4e40200922"}, + {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:099ebd2c37ac74cce10a3527d2b49af80243e2a4fa39e7bce41617fbc35fa3c1"}, + {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2e5d962cf7e1d426aa0e528a7e198658cdc8aa4fe87f781d039ad75dcd52c516"}, + {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fa0ffcace9b3aa34d205d8130f7873fcfefcb6a4dd3dd705b0dab69af6712642"}, + {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:61bfc23df345d8c9716d03717c2ed5e27374e0fe6f659ea64edcd27b4b044cf7"}, + {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:31560d268ff62143e92423ef183680b9829b1b482c011713ae941997921eebc8"}, + {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:01d7bdb774a9acc838e6b8f1d114f45303841b89b95984cbb7d80ea41172a9e3"}, + {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:97ef77eb6b044134c0b3a96e16abcb05ecce892965a2124c566af0fd60f717e2"}, + {file = "aiohttp-3.8.1-cp310-cp310-win32.whl", hash = "sha256:c2aef4703f1f2ddc6df17519885dbfa3514929149d3ff900b73f45998f2532fa"}, + {file = "aiohttp-3.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:713ac174a629d39b7c6a3aa757b337599798da4c1157114a314e4e391cd28e32"}, + {file = "aiohttp-3.8.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:473d93d4450880fe278696549f2e7aed8cd23708c3c1997981464475f32137db"}, + {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b5eeae8e019e7aad8af8bb314fb908dd2e028b3cdaad87ec05095394cce632"}, + {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3af642b43ce56c24d063325dd2cf20ee012d2b9ba4c3c008755a301aaea720ad"}, + {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3630c3ef435c0a7c549ba170a0633a56e92629aeed0e707fec832dee313fb7a"}, + {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4a4a4e30bf1edcad13fb0804300557aedd07a92cabc74382fdd0ba6ca2661091"}, + {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6f8b01295e26c68b3a1b90efb7a89029110d3a4139270b24fda961893216c440"}, + {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a25fa703a527158aaf10dafd956f7d42ac6d30ec80e9a70846253dd13e2f067b"}, + {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5bfde62d1d2641a1f5173b8c8c2d96ceb4854f54a44c23102e2ccc7e02f003ec"}, + {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:51467000f3647d519272392f484126aa716f747859794ac9924a7aafa86cd411"}, + {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:03a6d5349c9ee8f79ab3ff3694d6ce1cfc3ced1c9d36200cb8f08ba06bd3b782"}, + {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:102e487eeb82afac440581e5d7f8f44560b36cf0bdd11abc51a46c1cd88914d4"}, + {file = "aiohttp-3.8.1-cp36-cp36m-win32.whl", hash = "sha256:4aed991a28ea3ce320dc8ce655875e1e00a11bdd29fe9444dd4f88c30d558602"}, + {file = "aiohttp-3.8.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b0e20cddbd676ab8a64c774fefa0ad787cc506afd844de95da56060348021e96"}, + {file = "aiohttp-3.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:37951ad2f4a6df6506750a23f7cbabad24c73c65f23f72e95897bb2cecbae676"}, + {file = 
"aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c23b1ad869653bc818e972b7a3a79852d0e494e9ab7e1a701a3decc49c20d51"}, + {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15b09b06dae900777833fe7fc4b4aa426556ce95847a3e8d7548e2d19e34edb8"}, + {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:477c3ea0ba410b2b56b7efb072c36fa91b1e6fc331761798fa3f28bb224830dd"}, + {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2f2f69dca064926e79997f45b2f34e202b320fd3782f17a91941f7eb85502ee2"}, + {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ef9612483cb35171d51d9173647eed5d0069eaa2ee812793a75373447d487aa4"}, + {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6d69f36d445c45cda7b3b26afef2fc34ef5ac0cdc75584a87ef307ee3c8c6d00"}, + {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:55c3d1072704d27401c92339144d199d9de7b52627f724a949fc7d5fc56d8b93"}, + {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b9d00268fcb9f66fbcc7cd9fe423741d90c75ee029a1d15c09b22d23253c0a44"}, + {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:07b05cd3305e8a73112103c834e91cd27ce5b4bd07850c4b4dbd1877d3f45be7"}, + {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c34dc4958b232ef6188c4318cb7b2c2d80521c9a56c52449f8f93ab7bc2a8a1c"}, + {file = "aiohttp-3.8.1-cp37-cp37m-win32.whl", hash = "sha256:d2f9b69293c33aaa53d923032fe227feac867f81682f002ce33ffae978f0a9a9"}, + {file = "aiohttp-3.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6ae828d3a003f03ae31915c31fa684b9890ea44c9c989056fea96e3d12a9fa17"}, + {file = "aiohttp-3.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0c7ebbbde809ff4e970824b2b6cb7e4222be6b95a296e46c03cf050878fc1785"}, + {file = "aiohttp-3.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8b7ef7cbd4fec9a1e811a5de813311ed4f7ac7d93e0fda233c9b3e1428f7dd7b"}, + {file = "aiohttp-3.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c3d6a4d0619e09dcd61021debf7059955c2004fa29f48788a3dfaf9c9901a7cd"}, + {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:718626a174e7e467f0558954f94af117b7d4695d48eb980146016afa4b580b2e"}, + {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:589c72667a5febd36f1315aa6e5f56dd4aa4862df295cb51c769d16142ddd7cd"}, + {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ed076098b171573161eb146afcb9129b5ff63308960aeca4b676d9d3c35e700"}, + {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:086f92daf51a032d062ec5f58af5ca6a44d082c35299c96376a41cbb33034675"}, + {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:11691cf4dc5b94236ccc609b70fec991234e7ef8d4c02dd0c9668d1e486f5abf"}, + {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:31d1e1c0dbf19ebccbfd62eff461518dcb1e307b195e93bba60c965a4dcf1ba0"}, + {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:11a67c0d562e07067c4e86bffc1553f2cf5b664d6111c894671b2b8712f3aba5"}, + {file = 
"aiohttp-3.8.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:bb01ba6b0d3f6c68b89fce7305080145d4877ad3acaed424bae4d4ee75faa950"}, + {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:44db35a9e15d6fe5c40d74952e803b1d96e964f683b5a78c3cc64eb177878155"}, + {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:844a9b460871ee0a0b0b68a64890dae9c415e513db0f4a7e3cab41a0f2fedf33"}, + {file = "aiohttp-3.8.1-cp38-cp38-win32.whl", hash = "sha256:7d08744e9bae2ca9c382581f7dce1273fe3c9bae94ff572c3626e8da5b193c6a"}, + {file = "aiohttp-3.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:04d48b8ce6ab3cf2097b1855e1505181bdd05586ca275f2505514a6e274e8e75"}, + {file = "aiohttp-3.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f5315a2eb0239185af1bddb1abf472d877fede3cc8d143c6cddad37678293237"}, + {file = "aiohttp-3.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a996d01ca39b8dfe77440f3cd600825d05841088fd6bc0144cc6c2ec14cc5f74"}, + {file = "aiohttp-3.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:13487abd2f761d4be7c8ff9080de2671e53fff69711d46de703c310c4c9317ca"}, + {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea302f34477fda3f85560a06d9ebdc7fa41e82420e892fc50b577e35fc6a50b2"}, + {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2f635ce61a89c5732537a7896b6319a8fcfa23ba09bec36e1b1ac0ab31270d2"}, + {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e999f2d0e12eea01caeecb17b653f3713d758f6dcc770417cf29ef08d3931421"}, + {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0770e2806a30e744b4e21c9d73b7bee18a1cfa3c47991ee2e5a65b887c49d5cf"}, + {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d15367ce87c8e9e09b0f989bfd72dc641bcd04ba091c68cd305312d00962addd"}, + {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6c7cefb4b0640703eb1069835c02486669312bf2f12b48a748e0a7756d0de33d"}, + {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:71927042ed6365a09a98a6377501af5c9f0a4d38083652bcd2281a06a5976724"}, + {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:28d490af82bc6b7ce53ff31337a18a10498303fe66f701ab65ef27e143c3b0ef"}, + {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:b6613280ccedf24354406caf785db748bebbddcf31408b20c0b48cb86af76866"}, + {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81e3d8c34c623ca4e36c46524a3530e99c0bc95ed068fd6e9b55cb721d408fb2"}, + {file = "aiohttp-3.8.1-cp39-cp39-win32.whl", hash = "sha256:7187a76598bdb895af0adbd2fb7474d7f6025d170bc0a1130242da817ce9e7d1"}, + {file = "aiohttp-3.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:1c182cb873bc91b411e184dab7a2b664d4fea2743df0e4d57402f7f3fa644bac"}, + {file = "aiohttp-3.8.1.tar.gz", hash = "sha256:fc5471e1a54de15ef71c1bc6ebe80d4dc681ea600e68bfd1cbce40427f0b7578"}, +] +aiosignal = [ + {file = "aiosignal-1.2.0-py3-none-any.whl", hash = "sha256:26e62109036cd181df6e6ad646f91f0dcfd05fe16d0cb924138ff2ab75d64e3a"}, + {file = "aiosignal-1.2.0.tar.gz", hash = "sha256:78ed67db6c7b7ced4f98e495e572106d5c432a93e1ddd1bf475e1dc05f5b7df2"}, +] alabaster = [ {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, {file 
= "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, @@ -2195,6 +2409,14 @@ async-generator = [ {file = "async_generator-1.10-py3-none-any.whl", hash = "sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b"}, {file = "async_generator-1.10.tar.gz", hash = "sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144"}, ] +async-timeout = [ + {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, + {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, +] +asynctest = [ + {file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"}, + {file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"}, +] atomicwrites = [ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, @@ -2421,14 +2643,95 @@ flake8 = [ {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, ] +frozenlist = [ + {file = "frozenlist-1.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:977a1438d0e0d96573fd679d291a1542097ea9f4918a8b6494b06610dfeefbf9"}, + {file = "frozenlist-1.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a8d86547a5e98d9edd47c432f7a14b0c5592624b496ae9880fb6332f34af1edc"}, + {file = "frozenlist-1.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:181754275d5d32487431a0a29add4f897968b7157204bc1eaaf0a0ce80c5ba7d"}, + {file = "frozenlist-1.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5df31bb2b974f379d230a25943d9bf0d3bc666b4b0807394b131a28fca2b0e5f"}, + {file = "frozenlist-1.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4766632cd8a68e4f10f156a12c9acd7b1609941525569dd3636d859d79279ed3"}, + {file = "frozenlist-1.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16eef427c51cb1203a7c0ab59d1b8abccaba9a4f58c4bfca6ed278fc896dc193"}, + {file = "frozenlist-1.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:01d79515ed5aa3d699b05f6bdcf1fe9087d61d6b53882aa599a10853f0479c6c"}, + {file = "frozenlist-1.2.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:28e164722ea0df0cf6d48c4d5bdf3d19e87aaa6dfb39b0ba91153f224b912020"}, + {file = "frozenlist-1.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e63ad0beef6ece06475d29f47d1f2f29727805376e09850ebf64f90777962792"}, + {file = "frozenlist-1.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:41de4db9b9501679cf7cddc16d07ac0f10ef7eb58c525a1c8cbff43022bddca4"}, + {file = "frozenlist-1.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c6a9d84ee6427b65a81fc24e6ef589cb794009f5ca4150151251c062773e7ed2"}, + {file = "frozenlist-1.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:f5f3b2942c3b8b9bfe76b408bbaba3d3bb305ee3693e8b1d631fe0a0d4f93673"}, + {file = 
"frozenlist-1.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c98d3c04701773ad60d9545cd96df94d955329efc7743fdb96422c4b669c633b"}, + {file = "frozenlist-1.2.0-cp310-cp310-win32.whl", hash = "sha256:72cfbeab7a920ea9e74b19aa0afe3b4ad9c89471e3badc985d08756efa9b813b"}, + {file = "frozenlist-1.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:11ff401951b5ac8c0701a804f503d72c048173208490c54ebb8d7bb7c07a6d00"}, + {file = "frozenlist-1.2.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b46f997d5ed6d222a863b02cdc9c299101ee27974d9bbb2fd1b3c8441311c408"}, + {file = "frozenlist-1.2.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:351686ca020d1bcd238596b1fa5c8efcbc21bffda9d0efe237aaa60348421e2a"}, + {file = "frozenlist-1.2.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfbaa08cf1452acad9cb1c1d7b89394a41e712f88df522cea1a0f296b57782a0"}, + {file = "frozenlist-1.2.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2ae2f5e9fa10805fb1c9adbfefaaecedd9e31849434be462c3960a0139ed729"}, + {file = "frozenlist-1.2.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6790b8d96bbb74b7a6f4594b6f131bd23056c25f2aa5d816bd177d95245a30e3"}, + {file = "frozenlist-1.2.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:41f62468af1bd4e4b42b5508a3fe8cc46a693f0cdd0ca2f443f51f207893d837"}, + {file = "frozenlist-1.2.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:ec6cf345771cdb00791d271af9a0a6fbfc2b6dd44cb753f1eeaa256e21622adb"}, + {file = "frozenlist-1.2.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:14a5cef795ae3e28fb504b73e797c1800e9249f950e1c964bb6bdc8d77871161"}, + {file = "frozenlist-1.2.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:8b54cdd2fda15467b9b0bfa78cee2ddf6dbb4585ef23a16e14926f4b076dfae4"}, + {file = "frozenlist-1.2.0-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:f025f1d6825725b09c0038775acab9ae94264453a696cc797ce20c0769a7b367"}, + {file = "frozenlist-1.2.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:84e97f59211b5b9083a2e7a45abf91cfb441369e8bb6d1f5287382c1c526def3"}, + {file = "frozenlist-1.2.0-cp36-cp36m-win32.whl", hash = "sha256:c5328ed53fdb0a73c8a50105306a3bc013e5ca36cca714ec4f7bd31d38d8a97f"}, + {file = "frozenlist-1.2.0-cp36-cp36m-win_amd64.whl", hash = "sha256:9ade70aea559ca98f4b1b1e5650c45678052e76a8ab2f76d90f2ac64180215a2"}, + {file = "frozenlist-1.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0d3ffa8772464441b52489b985d46001e2853a3b082c655ec5fad9fb6a3d618"}, + {file = "frozenlist-1.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3457f8cf86deb6ce1ba67e120f1b0128fcba1332a180722756597253c465fc1d"}, + {file = "frozenlist-1.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a72eecf37eface331636951249d878750db84034927c997d47f7f78a573b72b"}, + {file = "frozenlist-1.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:acc4614e8d1feb9f46dd829a8e771b8f5c4b1051365d02efb27a3229048ade8a"}, + {file = "frozenlist-1.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:87521e32e18a2223311afc2492ef2d99946337da0779ddcda77b82ee7319df59"}, + {file = "frozenlist-1.2.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:8b4c7665a17c3a5430edb663e4ad4e1ad457614d1b2f2b7f87052e2ef4fa45ca"}, + {file = "frozenlist-1.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ed58803563a8c87cf4c0771366cf0ad1aa265b6b0ae54cbbb53013480c7ad74d"}, + {file = "frozenlist-1.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:aa44c4740b4e23fcfa259e9dd52315d2b1770064cde9507457e4c4a65a04c397"}, + {file = "frozenlist-1.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:2de5b931701257d50771a032bba4e448ff958076380b049fd36ed8738fdb375b"}, + {file = "frozenlist-1.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:6e105013fa84623c057a4381dc8ea0361f4d682c11f3816cc80f49a1f3bc17c6"}, + {file = "frozenlist-1.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:705c184b77565955a99dc360f359e8249580c6b7eaa4dc0227caa861ef46b27a"}, + {file = "frozenlist-1.2.0-cp37-cp37m-win32.whl", hash = "sha256:a37594ad6356e50073fe4f60aa4187b97d15329f2138124d252a5a19c8553ea4"}, + {file = "frozenlist-1.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:25b358aaa7dba5891b05968dd539f5856d69f522b6de0bf34e61f133e077c1a4"}, + {file = "frozenlist-1.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af2a51c8a381d76eabb76f228f565ed4c3701441ecec101dd18be70ebd483cfd"}, + {file = "frozenlist-1.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:82d22f6e6f2916e837c91c860140ef9947e31194c82aaeda843d6551cec92f19"}, + {file = "frozenlist-1.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1cfe6fef507f8bac40f009c85c7eddfed88c1c0d38c75e72fe10476cef94e10f"}, + {file = "frozenlist-1.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f602e380a5132880fa245c92030abb0fc6ff34e0c5500600366cedc6adb06a"}, + {file = "frozenlist-1.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ad065b2ebd09f32511ff2be35c5dfafee6192978b5a1e9d279a5c6e121e3b03"}, + {file = "frozenlist-1.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bc93f5f62df3bdc1f677066327fc81f92b83644852a31c6aa9b32c2dde86ea7d"}, + {file = "frozenlist-1.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:89fdfc84c6bf0bff2ff3170bb34ecba8a6911b260d318d377171429c4be18c73"}, + {file = "frozenlist-1.2.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:47b2848e464883d0bbdcd9493c67443e5e695a84694efff0476f9059b4cb6257"}, + {file = "frozenlist-1.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4f52d0732e56906f8ddea4bd856192984650282424049c956857fed43697ea43"}, + {file = "frozenlist-1.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:16ef7dd5b7d17495404a2e7a49bac1bc13d6d20c16d11f4133c757dd94c4144c"}, + {file = "frozenlist-1.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1cf63243bc5f5c19762943b0aa9e0d3fb3723d0c514d820a18a9b9a5ef864315"}, + {file = "frozenlist-1.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:54a1e09ab7a69f843cd28fefd2bcaf23edb9e3a8d7680032c8968b8ac934587d"}, + {file = "frozenlist-1.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:954b154a4533ef28bd3e83ffdf4eadf39deeda9e38fb8feaf066d6069885e034"}, + {file = "frozenlist-1.2.0-cp38-cp38-win32.whl", hash = "sha256:cb3957c39668d10e2b486acc85f94153520a23263b6401e8f59422ef65b9520d"}, + {file = "frozenlist-1.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:0a7c7cce70e41bc13d7d50f0e5dd175f14a4f1837a8549b0936ed0cbe6170bf9"}, + {file = "frozenlist-1.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:4c457220468d734e3077580a3642b7f682f5fd9507f17ddf1029452450912cdc"}, + {file = "frozenlist-1.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e74f8b4d8677ebb4015ac01fcaf05f34e8a1f22775db1f304f497f2f88fdc697"}, + {file = "frozenlist-1.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fbd4844ff111449f3bbe20ba24fbb906b5b1c2384d0f3287c9f7da2354ce6d23"}, + {file = "frozenlist-1.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0081a623c886197ff8de9e635528fd7e6a387dccef432149e25c13946cb0cd0"}, + {file = "frozenlist-1.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9b6e21e5770df2dea06cb7b6323fbc008b13c4a4e3b52cb54685276479ee7676"}, + {file = "frozenlist-1.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:406aeb340613b4b559db78d86864485f68919b7141dec82aba24d1477fd2976f"}, + {file = "frozenlist-1.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:878ebe074839d649a1cdb03a61077d05760624f36d196884a5cafb12290e187b"}, + {file = "frozenlist-1.2.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1fef737fd1388f9b93bba8808c5f63058113c10f4e3c0763ced68431773f72f9"}, + {file = "frozenlist-1.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4a495c3d513573b0b3f935bfa887a85d9ae09f0627cf47cad17d0cc9b9ba5c38"}, + {file = "frozenlist-1.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e7d0dd3e727c70c2680f5f09a0775525229809f1a35d8552b92ff10b2b14f2c2"}, + {file = "frozenlist-1.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:66a518731a21a55b7d3e087b430f1956a36793acc15912e2878431c7aec54210"}, + {file = "frozenlist-1.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:94728f97ddf603d23c8c3dd5cae2644fa12d33116e69f49b1644a71bb77b89ae"}, + {file = "frozenlist-1.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c1e8e9033d34c2c9e186e58279879d78c94dd365068a3607af33f2bc99357a53"}, + {file = "frozenlist-1.2.0-cp39-cp39-win32.whl", hash = "sha256:83334e84a290a158c0c4cc4d22e8c7cfe0bba5b76d37f1c2509dabd22acafe15"}, + {file = "frozenlist-1.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:735f386ec522e384f511614c01d2ef9cf799f051353876b4c6fb93ef67a6d1ee"}, + {file = "frozenlist-1.2.0.tar.gz", hash = "sha256:68201be60ac56aff972dc18085800b6ee07973c49103a8aba669dee3d71079de"}, +] fsspec = [ {file = "fsspec-2021.11.1-py3-none-any.whl", hash = "sha256:bcb136caa37e1470dd8314a7d3917cb9b25dd9da44c10d36df556ab4ef038185"}, {file = "fsspec-2021.11.1.tar.gz", hash = "sha256:03683e606651d5e4bd9180525d57477bd5430e5dc68d2e459835dc14cecc3dd4"}, ] +gremlinpython = [ + {file = "gremlinpython-3.5.2-py2.py3-none-any.whl", hash = "sha256:333c8d2d67d1c5350a7ade2efd8d2acdb81968c01c09fdea80269292deadb582"}, + {file = "gremlinpython-3.5.2.tar.gz", hash = "sha256:2ebaa1f360ba62c48705b056ba3d4e92e448287431f42a92eef4b2217ae51c5b"}, +] idna = [ {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, ] +idna-ssl = [ + {file = "idna-ssl-1.1.0.tar.gz", hash = "sha256:a933e3bb13da54383f9e8f35dc4f9cb9eb9b3b78c6b36f311254d6d0d92c6c7c"}, +] imagesize = [ {file = "imagesize-1.3.0-py2.py3-none-any.whl", hash = "sha256:1db2f82529e53c3e929e8926a1fa9235aa82d0bd0c580359c67ec31b2fddaa8c"}, {file = "imagesize-1.3.0.tar.gz", hash = 
"sha256:cd1750d452385ca327479d45b64d9c7729ecf0b3969a58148298c77092261f9d"}, @@ -2486,6 +2789,10 @@ ipython-genutils = [ {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, ] +isodate = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] isort = [ {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, @@ -2641,12 +2948,28 @@ lxml = [ {file = "lxml-4.7.1.tar.gz", hash = "sha256:a1613838aa6b89af4ba10a0f3a972836128801ed008078f8c1244e65958f1b24"}, ] markupsafe = [ + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, + {file = 
"MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, @@ -2655,14 +2978,27 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, {file = 
"MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, @@ -2672,6 +3008,12 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, + {file = 
"MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, @@ -2688,6 +3030,80 @@ moto = [ {file = "moto-2.3.1-py2.py3-none-any.whl", hash = "sha256:6c48e41aecc9ca390b655ee12d327c71dac837ffb33cad9571d9deb989ddfad4"}, {file = "moto-2.3.1.tar.gz", hash = "sha256:477e6a94c5e940c649c1276276afe6e041a068943a6969009dcca69aa1e7463a"}, ] +multidict = [ + {file = "multidict-5.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3822c5894c72e3b35aae9909bef66ec83e44522faf767c0ad39e0e2de11d3b55"}, + {file = "multidict-5.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:28e6d883acd8674887d7edc896b91751dc2d8e87fbdca8359591a13872799e4e"}, + {file = "multidict-5.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b61f85101ef08cbbc37846ac0e43f027f7844f3fade9b7f6dd087178caedeee7"}, + {file = "multidict-5.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9b668c065968c5979fe6b6fa6760bb6ab9aeb94b75b73c0a9c1acf6393ac3bf"}, + {file = "multidict-5.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:517d75522b7b18a3385726b54a081afd425d4f41144a5399e5abd97ccafdf36b"}, + {file = "multidict-5.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1b4ac3ba7a97b35a5ccf34f41b5a8642a01d1e55454b699e5e8e7a99b5a3acf5"}, + {file = "multidict-5.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:df23c83398715b26ab09574217ca21e14694917a0c857e356fd39e1c64f8283f"}, + {file = "multidict-5.2.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e58a9b5cc96e014ddf93c2227cbdeca94b56a7eb77300205d6e4001805391747"}, + {file = "multidict-5.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f76440e480c3b2ca7f843ff8a48dc82446b86ed4930552d736c0bac507498a52"}, + {file = "multidict-5.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cfde464ca4af42a629648c0b0d79b8f295cf5b695412451716531d6916461628"}, + {file = "multidict-5.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0fed465af2e0eb6357ba95795d003ac0bdb546305cc2366b1fc8f0ad67cc3fda"}, + {file = "multidict-5.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:b70913cbf2e14275013be98a06ef4b412329fe7b4f83d64eb70dce8269ed1e1a"}, + {file = "multidict-5.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5635bcf1b75f0f6ef3c8a1ad07b500104a971e38d3683167b9454cb6465ac86"}, + {file = "multidict-5.2.0-cp310-cp310-win32.whl", hash = "sha256:77f0fb7200cc7dedda7a60912f2059086e29ff67cefbc58d2506638c1a9132d7"}, + {file = "multidict-5.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:9416cf11bcd73c861267e88aea71e9fcc35302b3943e45e1dbb4317f91a4b34f"}, + {file = "multidict-5.2.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fd77c8f3cba815aa69cb97ee2b2ef385c7c12ada9c734b0f3b32e26bb88bbf1d"}, + {file = "multidict-5.2.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98ec9aea6223adf46999f22e2c0ab6cf33f5914be604a404f658386a8f1fba37"}, + {file = 
"multidict-5.2.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e5283c0a00f48e8cafcecadebfa0ed1dac8b39e295c7248c44c665c16dc1138b"}, + {file = "multidict-5.2.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5f79c19c6420962eb17c7e48878a03053b7ccd7b69f389d5831c0a4a7f1ac0a1"}, + {file = "multidict-5.2.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e4a67f1080123de76e4e97a18d10350df6a7182e243312426d508712e99988d4"}, + {file = "multidict-5.2.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:94b117e27efd8e08b4046c57461d5a114d26b40824995a2eb58372b94f9fca02"}, + {file = "multidict-5.2.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2e77282fd1d677c313ffcaddfec236bf23f273c4fba7cdf198108f5940ae10f5"}, + {file = "multidict-5.2.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:116347c63ba049c1ea56e157fa8aa6edaf5e92925c9b64f3da7769bdfa012858"}, + {file = "multidict-5.2.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:dc3a866cf6c13d59a01878cd806f219340f3e82eed514485e094321f24900677"}, + {file = "multidict-5.2.0-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ac42181292099d91217a82e3fa3ce0e0ddf3a74fd891b7c2b347a7f5aa0edded"}, + {file = "multidict-5.2.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:f0bb0973f42ffcb5e3537548e0767079420aefd94ba990b61cf7bb8d47f4916d"}, + {file = "multidict-5.2.0-cp36-cp36m-win32.whl", hash = "sha256:ea21d4d5104b4f840b91d9dc8cbc832aba9612121eaba503e54eaab1ad140eb9"}, + {file = "multidict-5.2.0-cp36-cp36m-win_amd64.whl", hash = "sha256:e6453f3cbeb78440747096f239d282cc57a2997a16b5197c9bc839099e1633d0"}, + {file = "multidict-5.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d3def943bfd5f1c47d51fd324df1e806d8da1f8e105cc7f1c76a1daf0f7e17b0"}, + {file = "multidict-5.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35591729668a303a02b06e8dba0eb8140c4a1bfd4c4b3209a436a02a5ac1de11"}, + {file = "multidict-5.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8cacda0b679ebc25624d5de66c705bc53dcc7c6f02a7fb0f3ca5e227d80422"}, + {file = "multidict-5.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:baf1856fab8212bf35230c019cde7c641887e3fc08cadd39d32a421a30151ea3"}, + {file = "multidict-5.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a43616aec0f0d53c411582c451f5d3e1123a68cc7b3475d6f7d97a626f8ff90d"}, + {file = "multidict-5.2.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:25cbd39a9029b409167aa0a20d8a17f502d43f2efebfe9e3ac019fe6796c59ac"}, + {file = "multidict-5.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a2cbcfbea6dc776782a444db819c8b78afe4db597211298dd8b2222f73e9cd0"}, + {file = "multidict-5.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3d2d7d1fff8e09d99354c04c3fd5b560fb04639fd45926b34e27cfdec678a704"}, + {file = "multidict-5.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a37e9a68349f6abe24130846e2f1d2e38f7ddab30b81b754e5a1fde32f782b23"}, + {file = "multidict-5.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:637c1896497ff19e1ee27c1c2c2ddaa9f2d134bbb5e0c52254361ea20486418d"}, + {file = "multidict-5.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:9815765f9dcda04921ba467957be543423e5ec6a1136135d84f2ae092c50d87b"}, + {file = "multidict-5.2.0-cp37-cp37m-win32.whl", hash = "sha256:8b911d74acdc1fe2941e59b4f1a278a330e9c34c6c8ca1ee21264c51ec9b67ef"}, + {file = "multidict-5.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:380b868f55f63d048a25931a1632818f90e4be71d2081c2338fcf656d299949a"}, + {file = "multidict-5.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e7d81ce5744757d2f05fc41896e3b2ae0458464b14b5a2c1e87a6a9d69aefaa8"}, + {file = "multidict-5.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d1d55cdf706ddc62822d394d1df53573d32a7a07d4f099470d3cb9323b721b6"}, + {file = "multidict-5.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4771d0d0ac9d9fe9e24e33bed482a13dfc1256d008d101485fe460359476065"}, + {file = "multidict-5.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da7d57ea65744d249427793c042094c4016789eb2562576fb831870f9c878d9e"}, + {file = "multidict-5.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdd68778f96216596218b4e8882944d24a634d984ee1a5a049b300377878fa7c"}, + {file = "multidict-5.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ecc99bce8ee42dcad15848c7885197d26841cb24fa2ee6e89d23b8993c871c64"}, + {file = "multidict-5.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:067150fad08e6f2dd91a650c7a49ba65085303fcc3decbd64a57dc13a2733031"}, + {file = "multidict-5.2.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:78c106b2b506b4d895ddc801ff509f941119394b89c9115580014127414e6c2d"}, + {file = "multidict-5.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e6c4fa1ec16e01e292315ba76eb1d012c025b99d22896bd14a66628b245e3e01"}, + {file = "multidict-5.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b227345e4186809d31f22087d0265655114af7cda442ecaf72246275865bebe4"}, + {file = "multidict-5.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:06560fbdcf22c9387100979e65b26fba0816c162b888cb65b845d3def7a54c9b"}, + {file = "multidict-5.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7878b61c867fb2df7a95e44b316f88d5a3742390c99dfba6c557a21b30180cac"}, + {file = "multidict-5.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:246145bff76cc4b19310f0ad28bd0769b940c2a49fc601b86bfd150cbd72bb22"}, + {file = "multidict-5.2.0-cp38-cp38-win32.whl", hash = "sha256:c30ac9f562106cd9e8071c23949a067b10211917fdcb75b4718cf5775356a940"}, + {file = "multidict-5.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:f19001e790013ed580abfde2a4465388950728861b52f0da73e8e8a9418533c0"}, + {file = "multidict-5.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c1ff762e2ee126e6f1258650ac641e2b8e1f3d927a925aafcfde943b77a36d24"}, + {file = "multidict-5.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bd6c9c50bf2ad3f0448edaa1a3b55b2e6866ef8feca5d8dbec10ec7c94371d21"}, + {file = "multidict-5.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc66d4016f6e50ed36fb39cd287a3878ffcebfa90008535c62e0e90a7ab713ae"}, + {file = "multidict-5.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9acb76d5f3dd9421874923da2ed1e76041cb51b9337fd7f507edde1d86535d6"}, + {file = "multidict-5.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dfc924a7e946dd3c6360e50e8f750d51e3ef5395c95dc054bc9eab0f70df4f9c"}, + {file = 
"multidict-5.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32fdba7333eb2351fee2596b756d730d62b5827d5e1ab2f84e6cbb287cc67fe0"}, + {file = "multidict-5.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b9aad49466b8d828b96b9e3630006234879c8d3e2b0a9d99219b3121bc5cdb17"}, + {file = "multidict-5.2.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:93de39267c4c676c9ebb2057e98a8138bade0d806aad4d864322eee0803140a0"}, + {file = "multidict-5.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f9bef5cff994ca3026fcc90680e326d1a19df9841c5e3d224076407cc21471a1"}, + {file = "multidict-5.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:5f841c4f14331fd1e36cbf3336ed7be2cb2a8f110ce40ea253e5573387db7621"}, + {file = "multidict-5.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:38ba256ee9b310da6a1a0f013ef4e422fca30a685bcbec86a969bd520504e341"}, + {file = "multidict-5.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3bc3b1621b979621cee9f7b09f024ec76ec03cc365e638126a056317470bde1b"}, + {file = "multidict-5.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6ee908c070020d682e9b42c8f621e8bb10c767d04416e2ebe44e37d0f44d9ad5"}, + {file = "multidict-5.2.0-cp39-cp39-win32.whl", hash = "sha256:1c7976cd1c157fa7ba5456ae5d31ccdf1479680dc9b8d8aa28afabc370df42b8"}, + {file = "multidict-5.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:c9631c642e08b9fff1c6255487e62971d8b8e821808ddd013d8ac058087591ac"}, + {file = "multidict-5.2.0.tar.gz", hash = "sha256:0dd1c93edb444b33ba2274b66f63def8a327d607c6c790772f448a53b6ea59ce"}, +] mypy = [ {file = "mypy-0.910-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:a155d80ea6cee511a3694b108c4494a39f42de11ee4e61e72bc424c490e46457"}, {file = "mypy-0.910-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:b94e4b785e304a04ea0828759172a15add27088520dc7e49ceade7834275bedb"}, @@ -2860,8 +3276,11 @@ pandas = [ {file = "pandas-1.3.3-cp39-cp39-win32.whl", hash = "sha256:f7d84f321674c2f0f31887ee6d5755c54ca1ea5e144d6d54b3bbf566dd9ea0cc"}, {file = "pandas-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:e574c2637c9d27f322e911650b36e858c885702c5996eda8a5a60e35e6648cf2"}, {file = "pandas-1.3.3.tar.gz", hash = "sha256:272c8cb14aa9793eada6b1ebe81994616e647b5892a370c7135efb2924b701df"}, + {file = "pandas-1.3.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9707bdc1ea9639c886b4d3be6e2a45812c1ac0c2080f94c31b71c9fa35556f9b"}, + {file = "pandas-1.3.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c2f44425594ae85e119459bb5abb0748d76ef01d9c08583a667e3339e134218e"}, {file = "pandas-1.3.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:372d72a3d8a5f2dbaf566a5fa5fa7f230842ac80f29a931fb4b071502cf86b9a"}, {file = "pandas-1.3.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d99d2350adb7b6c3f7f8f0e5dfb7d34ff8dd4bc0a53e62c445b7e43e163fce63"}, + {file = "pandas-1.3.4-cp310-cp310-win_amd64.whl", hash = "sha256:4acc28364863127bca1029fb72228e6f473bb50c32e77155e80b410e2068eeac"}, {file = "pandas-1.3.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c2646458e1dce44df9f71a01dc65f7e8fa4307f29e5c0f2f92c97f47a5bf22f5"}, {file = "pandas-1.3.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5298a733e5bfbb761181fd4672c36d0c627320eb999c59c65156c6a90c7e1b4f"}, {file = "pandas-1.3.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:22808afb8f96e2269dcc5b846decacb2f526dd0b47baebc63d913bf847317c8f"}, @@ -3146,24 +3565,32 @@ pyzmq = [ {file = "pyzmq-22.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f89468059ebc519a7acde1ee50b779019535db8dcf9b8c162ef669257fef7a93"}, {file = "pyzmq-22.3.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea12133df25e3a6918718fbb9a510c6ee5d3fdd5a346320421aac3882f4feeea"}, {file = "pyzmq-22.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c532fd68b93998aab92356be280deec5de8f8fe59cd28763d2cc8a58747b7f"}, + {file = "pyzmq-22.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f907c7359ce8bf7f7e63c82f75ad0223384105f5126f313400b7e8004d9b33c3"}, + {file = "pyzmq-22.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:902319cfe23366595d3fa769b5b751e6ee6750a0a64c5d9f757d624b2ac3519e"}, {file = "pyzmq-22.3.0-cp310-cp310-win32.whl", hash = "sha256:67db33bea0a29d03e6eeec55a8190e033318cee3cbc732ba8fd939617cbf762d"}, {file = "pyzmq-22.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:7661fc1d5cb73481cf710a1418a4e1e301ed7d5d924f91c67ba84b2a1b89defd"}, {file = "pyzmq-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79244b9e97948eaf38695f4b8e6fc63b14b78cc37f403c6642ba555517ac1268"}, {file = "pyzmq-22.3.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab888624ed68930442a3f3b0b921ad7439c51ba122dbc8c386e6487a658e4a4e"}, {file = "pyzmq-22.3.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18cd854b423fce44951c3a4d3e686bac8f1243d954f579e120a1714096637cc0"}, {file = "pyzmq-22.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:de8df0684398bd74ad160afdc2a118ca28384ac6f5e234eb0508858d8d2d9364"}, + {file = "pyzmq-22.3.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:62bcade20813796c426409a3e7423862d50ff0639f5a2a95be4b85b09a618666"}, + {file = "pyzmq-22.3.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ea5a79e808baef98c48c884effce05c31a0698c1057de8fc1c688891043c1ce1"}, {file = "pyzmq-22.3.0-cp36-cp36m-win32.whl", hash = "sha256:3c1895c95be92600233e476fe283f042e71cf8f0b938aabf21b7aafa62a8dac9"}, {file = "pyzmq-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:851977788b9caa8ed011f5f643d3ee8653af02c5fc723fa350db5125abf2be7b"}, {file = "pyzmq-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b4ebed0977f92320f6686c96e9e8dd29eed199eb8d066936bac991afc37cbb70"}, {file = "pyzmq-22.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42abddebe2c6a35180ca549fadc7228d23c1e1f76167c5ebc8a936b5804ea2df"}, {file = "pyzmq-22.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1e41b32d6f7f9c26bc731a8b529ff592f31fc8b6ef2be9fa74abd05c8a342d7"}, {file = "pyzmq-22.3.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:be4e0f229cf3a71f9ecd633566bd6f80d9fa6afaaff5489492be63fe459ef98c"}, + {file = "pyzmq-22.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:08c4e315a76ef26eb833511ebf3fa87d182152adf43dedee8d79f998a2162a0b"}, + {file = "pyzmq-22.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:badb868fff14cfd0e200eaa845887b1011146a7d26d579aaa7f966c203736b92"}, {file = "pyzmq-22.3.0-cp37-cp37m-win32.whl", hash = "sha256:7c58f598d9fcc52772b89a92d72bf8829c12d09746a6d2c724c5b30076c1f11d"}, {file = "pyzmq-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2b97502c16a5ec611cd52410bdfaab264997c627a46b0f98d3f666227fd1ea2d"}, {file = 
"pyzmq-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d728b08448e5ac3e4d886b165385a262883c34b84a7fe1166277fe675e1c197a"}, {file = "pyzmq-22.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:480b9931bfb08bf8b094edd4836271d4d6b44150da051547d8c7113bf947a8b0"}, {file = "pyzmq-22.3.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7dc09198e4073e6015d9a8ea093fc348d4e59de49382476940c3dd9ae156fba8"}, {file = "pyzmq-22.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ca6cd58f62a2751728016d40082008d3b3412a7f28ddfb4a2f0d3c130f69e74"}, + {file = "pyzmq-22.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:468bd59a588e276961a918a3060948ae68f6ff5a7fa10bb2f9160c18fe341067"}, + {file = "pyzmq-22.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c88fa7410e9fc471e0858638f403739ee869924dd8e4ae26748496466e27ac59"}, {file = "pyzmq-22.3.0-cp38-cp38-win32.whl", hash = "sha256:c0f84360dcca3481e8674393bdf931f9f10470988f87311b19d23cda869bb6b7"}, {file = "pyzmq-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:f762442bab706fd874064ca218b33a1d8e40d4938e96c24dafd9b12e28017f45"}, {file = "pyzmq-22.3.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:954e73c9cd4d6ae319f1c936ad159072b6d356a92dcbbabfd6e6204b9a79d356"}, @@ -3171,6 +3598,8 @@ pyzmq = [ {file = "pyzmq-22.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:acebba1a23fb9d72b42471c3771b6f2f18dcd46df77482612054bd45c07dfa36"}, {file = "pyzmq-22.3.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cf98fd7a6c8aaa08dbc699ffae33fd71175696d78028281bc7b832b26f00ca57"}, {file = "pyzmq-22.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d072f7dfbdb184f0786d63bda26e8a0882041b1e393fbe98940395f7fab4c5e2"}, + {file = "pyzmq-22.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:53f4fd13976789ffafedd4d46f954c7bb01146121812b72b4ddca286034df966"}, + {file = "pyzmq-22.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d1b5d457acbadcf8b27561deeaa386b0217f47626b29672fa7bd31deb6e91e1b"}, {file = "pyzmq-22.3.0-cp39-cp39-win32.whl", hash = "sha256:e6a02cf7271ee94674a44f4e62aa061d2d049001c844657740e156596298b70b"}, {file = "pyzmq-22.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d3dcb5548ead4f1123851a5ced467791f6986d68c656bc63bfff1bf9e36671e2"}, {file = "pyzmq-22.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3a4c9886d61d386b2b493377d980f502186cd71d501fffdba52bd2a0880cef4f"}, @@ -3457,6 +3886,80 @@ xmltodict = [ {file = "xmltodict-0.12.0-py2.py3-none-any.whl", hash = "sha256:8bbcb45cc982f48b2ca8fe7e7827c5d792f217ecf1792626f808bf41c3b86051"}, {file = "xmltodict-0.12.0.tar.gz", hash = "sha256:50d8c638ed7ecb88d90561beedbf720c9b4e851a9fa6c47ebd64e99d166d8a21"}, ] +yarl = [ + {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2a8508f7350512434e41065684076f640ecce176d262a7d54f0da41d99c5a95"}, + {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da6df107b9ccfe52d3a48165e48d72db0eca3e3029b5b8cb4fe6ee3cb870ba8b"}, + {file = "yarl-1.7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1d0894f238763717bdcfea74558c94e3bc34aeacd3351d769460c1a586a8b05"}, + {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe4b95b7e00c6635a72e2d00b478e8a28bfb122dc76349a06e20792eb53a523"}, + {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c145ab54702334c42237a6c6c4cc08703b6aa9b94e2f227ceb3d477d20c36c63"}, + {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ca56f002eaf7998b5fcf73b2421790da9d2586331805f38acd9997743114e98"}, + {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1d3d5ad8ea96bd6d643d80c7b8d5977b4e2fb1bab6c9da7322616fd26203d125"}, + {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:167ab7f64e409e9bdd99333fe8c67b5574a1f0495dcfd905bc7454e766729b9e"}, + {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:95a1873b6c0dd1c437fb3bb4a4aaa699a48c218ac7ca1e74b0bee0ab16c7d60d"}, + {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6152224d0a1eb254f97df3997d79dadd8bb2c1a02ef283dbb34b97d4f8492d23"}, + {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bb7d54b8f61ba6eee541fba4b83d22b8a046b4ef4d8eb7f15a7e35db2e1e245"}, + {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:9c1f083e7e71b2dd01f7cd7434a5f88c15213194df38bc29b388ccdf1492b739"}, + {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f44477ae29025d8ea87ec308539f95963ffdc31a82f42ca9deecf2d505242e72"}, + {file = "yarl-1.7.2-cp310-cp310-win32.whl", hash = "sha256:cff3ba513db55cc6a35076f32c4cdc27032bd075c9faef31fec749e64b45d26c"}, + {file = "yarl-1.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:c9c6d927e098c2d360695f2e9d38870b2e92e0919be07dbe339aefa32a090265"}, + {file = "yarl-1.7.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9b4c77d92d56a4c5027572752aa35082e40c561eec776048330d2907aead891d"}, + {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c01a89a44bb672c38f42b49cdb0ad667b116d731b3f4c896f72302ff77d71656"}, + {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c19324a1c5399b602f3b6e7db9478e5b1adf5cf58901996fc973fe4fccd73eed"}, + {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3abddf0b8e41445426d29f955b24aeecc83fa1072be1be4e0d194134a7d9baee"}, + {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6a1a9fe17621af43e9b9fcea8bd088ba682c8192d744b386ee3c47b56eaabb2c"}, + {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8b0915ee85150963a9504c10de4e4729ae700af11df0dc5550e6587ed7891e92"}, + {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:29e0656d5497733dcddc21797da5a2ab990c0cb9719f1f969e58a4abac66234d"}, + {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:bf19725fec28452474d9887a128e98dd67eee7b7d52e932e6949c532d820dc3b"}, + {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d6f3d62e16c10e88d2168ba2d065aa374e3c538998ed04996cd373ff2036d64c"}, + {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ac10bbac36cd89eac19f4e51c032ba6b412b3892b685076f4acd2de18ca990aa"}, + {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aa32aaa97d8b2ed4e54dc65d241a0da1c627454950f7d7b1f95b13985afd6c5d"}, + {file = "yarl-1.7.2-cp36-cp36m-win32.whl", hash = "sha256:87f6e082bce21464857ba58b569370e7b547d239ca22248be68ea5d6b51464a1"}, + {file = "yarl-1.7.2-cp36-cp36m-win_amd64.whl", 
hash = "sha256:ac35ccde589ab6a1870a484ed136d49a26bcd06b6a1c6397b1967ca13ceb3913"}, + {file = "yarl-1.7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a467a431a0817a292121c13cbe637348b546e6ef47ca14a790aa2fa8cc93df63"}, + {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ab0c3274d0a846840bf6c27d2c60ba771a12e4d7586bf550eefc2df0b56b3b4"}, + {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d260d4dc495c05d6600264a197d9d6f7fc9347f21d2594926202fd08cf89a8ba"}, + {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc4dd8b01a8112809e6b636b00f487846956402834a7fd59d46d4f4267181c41"}, + {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c1164a2eac148d85bbdd23e07dfcc930f2e633220f3eb3c3e2a25f6148c2819e"}, + {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:67e94028817defe5e705079b10a8438b8cb56e7115fa01640e9c0bb3edf67332"}, + {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:89ccbf58e6a0ab89d487c92a490cb5660d06c3a47ca08872859672f9c511fc52"}, + {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8cce6f9fa3df25f55521fbb5c7e4a736683148bcc0c75b21863789e5185f9185"}, + {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:211fcd65c58bf250fb994b53bc45a442ddc9f441f6fec53e65de8cba48ded986"}, + {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c10ea1e80a697cf7d80d1ed414b5cb8f1eec07d618f54637067ae3c0334133c4"}, + {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:52690eb521d690ab041c3919666bea13ab9fbff80d615ec16fa81a297131276b"}, + {file = "yarl-1.7.2-cp37-cp37m-win32.whl", hash = "sha256:695ba021a9e04418507fa930d5f0704edbce47076bdcfeeaba1c83683e5649d1"}, + {file = "yarl-1.7.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c17965ff3706beedafd458c452bf15bac693ecd146a60a06a214614dc097a271"}, + {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fce78593346c014d0d986b7ebc80d782b7f5e19843ca798ed62f8e3ba8728576"}, + {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c2a1ac41a6aa980db03d098a5531f13985edcb451bcd9d00670b03129922cd0d"}, + {file = "yarl-1.7.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:39d5493c5ecd75c8093fa7700a2fb5c94fe28c839c8e40144b7ab7ccba6938c8"}, + {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eb6480ef366d75b54c68164094a6a560c247370a68c02dddb11f20c4c6d3c9d"}, + {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ba63585a89c9885f18331a55d25fe81dc2d82b71311ff8bd378fc8004202ff6"}, + {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e39378894ee6ae9f555ae2de332d513a5763276a9265f8e7cbaeb1b1ee74623a"}, + {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c0910c6b6c31359d2f6184828888c983d54d09d581a4a23547a35f1d0b9484b1"}, + {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6feca8b6bfb9eef6ee057628e71e1734caf520a907b6ec0d62839e8293e945c0"}, + {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8300401dc88cad23f5b4e4c1226f44a5aa696436a4026e456fe0e5d2f7f486e6"}, + 
{file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:788713c2896f426a4e166b11f4ec538b5736294ebf7d5f654ae445fd44270832"}, + {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fd547ec596d90c8676e369dd8a581a21227fe9b4ad37d0dc7feb4ccf544c2d59"}, + {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:737e401cd0c493f7e3dd4db72aca11cfe069531c9761b8ea474926936b3c57c8"}, + {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf81561f2972fb895e7844882898bda1eef4b07b5b385bcd308d2098f1a767b"}, + {file = "yarl-1.7.2-cp38-cp38-win32.whl", hash = "sha256:ede3b46cdb719c794427dcce9d8beb4abe8b9aa1e97526cc20de9bd6583ad1ef"}, + {file = "yarl-1.7.2-cp38-cp38-win_amd64.whl", hash = "sha256:cc8b7a7254c0fc3187d43d6cb54b5032d2365efd1df0cd1749c0c4df5f0ad45f"}, + {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:580c1f15500e137a8c37053e4cbf6058944d4c114701fa59944607505c2fe3a0"}, + {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ec1d9a0d7780416e657f1e405ba35ec1ba453a4f1511eb8b9fbab81cb8b3ce1"}, + {file = "yarl-1.7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3bf8cfe8856708ede6a73907bf0501f2dc4e104085e070a41f5d88e7faf237f3"}, + {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1be4bbb3d27a4e9aa5f3df2ab61e3701ce8fcbd3e9846dbce7c033a7e8136746"}, + {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:534b047277a9a19d858cde163aba93f3e1677d5acd92f7d10ace419d478540de"}, + {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6ddcd80d79c96eb19c354d9dca95291589c5954099836b7c8d29278a7ec0bda"}, + {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9bfcd43c65fbb339dc7086b5315750efa42a34eefad0256ba114cd8ad3896f4b"}, + {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f64394bd7ceef1237cc604b5a89bf748c95982a84bcd3c4bbeb40f685c810794"}, + {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044daf3012e43d4b3538562da94a88fb12a6490652dbc29fb19adfa02cf72eac"}, + {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:368bcf400247318382cc150aaa632582d0780b28ee6053cd80268c7e72796dec"}, + {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:bab827163113177aee910adb1f48ff7af31ee0289f434f7e22d10baf624a6dfe"}, + {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0cba38120db72123db7c58322fa69e3c0efa933040ffb586c3a87c063ec7cae8"}, + {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:59218fef177296451b23214c91ea3aba7858b4ae3306dde120224cfe0f7a6ee8"}, + {file = "yarl-1.7.2-cp39-cp39-win32.whl", hash = "sha256:1edc172dcca3f11b38a9d5c7505c83c1913c0addc99cd28e993efeaafdfaa18d"}, + {file = "yarl-1.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:797c2c412b04403d2da075fb93c123df35239cd7b4cc4e0cd9e5839b73f52c58"}, + {file = "yarl-1.7.2.tar.gz", hash = "sha256:45399b46d60c253327a460e99856752009fcee5f5d3c80b2f7c0cae1c38d56dd"}, +] zipp = [ {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"}, diff --git a/pyproject.toml b/pyproject.toml index 03596be8c..25fb38a61 100644 --- 
a/pyproject.toml +++ b/pyproject.toml @@ -50,6 +50,7 @@ opensearch-py = "^1.0.0" xlrd = { version = "^2.0.1", python = "~3.6" } xlwt = { version = "^1.3.0", python = "~3.6" } pyodbc = { version = "~4.0.32", optional = true } +gremlinpython = "^3.5.2" [tool.poetry.extras] sqlserver = ["pyodbc"] From 355b8be2f8cd02214242b7876e557ad8ef811d4c Mon Sep 17 00:00:00 2001 From: Dave Bechberger Date: Tue, 25 Jan 2022 16:23:44 -0900 Subject: [PATCH 06/32] [skip ci] Initial working version of the basic read functionality associated with the three query languages --- awswrangler/neptune/__init__.py | 2 +- awswrangler/neptune/client.py | 92 ++++++++++++++++++++------------- awswrangler/neptune/neptune.py | 8 ++- 3 files changed, 62 insertions(+), 40 deletions(-) diff --git a/awswrangler/neptune/__init__.py b/awswrangler/neptune/__init__.py index 15f181d4a..98f7275fe 100644 --- a/awswrangler/neptune/__init__.py +++ b/awswrangler/neptune/__init__.py @@ -1,7 +1,7 @@ """Utilities Module for Amazon Neptune.""" from awswrangler.neptune.neptune import read_gremlin, read_opencypher, read_sparql, to_property_graph, to_rdf_graph -from awswrangler.neptune.client import connect +from awswrangler.neptune.client import connect, NeptuneClient __all__ = [ "read_gremlin", diff --git a/awswrangler/neptune/client.py b/awswrangler/neptune/client.py index ccfaada6a..d8663b8de 100644 --- a/awswrangler/neptune/client.py +++ b/awswrangler/neptune/client.py @@ -9,8 +9,11 @@ from gremlin_python.structure.graph import Path from gremlin_python.structure.graph import Vertex from gremlin_python.structure.graph import Edge +from gremlin_python.structure.graph import VertexProperty +from gremlin_python.structure.graph import Property import logging + _logger: logging.Logger = logging.getLogger(__name__) DEFAULT_PORT = 8182 @@ -113,11 +116,26 @@ def read_gremlin(self, query, headers:Dict[str, Any] = None) -> Dict[str, Any]: future_results = result.all() results = future_results.result() c.close() - return self.gremlin_results_to_dict(results) + return self._gremlin_results_to_dict(results) except Exception as e: c.close() raise e + + def read_sparql(self, query, headers:Dict[str, Any] = None) -> Dict[str, Any]: + if headers is None: + headers = {} + + data = {'query': query} + + if 'content-type' not in headers: + headers['content-type'] = 'application/x-www-form-urlencoded' + + uri = f'{self._http_protocol}://{self.host}:{self.port}/sparql' + req = self._prepare_request('POST', uri, data=data, headers=headers) + res = self._http_session.send(req) + return res + def status(self): url = f'{self._http_protocol}://{self.host}:{self.port}/status' @@ -130,56 +148,56 @@ def status(self): raise ConnectionError(res.status_code) - def gremlin_results_to_dict(self, result) -> Dict[str, Any]: - # We can accept a dict by itself or a list with a dict at position [0] - if isinstance(result, list): - if isinstance(result[0], dict): - tmp = result[0] - else: - tmp = dict(zip(result, result)) - elif isinstance(result, dict): - tmp = result - else: - return False + def _gremlin_results_to_dict(self, result) -> Dict[str, Any]: + res=[] - # Even though we know we have a dict now, we still have work to do. It is quite - # likely that due to the way TinkerPop works, the dict values could be wrapped - # in a list of length one. In order to render the data, we first need to unroll - # those lists. If the length of the list is greater than one currently we will fail - # gracefully as we don't know what to do with a result of the form {"k":[1,2]}. 
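(Aside: a minimal, self-contained sketch of the flattening behaviour the rewritten parser below is aiming for; the `flatten` helper and the sample records are illustrative assumptions, not part of this patch.)

import pandas as pd

# Gremlin valueMap() results typically wrap each property value in a list;
# unwrapping single-element lists turns each result into a flat record.
def flatten(record):
    return {k: v[0] if isinstance(v, list) and len(v) == 1 else v for k, v in record.items()}

rows = [flatten(r) for r in [{"name": ["foo"], "age": [29]}, {"name": ["bar"], "age": [40]}]]
df = pd.DataFrame.from_records(rows)  # two rows with columns "name" and "age"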
+ # For lists or paths unwind them + if isinstance(result, list) or isinstance(result, Path): + for x in result: + res.append(self._parse_dict(x)) - # If the value is a simple type like Str, Int etc. we render it as is. If the value - # is a Vertex, for now we use the ID of the vertex as the value that is sent to the - # plot as the y-axis value. This yields a plot but not always a useful one! + # For dictionaries just add them + elif isinstance(result, dict): + res.append(result) + + # For everything else parse them + else: + res.append(self._parse_dict(result)) + return res - # It is also possible that the key is a Vertex or an Edge. For a Vertex we can just - # use the str() representation. For an Edge using the ID probably makes more sense. + def _parse_dict(self, data) -> Dict[str, Any]: d = dict() - for (k, v) in tmp.items(): + # If this is a list or Path then unwind it + if isinstance(data, list) or isinstance(data, Path): + res=[] + for x in data: + res.append(self._parse_dict(x)) + return res + + # If this is an element then make it a dictionary + elif isinstance(data, Vertex) or isinstance(data,Edge) or isinstance(data, VertexProperty) or isinstance(data, Property): + data=data.__dict__ + + # If this is a scalar then create a Map with it + elif not hasattr(data, "__len__") or isinstance(data, str): + data = {0: data} + + for (k, v) in data.items(): # If the key is a Vertex or an Edge do special processing - if isinstance(k, Vertex): - k = str(k) - elif isinstance(k, Edge): + if isinstance(k, Vertex) or isinstance(k, Edge): k = k.id - # If the value is a list do special processing - if isinstance(v, list): - if len(v) == 1: - d[k] = v[0] - else: - return False + # If the value is a list do special processing to make it a scalar if the list is of length 1 + if isinstance(v, list) and len(v) == 1: + d[k] = v[0] else: d[k] = v # If the value is a Vertex or Edge do special processing - if isinstance(d[k], Vertex): - # d[k] = d[k].id + if isinstance(d[k], Vertex) or isinstance(d[k], Edge) or isinstance(d[k], VertexProperty) or isinstance(d[k], Property): d[k] = d[k].__dict__ - elif isinstance(d[k], Edge): - d[k] = d[k].__dict__ - return d diff --git a/awswrangler/neptune/neptune.py b/awswrangler/neptune/neptune.py index 3f5eb0073..a17a8e2e4 100644 --- a/awswrangler/neptune/neptune.py +++ b/awswrangler/neptune/neptune.py @@ -30,7 +30,7 @@ def read_gremlin( >>> df = wr.neptune.read_gremlin(client, "g.V().limit(1)") """ results = client.read_gremlin(query) - df = pd.DataFrame.from_dict(results, orient='index') + df = pd.DataFrame.from_records(results) return df @@ -97,7 +97,11 @@ def read_sparql( ?person foaf:name ?name . 
}") """ - raise NotImplementedError + resp = client.read_sparql(query) + data = resp.json() + df = pd.DataFrame(data['results']['bindings']) + df.applymap(lambda x: x['value']) + return df def to_property_graph( From ce3b697a67d6c9c6280477717a73d922d27e6f33 Mon Sep 17 00:00:00 2001 From: Dave Bechberger Date: Tue, 25 Jan 2022 16:31:35 -0900 Subject: [PATCH 07/32] [skip ci] Fixed tests that were not running correctly due to typo --- tests/test_neptune.py | 134 +++++++++++++++++ tests/test_neptune_gremlin_parsing.py | 207 ++++++++++++++++++++++++++ 2 files changed, 341 insertions(+) create mode 100644 tests/test_neptune.py create mode 100644 tests/test_neptune_gremlin_parsing.py diff --git a/tests/test_neptune.py b/tests/test_neptune.py new file mode 100644 index 000000000..6e2febec8 --- /dev/null +++ b/tests/test_neptune.py @@ -0,0 +1,134 @@ +import json +import logging +import tempfile +import time +import requests +from typing import Any, Dict +from urllib.error import HTTPError + +import boto3 +import pandas as pd +import pytest # type: ignore + +import awswrangler as wr + +from ._utils import extract_cloudformation_outputs + +logging.getLogger("awswrangler").setLevel(logging.DEBUG) + +@pytest.fixture(scope="session") +def cloudformation_outputs(): + outputs = {} + outputs['cluster_resource_id']='XXX' + outputs['endpoint'] = 'air-routes-graph-1509728730.us-west-2.elb.amazonaws.com' + outputs['read_endpoint'] = 'air-routes-graph-1509728730.us-west-2.elb.amazonaws.com' + outputs['port'] = 80 + outputs['ssl'] = False + outputs['iam_enabled'] = False + return outputs + + +@pytest.fixture(scope="session") +def neptune_endpoint(cloudformation_outputs) -> str: + return cloudformation_outputs["endpoint"] + + +@pytest.fixture(scope="session") +def neptune_read_endpoint(cloudformation_outputs) -> str: + return cloudformation_outputs["read_endpoint"] + + +@pytest.fixture(scope="session") +def neptune_port(cloudformation_outputs) -> int: + return cloudformation_outputs["port"] + + +def test_connection_neptune(neptune_endpoint, neptune_port): + client = wr.neptune.connect(neptune_endpoint, neptune_port) + resp = client.status() + assert len(resp) > 0 + + +def test_connection_neptune_http(neptune_endpoint, neptune_port): + client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=False, iam_enabled=False) + resp = client.status() + assert len(resp) > 0 + + +@pytest.mark.skip("Need infra") +def test_connection_neptune_https(neptune_endpoint, neptune_port): + client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=True, iam_enabled=False) + resp = client.status() + assert len(resp.text) > 0 + + +@pytest.mark.skip("Need infra") +def test_connection_neptune_http_iam(neptune_endpoint, neptune_port): + client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=False, iam_enabled=True) + resp = client.status() + assert len(resp.text) > 0 + + +@pytest.mark.skip("Need infra") +def test_connection_neptune_https_iam(neptune_endpoint, neptune_port): + client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=True, iam_enabled=True) + resp = client.status() + assert resp.status_code == 200 + assert len(resp.text) > 0 + + +def test_opencypher_query(neptune_endpoint, neptune_port) -> Dict[str, Any]: + client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=False, iam_enabled=False) + df = wr.neptune.read_opencypher(client, "MATCH (n) RETURN n LIMIT 1") + assert isinstance(df, pd.DataFrame) + assert len(df.index) == 1 + + assert isinstance(df, pd.DataFrame) + df = 
wr.neptune.read_opencypher(client, "MATCH (n) RETURN n LIMIT 2") + assert len(df.index) == 2 + + df = wr.neptune.read_opencypher(client, "MATCH p=(n)-[r]->(d) RETURN p LIMIT 1") + assert isinstance(df, pd.DataFrame) + assert len(df.index) == 1 + + +def test_gremlin_query_vertices(neptune_endpoint, neptune_port) -> Dict[str, Any]: + client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=False, iam_enabled=False) + + df = wr.neptune.read_gremlin(client, "g.V().limit(1)") + assert isinstance(df, pd.DataFrame) + assert df.shape == (1, 2) + + df = wr.neptune.read_gremlin(client, "g.V().limit(2)") + assert isinstance(df, pd.DataFrame) + assert df.shape == (2, 2) + + +def test_gremlin_query_edges(neptune_endpoint, neptune_port) -> Dict[str, Any]: + client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=False, iam_enabled=False) + + df = wr.neptune.read_gremlin(client, "g.E().limit(1)") + assert isinstance(df, pd.DataFrame) + assert df.shape == (1, 4) + + df = wr.neptune.read_gremlin(client, "g.E().limit(2)") + assert isinstance(df, pd.DataFrame) + assert df.shape == (2, 4) + + +def test_gremlin_query_no_results(neptune_endpoint, neptune_port) -> Dict[str, Any]: + client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=False, iam_enabled=False) + + df = wr.neptune.read_gremlin(client, "g.V('foo').drop()") + assert isinstance(df, pd.DataFrame) + + +def test_sparql_query(neptune_endpoint, neptune_port) -> Dict[str, Any]: + client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=False, iam_enabled=False) + df = wr.neptune.read_sparql(client, "SELECT ?s ?p ?o {?s ?p ?o} LIMIT 1") + assert isinstance(df, pd.DataFrame) + assert df.shape == (1,3) + + df = wr.neptune.read_sparql(client, "SELECT ?s ?p ?o {?s ?p ?o} LIMIT 2") + assert isinstance(df, pd.DataFrame) + assert df.shape == (2,3) \ No newline at end of file diff --git a/tests/test_neptune_gremlin_parsing.py b/tests/test_neptune_gremlin_parsing.py new file mode 100644 index 000000000..85b4785cb --- /dev/null +++ b/tests/test_neptune_gremlin_parsing.py @@ -0,0 +1,207 @@ +import logging + +import pandas as pd +import pytest # type: ignore +from gremlin_python.structure.graph import Path +from gremlin_python.structure.graph import Vertex +from gremlin_python.structure.graph import Edge +from gremlin_python.structure.graph import VertexProperty +from gremlin_python.structure.graph import Property +from gremlin_python.process.traversal import T + +import awswrangler as wr + +logging.getLogger("awswrangler").setLevel(logging.DEBUG) + +@pytest.fixture(scope="session") +def neptune_client() -> wr.neptune.NeptuneClient: + c = object.__new__(wr.neptune.NeptuneClient) + return c + + +#parse Vertex elements +def test_parse_vertex_elements(neptune_client): + # parse vertex elements + v = Vertex("foo") + input = [v] + out = neptune_client._gremlin_results_to_dict(input) + df = pd.DataFrame.from_records(out) + row = df.iloc[0] + assert df.shape == (1,2) + assert row['id'] == 'foo' + assert row['label'] == 'vertex' + + # parse multiple vertex elements + v1 = Vertex("bar") + input = [v, v1] + out = neptune_client._gremlin_results_to_dict(input) + df = pd.DataFrame.from_records(out) + row = df.iloc[1] + assert df.shape == (2,2) + assert row['id'] == 'bar' + assert row['label'] == 'vertex' + + +#parse Edge elements +def test_parse_edge_elements(neptune_client): + # parse edge elements + v = Edge("foo", 'out1', 'label', 'in1') + input = [v] + out = neptune_client._gremlin_results_to_dict(input) + df = pd.DataFrame.from_records(out) + 
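+    # Annotation (illustrative, not from the original patch): _parse_dict
+    # falls back to Edge.__dict__, and gremlin_python's Edge carries the four
+    # fields id, outV, label and inV, so Edge("foo", 'out1', 'label', 'in1')
+    # should flatten to {'id': 'foo', 'outV': 'out1', 'label': 'label',
+    # 'inV': 'in1'}, which is why a (1, 4) shape is asserted below.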
row = df.iloc[0] + assert df.shape == (1,4) + assert row['id'] == 'foo' + assert row['outV'] == 'out1' + assert row['label'] == 'label' + assert row['inV'] == 'in1' + + # parse multiple edge elements + v1 = Edge("bar", 'out1', 'label', 'in2') + input = [v, v1] + out = neptune_client._gremlin_results_to_dict(input) + df = pd.DataFrame.from_records(out) + row = df.iloc[1] + assert df.shape == (2,4) + assert row['id'] == 'bar' + assert row['outV'] == 'out1' + assert row['label'] == 'label' + assert row['inV'] == 'in2' + +#parse Property elements +def test_parse_property_elements(neptune_client): + # parse VertexProperty elements + v = VertexProperty("foo", 'name', 'bar', 'v1') + input = [v] + out = neptune_client._gremlin_results_to_dict(input) + df = pd.DataFrame.from_records(out) + row = df.iloc[0] + assert df.shape == (1,5) + assert row['id'] == 'foo' + assert row['label'] == 'name' + assert row['value'] == 'bar' + assert row['key'] == 'name' + assert row['vertex'] == 'v1' + + v = Property("foo", 'name', 'bar') + input = [v] + out = neptune_client._gremlin_results_to_dict(input) + df = pd.DataFrame.from_records(out) + row = df.iloc[0] + assert df.shape == (1,3) + assert row['element'] == 'bar' + assert row['value'] == 'name' + assert row['key'] == 'foo' + + +#parse Path elements +def test_parse_path_elements(neptune_client): + #parse path with elements + v = Vertex("foo") + v2 = Vertex("bar") + e1 = Edge("e1", 'foo', 'label', 'bar') + p = Path(labels=["vertex", "label", "vertex"], objects=[v, e1, v2]) + out = neptune_client._gremlin_results_to_dict([p]) + df = pd.DataFrame.from_records(out) + row = df.iloc[0] + assert df.shape == (1,3) + assert row[0] == {'id': 'foo', 'label': 'vertex'} + assert row[1] == {'id': 'e1', 'label': 'label', 'outV': 'foo', 'inV': 'bar'} + assert row[2] == {'id': 'bar', 'label': 'vertex'} + + #parse path with multiple elements + e2 = Edge("bar", 'out1', 'label', 'in2') + v3 = Vertex("in2") + p1 = Path(labels=["vertex", "label", "vertex"], objects=[v2, e2, v3]) + out = neptune_client._gremlin_results_to_dict([p, p1]) + df = pd.DataFrame.from_records(out) + row = df.iloc[1] + assert df.shape == (2,3) + assert row[0] == {'id': 'bar', 'label': 'vertex'} + assert row[1] == {'id': 'bar', 'label': 'label', 'outV': 'out1', 'inV': 'in2'} + assert row[2] == {'id': 'in2', 'label': 'vertex'} + + #parse path with maps + p = Path(labels=["vertex", "label", "vertex"], objects=[{'name': 'foo', 'age': 29}, {'dist': 32}, {'name': 'bar', 'age': 40}]) + out = neptune_client._gremlin_results_to_dict([p]) + df = pd.DataFrame.from_records(out) + row = df.iloc[0] + assert df.shape == (1,3) + assert row[0]['name'] == 'foo' + assert row[0]['age'] == 29 + assert row[1]['dist'] == 32 + assert row[2]['name'] == 'bar' + assert row[2]['age'] == 40 + + #parse path with mixed elements and maps + p = Path(labels=["vertex", "label", "vertex"], objects=[{'name': 'foo', 'age': 29}, + Edge("bar", 'out1', 'label', 'in2'), {'name': 'bar', 'age': 40}]) + out = neptune_client._gremlin_results_to_dict([p]) + df = pd.DataFrame.from_records(out) + row = df.iloc[0] + assert df.shape == (1,3) + assert row[0]['name'] == 'foo' + assert row[0]['age'] == 29 + assert row[1] == {'id': 'bar', 'label': 'label', 'outV': 'out1', 'inV': 'in2'} + assert row[2]['name'] == 'bar' + assert row[2]['age'] == 40 + + +#parse vertex valueMap +def test_parse_maps(neptune_client): + # parse map + m = {'name': 'foo', 'age': 29} + input = [m] + out = neptune_client._gremlin_results_to_dict(input) + df = pd.DataFrame.from_records(out) 
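+    # Annotation (illustrative, not from the original patch): a plain map has
+    # no single-element lists to unwrap, so _parse_dict should return it
+    # unchanged and the frame below should be a single row with the columns
+    # 'name' and 'age'.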
+ row = df.iloc[0] + assert df.shape == (1,2) + assert row['name'] == 'foo' + assert row['age'] == 29 + + # parse multiple maps with T + m1 = {'name': ['foo'], T.id: '2', 'age': [40], T.label: 'vertex'} + input = [m, m1] + out = neptune_client._gremlin_results_to_dict(input) + df = pd.DataFrame.from_records(out) + row = df.iloc[1] + assert df.shape == (2,4) + assert row['name'] == 'foo' + assert row['age'] == 40 + assert row[T.id] == '2' + assert row[T.label] == 'vertex' + m2 = {'name': ['foo', 'bar'], T.id: '2', T.label: 'vertex'} + input = [m, m1, m2] + out = neptune_client._gremlin_results_to_dict(input) + df = pd.DataFrame.from_records(out) + row = df.iloc[2] + assert df.shape == (3,4) + assert row['name'] == ['foo', 'bar'] + assert row[T.id] == '2' + assert row[T.label] == 'vertex' + +#parse scalar +def test_parse_scalar(neptune_client): + # parse map + m = 12 + n = "Foo" + input = [m, n] + out = neptune_client._gremlin_results_to_dict(input) + df = pd.DataFrame.from_records(out) + row = df.iloc[0] + assert df.shape == (2,1) + assert row[0] == 12 + row = df.iloc[1] + assert row[0] == "Foo" + + +#parse subgraph +def test_parse_subgraph(neptune_client): + m = {'@type': 'tinker:graph', '@value': {'vertices': ['v[45]', 'v[9]'], 'edges': ['e[3990][9-route->45]']}} + input = [m] + out = neptune_client._gremlin_results_to_dict(input) + df = pd.DataFrame.from_records(out) + row = df.iloc[0] + assert df.shape == (1,2) + assert row['@type'] == 'tinker:graph' + assert row['@value'] == {'vertices': ['v[45]', 'v[9]'], 'edges': ['e[3990][9-route->45]']} From 2c84615c074dc0bc8ffbf4131fb8315b07ec1d58 Mon Sep 17 00:00:00 2001 From: Dave Bechberger Date: Wed, 2 Feb 2022 12:04:52 -0900 Subject: [PATCH 08/32] WIP on writing data --- awswrangler/neptune/client.py | 33 ++-- awswrangler/neptune/neptune.py | 16 +- poetry.lock | 18 ++- pyproject.toml | 3 +- tests/test_neptune.py | 84 +++++------ tests/test_neptune_gremlin_parsing.py | 207 -------------------------- 6 files changed, 81 insertions(+), 280 deletions(-) delete mode 100644 tests/test_neptune_gremlin_parsing.py diff --git a/awswrangler/neptune/client.py b/awswrangler/neptune/client.py index d8663b8de..e984c3ab9 100644 --- a/awswrangler/neptune/client.py +++ b/awswrangler/neptune/client.py @@ -18,19 +18,14 @@ DEFAULT_PORT = 8182 NEPTUNE_SERVICE_NAME = 'neptune-db' +HTTP_PROTOCOL = 'https' +WS_PROTOCOL = 'wss' class NeptuneClient(): - def __init__(self, host: str, - port: int = DEFAULT_PORT, - ssl: bool = True, - iam_enabled: bool = False, - boto3_session: Optional[boto3.Session] = None, - region: Optional[str] = None): + def __init__(self, host: str, port: int = DEFAULT_PORT, iam_enabled: bool = False, boto3_session: Optional[boto3.Session] = None, region: Optional[str] = None): self.host = host self.port = port - self._http_protocol = "https" if ssl else "http" - self._ws_protocol = "wss" if ssl else "ws" self.iam_enabled = iam_enabled self.boto3_session = self.__ensure_session(session=boto3_session) if region is None: @@ -94,23 +89,23 @@ def read_opencypher(self, query: str, headers:Dict[str, Any] = None) -> Dict[str if 'content-type' not in headers: headers['content-type'] = 'application/x-www-form-urlencoded' - url = f'{self._http_protocol}://{self.host}:{self.port}/openCypher' + url = f'{HTTP_PROTOCOL}://{self.host}:{self.port}/openCypher' data = { 'query': query } req = self._prepare_request('POST', url, data=data, headers=headers) res = self._http_session.send(req) - - return res.json() + return res.json()['results'] - def read_gremlin(self, 
query, headers:Dict[str, Any] = None) -> Dict[str, Any]: + def read_gremlin(self, query, headers:Dict[str, Any] = None, nest_event_loop:bool=False) -> Dict[str, Any]: try: - nest_asyncio.apply() - uri = f'{self._http_protocol}://{self.host}:{self.port}/gremlin' + if nest_event_loop: + nest_asyncio.apply() + uri = f'{HTTP_PROTOCOL}://{self.host}:{self.port}/gremlin' request = self._prepare_request('GET', uri) - ws_url = f'{self._ws_protocol}://{self.host}:{self.port}/gremlin' + ws_url = f'{WS_PROTOCOL}://{self.host}:{self.port}/gremlin' c = client.Client(ws_url, 'g', headers=dict(request.headers)) result = c.submit(query) future_results = result.all() @@ -131,14 +126,14 @@ def read_sparql(self, query, headers:Dict[str, Any] = None) -> Dict[str, Any]: if 'content-type' not in headers: headers['content-type'] = 'application/x-www-form-urlencoded' - uri = f'{self._http_protocol}://{self.host}:{self.port}/sparql' + uri = f'{HTTP_PROTOCOL}://{self.host}:{self.port}/sparql' req = self._prepare_request('POST', uri, data=data, headers=headers) res = self._http_session.send(req) return res def status(self): - url = f'{self._http_protocol}://{self.host}:{self.port}/status' + url = f'{HTTP_PROTOCOL}://{self.host}:{self.port}/status' req = self._prepare_request('GET', url, data='') res = self._http_session.send(req) if res.status_code == 200: @@ -201,5 +196,5 @@ def _parse_dict(self, data) -> Dict[str, Any]: return d -def connect(host: str, port: str, iam_enabled: bool = False, ssl: bool = True, **kwargs: Any) -> NeptuneClient: - return NeptuneClient(host, port, iam_enabled, ssl, **kwargs) +def connect(host: str, port: str, iam_enabled: bool = False, **kwargs: Any) -> NeptuneClient: + return NeptuneClient(host, port, iam_enabled, **kwargs) diff --git a/awswrangler/neptune/neptune.py b/awswrangler/neptune/neptune.py index a17a8e2e4..b0ede2acf 100644 --- a/awswrangler/neptune/neptune.py +++ b/awswrangler/neptune/neptune.py @@ -5,7 +5,8 @@ def read_gremlin( client: NeptuneClient, - query: str + query: str, + **kwargs ) -> pd.DataFrame: """Return results of a Gremlin traversal as pandas dataframe. @@ -29,13 +30,11 @@ def read_gremlin( >>> client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=False, iam_enabled=False) >>> df = wr.neptune.read_gremlin(client, "g.V().limit(1)") """ - results = client.read_gremlin(query) + results = client.read_gremlin(query, kwargs) df = pd.DataFrame.from_records(results) return df - - def read_opencypher( client: NeptuneClient, query: str @@ -106,7 +105,9 @@ def read_sparql( def to_property_graph( client: NeptuneClient, - df: pd.DataFrame + df: pd.DataFrame, + batch_size: int=50, + **kwargs ) -> None: """Write records stored in a DataFrame into Amazon Neptune. @@ -139,6 +140,11 @@ def to_property_graph( ... df=df ... 
) """ + #check if ~id and ~label column exist and if not throw error + + #Loop through items in the DF + # build up a query + # run the query raise NotImplementedError diff --git a/poetry.lock b/poetry.lock index 4b7da69a4..fff454d84 100644 --- a/poetry.lock +++ b/poetry.lock @@ -199,6 +199,14 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "backoff" +version = "1.11.1" +description = "Function decoration for backoff and retry" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + [[package]] name = "beautifulsoup4" version = "4.10.0" @@ -2268,11 +2276,7 @@ sqlserver = ["pyodbc"] [metadata] lock-version = "1.1" python-versions = ">=3.6.2, <3.11" -<<<<<<< HEAD -content-hash = "87068395350b46ec4bfdae0a07271120ed2ab4af8377581825e487c085a82915" -======= -content-hash = "21cf29b2b21a5f5a820403d164a764354b043846cbd9d79e753d2a00828d08d8" ->>>>>>> 08f1cde ([skip ci] WIP - Initial version of oc and gremlin endpoint read queries and result parsing) +content-hash = "b4872d5d0dd8e77eb7e6039feef8d2135bb496fe7d82c8341f8df0f12e89e8b0" [metadata.files] aenum = [ @@ -2433,6 +2437,10 @@ backcall = [ {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, ] +backoff = [ + {file = "backoff-1.11.1-py2.py3-none-any.whl", hash = "sha256:61928f8fa48d52e4faa81875eecf308eccfb1016b018bb6bd21e05b5d90a96c5"}, + {file = "backoff-1.11.1.tar.gz", hash = "sha256:ccb962a2378418c667b3c979b504fdeb7d9e0d29c0579e3b13b86467177728cb"}, +] beautifulsoup4 = [ {file = "beautifulsoup4-4.10.0-py3-none-any.whl", hash = "sha256:9a315ce70049920ea4572a4055bc4bd700c940521d36fc858205ad4fcde149bf"}, {file = "beautifulsoup4-4.10.0.tar.gz", hash = "sha256:c23ad23c521d818955a4151a67d81580319d4bf548d3d49f4223ae041ff98891"}, diff --git a/pyproject.toml b/pyproject.toml index 25fb38a61..d5e0765e7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "awswrangler" -version = "2.13.0" +version = "2.99.0" description = "Pandas on AWS." 
authors = ["Igor Tavares"] license = "Apache License 2.0" @@ -51,6 +51,7 @@ xlrd = { version = "^2.0.1", python = "~3.6" } xlwt = { version = "^1.3.0", python = "~3.6" } pyodbc = { version = "~4.0.32", optional = true } gremlinpython = "^3.5.2" +backoff = "^1.11.1" [tool.poetry.extras] sqlserver = ["pyodbc"] diff --git a/tests/test_neptune.py b/tests/test_neptune.py index 6e2febec8..ea914d158 100644 --- a/tests/test_neptune.py +++ b/tests/test_neptune.py @@ -1,10 +1,5 @@ -import json import logging -import tempfile -import time -import requests from typing import Any, Dict -from urllib.error import HTTPError import boto3 import pandas as pd @@ -20,10 +15,9 @@ def cloudformation_outputs(): outputs = {} outputs['cluster_resource_id']='XXX' - outputs['endpoint'] = 'air-routes-graph-1509728730.us-west-2.elb.amazonaws.com' - outputs['read_endpoint'] = 'air-routes-graph-1509728730.us-west-2.elb.amazonaws.com' - outputs['port'] = 80 - outputs['ssl'] = False + outputs['endpoint'] = 'air-routes-oc.cluster-cei5pmtr7fqq.us-west-2.neptune.amazonaws.com' + outputs['read_endpoint'] = 'air-routes-oc.cluster-cei5pmtr7fqq.us-west-2.neptune.amazonaws.com' + outputs['port'] = 8182 outputs['iam_enabled'] = False return outputs @@ -43,57 +37,42 @@ def neptune_port(cloudformation_outputs) -> int: return cloudformation_outputs["port"] -def test_connection_neptune(neptune_endpoint, neptune_port): - client = wr.neptune.connect(neptune_endpoint, neptune_port) - resp = client.status() - assert len(resp) > 0 - - -def test_connection_neptune_http(neptune_endpoint, neptune_port): - client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=False, iam_enabled=False) - resp = client.status() - assert len(resp) > 0 - - -@pytest.mark.skip("Need infra") def test_connection_neptune_https(neptune_endpoint, neptune_port): - client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=True, iam_enabled=False) + client = wr.neptune.connect(host=neptune_endpoint, port=neptune_port, iam_enabled=False) resp = client.status() - assert len(resp.text) > 0 + assert resp['status'] == 'healthy' -@pytest.mark.skip("Need infra") -def test_connection_neptune_http_iam(neptune_endpoint, neptune_port): - client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=False, iam_enabled=True) - resp = client.status() - assert len(resp.text) > 0 - - -@pytest.mark.skip("Need infra") def test_connection_neptune_https_iam(neptune_endpoint, neptune_port): - client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=True, iam_enabled=True) + client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=True) resp = client.status() - assert resp.status_code == 200 - assert len(resp.text) > 0 + assert resp['status'] == 'healthy' def test_opencypher_query(neptune_endpoint, neptune_port) -> Dict[str, Any]: - client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=False, iam_enabled=False) + client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) df = wr.neptune.read_opencypher(client, "MATCH (n) RETURN n LIMIT 1") assert isinstance(df, pd.DataFrame) - assert len(df.index) == 1 + assert df.shape == (1, 1) assert isinstance(df, pd.DataFrame) df = wr.neptune.read_opencypher(client, "MATCH (n) RETURN n LIMIT 2") - assert len(df.index) == 2 + assert df.shape == (2, 1) df = wr.neptune.read_opencypher(client, "MATCH p=(n)-[r]->(d) RETURN p LIMIT 1") assert isinstance(df, pd.DataFrame) - assert len(df.index) == 1 + assert df.shape == (1, 1) + + df = wr.neptune.read_opencypher(client, "MATCH (n) RETURN id(n), 
labels(n) LIMIT 1") + assert isinstance(df, pd.DataFrame) + assert df.shape == (1, 2) + row = df.iloc[0] + assert row['id(n)'] + assert row['labels(n)'] def test_gremlin_query_vertices(neptune_endpoint, neptune_port) -> Dict[str, Any]: - client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=False, iam_enabled=False) + client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) df = wr.neptune.read_gremlin(client, "g.V().limit(1)") assert isinstance(df, pd.DataFrame) @@ -102,10 +81,18 @@ def test_gremlin_query_vertices(neptune_endpoint, neptune_port) -> Dict[str, Any df = wr.neptune.read_gremlin(client, "g.V().limit(2)") assert isinstance(df, pd.DataFrame) assert df.shape == (2, 2) + + df = wr.neptune.read_gremlin(client, "g.V().limit(1)", nest_event_loop=True) + assert isinstance(df, pd.DataFrame) + assert df.shape == (1, 2) + + df = wr.neptune.read_gremlin(client, "g.V().limit(2)", nest_event_loop=True) + assert isinstance(df, pd.DataFrame) + assert df.shape == (2, 2) def test_gremlin_query_edges(neptune_endpoint, neptune_port) -> Dict[str, Any]: - client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=False, iam_enabled=False) + client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) df = wr.neptune.read_gremlin(client, "g.E().limit(1)") assert isinstance(df, pd.DataFrame) @@ -115,16 +102,27 @@ def test_gremlin_query_edges(neptune_endpoint, neptune_port) -> Dict[str, Any]: assert isinstance(df, pd.DataFrame) assert df.shape == (2, 4) + df = wr.neptune.read_gremlin(client, "g.E().limit(1)", nest_event_loop=True) + assert isinstance(df, pd.DataFrame) + assert df.shape == (1, 4) + + df = wr.neptune.read_gremlin(client, "g.E().limit(2)", nest_event_loop=True) + assert isinstance(df, pd.DataFrame) + assert df.shape == (2, 4) + def test_gremlin_query_no_results(neptune_endpoint, neptune_port) -> Dict[str, Any]: - client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=False, iam_enabled=False) + client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) df = wr.neptune.read_gremlin(client, "g.V('foo').drop()") assert isinstance(df, pd.DataFrame) + + df = wr.neptune.read_gremlin(client, "g.V('foo').drop()", nest_event_loop=True) + assert isinstance(df, pd.DataFrame) def test_sparql_query(neptune_endpoint, neptune_port) -> Dict[str, Any]: - client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=False, iam_enabled=False) + client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) df = wr.neptune.read_sparql(client, "SELECT ?s ?p ?o {?s ?p ?o} LIMIT 1") assert isinstance(df, pd.DataFrame) assert df.shape == (1,3) diff --git a/tests/test_neptune_gremlin_parsing.py b/tests/test_neptune_gremlin_parsing.py deleted file mode 100644 index 85b4785cb..000000000 --- a/tests/test_neptune_gremlin_parsing.py +++ /dev/null @@ -1,207 +0,0 @@ -import logging - -import pandas as pd -import pytest # type: ignore -from gremlin_python.structure.graph import Path -from gremlin_python.structure.graph import Vertex -from gremlin_python.structure.graph import Edge -from gremlin_python.structure.graph import VertexProperty -from gremlin_python.structure.graph import Property -from gremlin_python.process.traversal import T - -import awswrangler as wr - -logging.getLogger("awswrangler").setLevel(logging.DEBUG) - -@pytest.fixture(scope="session") -def neptune_client() -> wr.neptune.NeptuneClient: - c = object.__new__(wr.neptune.NeptuneClient) - return c - - -#parse Vertex elements -def 
test_parse_vertex_elements(neptune_client): - # parse vertex elements - v = Vertex("foo") - input = [v] - out = neptune_client._gremlin_results_to_dict(input) - df = pd.DataFrame.from_records(out) - row = df.iloc[0] - assert df.shape == (1,2) - assert row['id'] == 'foo' - assert row['label'] == 'vertex' - - # parse multiple vertex elements - v1 = Vertex("bar") - input = [v, v1] - out = neptune_client._gremlin_results_to_dict(input) - df = pd.DataFrame.from_records(out) - row = df.iloc[1] - assert df.shape == (2,2) - assert row['id'] == 'bar' - assert row['label'] == 'vertex' - - -#parse Edge elements -def test_parse_edge_elements(neptune_client): - # parse edge elements - v = Edge("foo", 'out1', 'label', 'in1') - input = [v] - out = neptune_client._gremlin_results_to_dict(input) - df = pd.DataFrame.from_records(out) - row = df.iloc[0] - assert df.shape == (1,4) - assert row['id'] == 'foo' - assert row['outV'] == 'out1' - assert row['label'] == 'label' - assert row['inV'] == 'in1' - - # parse multiple edge elements - v1 = Edge("bar", 'out1', 'label', 'in2') - input = [v, v1] - out = neptune_client._gremlin_results_to_dict(input) - df = pd.DataFrame.from_records(out) - row = df.iloc[1] - assert df.shape == (2,4) - assert row['id'] == 'bar' - assert row['outV'] == 'out1' - assert row['label'] == 'label' - assert row['inV'] == 'in2' - -#parse Property elements -def test_parse_property_elements(neptune_client): - # parse VertexProperty elements - v = VertexProperty("foo", 'name', 'bar', 'v1') - input = [v] - out = neptune_client._gremlin_results_to_dict(input) - df = pd.DataFrame.from_records(out) - row = df.iloc[0] - assert df.shape == (1,5) - assert row['id'] == 'foo' - assert row['label'] == 'name' - assert row['value'] == 'bar' - assert row['key'] == 'name' - assert row['vertex'] == 'v1' - - v = Property("foo", 'name', 'bar') - input = [v] - out = neptune_client._gremlin_results_to_dict(input) - df = pd.DataFrame.from_records(out) - row = df.iloc[0] - assert df.shape == (1,3) - assert row['element'] == 'bar' - assert row['value'] == 'name' - assert row['key'] == 'foo' - - -#parse Path elements -def test_parse_path_elements(neptune_client): - #parse path with elements - v = Vertex("foo") - v2 = Vertex("bar") - e1 = Edge("e1", 'foo', 'label', 'bar') - p = Path(labels=["vertex", "label", "vertex"], objects=[v, e1, v2]) - out = neptune_client._gremlin_results_to_dict([p]) - df = pd.DataFrame.from_records(out) - row = df.iloc[0] - assert df.shape == (1,3) - assert row[0] == {'id': 'foo', 'label': 'vertex'} - assert row[1] == {'id': 'e1', 'label': 'label', 'outV': 'foo', 'inV': 'bar'} - assert row[2] == {'id': 'bar', 'label': 'vertex'} - - #parse path with multiple elements - e2 = Edge("bar", 'out1', 'label', 'in2') - v3 = Vertex("in2") - p1 = Path(labels=["vertex", "label", "vertex"], objects=[v2, e2, v3]) - out = neptune_client._gremlin_results_to_dict([p, p1]) - df = pd.DataFrame.from_records(out) - row = df.iloc[1] - assert df.shape == (2,3) - assert row[0] == {'id': 'bar', 'label': 'vertex'} - assert row[1] == {'id': 'bar', 'label': 'label', 'outV': 'out1', 'inV': 'in2'} - assert row[2] == {'id': 'in2', 'label': 'vertex'} - - #parse path with maps - p = Path(labels=["vertex", "label", "vertex"], objects=[{'name': 'foo', 'age': 29}, {'dist': 32}, {'name': 'bar', 'age': 40}]) - out = neptune_client._gremlin_results_to_dict([p]) - df = pd.DataFrame.from_records(out) - row = df.iloc[0] - assert df.shape == (1,3) - assert row[0]['name'] == 'foo' - assert row[0]['age'] == 29 - assert 
row[1]['dist'] == 32
-    assert row[2]['name'] == 'bar'
-    assert row[2]['age'] == 40
-
-    #parse path with mixed elements and maps
-    p = Path(labels=["vertex", "label", "vertex"], objects=[{'name': 'foo', 'age': 29},
-        Edge("bar", 'out1', 'label', 'in2'), {'name': 'bar', 'age': 40}])
-    out = neptune_client._gremlin_results_to_dict([p])
-    df = pd.DataFrame.from_records(out)
-    row = df.iloc[0]
-    assert df.shape == (1,3)
-    assert row[0]['name'] == 'foo'
-    assert row[0]['age'] == 29
-    assert row[1] == {'id': 'bar', 'label': 'label', 'outV': 'out1', 'inV': 'in2'}
-    assert row[2]['name'] == 'bar'
-    assert row[2]['age'] == 40
-
-
-#parse vertex valueMap
-def test_parse_maps(neptune_client):
-    # parse map
-    m = {'name': 'foo', 'age': 29}
-    input = [m]
-    out = neptune_client._gremlin_results_to_dict(input)
-    df = pd.DataFrame.from_records(out)
-    row = df.iloc[0]
-    assert df.shape == (1,2)
-    assert row['name'] == 'foo'
-    assert row['age'] == 29
-
-    # parse multiple maps with T
-    m1 = {'name': ['foo'], T.id: '2', 'age': [40], T.label: 'vertex'}
-    input = [m, m1]
-    out = neptune_client._gremlin_results_to_dict(input)
-    df = pd.DataFrame.from_records(out)
-    row = df.iloc[1]
-    assert df.shape == (2,4)
-    assert row['name'] == 'foo'
-    assert row['age'] == 40
-    assert row[T.id] == '2'
-    assert row[T.label] == 'vertex'
-    m2 = {'name': ['foo', 'bar'], T.id: '2', T.label: 'vertex'}
-    input = [m, m1, m2]
-    out = neptune_client._gremlin_results_to_dict(input)
-    df = pd.DataFrame.from_records(out)
-    row = df.iloc[2]
-    assert df.shape == (3,4)
-    assert row['name'] == ['foo', 'bar']
-    assert row[T.id] == '2'
-    assert row[T.label] == 'vertex'
-
-#parse scalar
-def test_parse_scalar(neptune_client):
-    # parse map
-    m = 12
-    n = "Foo"
-    input = [m, n]
-    out = neptune_client._gremlin_results_to_dict(input)
-    df = pd.DataFrame.from_records(out)
-    row = df.iloc[0]
-    assert df.shape == (2,1)
-    assert row[0] == 12
-    row = df.iloc[1]
-    assert row[0] == "Foo"
-
-
-#parse subgraph
-def test_parse_subgraph(neptune_client):
-    m = {'@type': 'tinker:graph', '@value': {'vertices': ['v[45]', 'v[9]'], 'edges': ['e[3990][9-route->45]']}}
-    input = [m]
-    out = neptune_client._gremlin_results_to_dict(input)
-    df = pd.DataFrame.from_records(out)
-    row = df.iloc[0]
-    assert df.shape == (1,2)
-    assert row['@type'] == 'tinker:graph'
-    assert row['@value'] == {'vertices': ['v[45]', 'v[9]'], 'edges': ['e[3990][9-route->45]']}

From 494e3301e3643f9c2c073e048d1cc1360428624a Mon Sep 17 00:00:00 2001
From: Dave Bechberger
Date: Fri, 4 Feb 2022 13:59:07 -0900
Subject: [PATCH 09/32] [skip ci] Have a working version of the complete
 roundtrip for property graphs

---
 awswrangler/neptune/__init__.py |  3 +-
 awswrangler/neptune/client.py   | 19 +++++--
 awswrangler/neptune/neptune.py  | 83 ++++++++++++++++++++++++++++--
 tests/test_neptune.py           | 91 +++++++++++++++++++++++++--------
 4 files changed, 167 insertions(+), 29 deletions(-)

diff --git a/awswrangler/neptune/__init__.py b/awswrangler/neptune/__init__.py
index 98f7275fe..3e36d5424 100644
--- a/awswrangler/neptune/__init__.py
+++ b/awswrangler/neptune/__init__.py
@@ -9,5 +9,6 @@
     "read_sparql",
     "to_property_graph",
     "to_rdf_graph",
-    "connect"
+    "connect",
+    "get_graph_traversal_source"
 ]
diff --git a/awswrangler/neptune/client.py b/awswrangler/neptune/client.py
index e984c3ab9..e1cb151a6 100644
--- a/awswrangler/neptune/client.py
+++ b/awswrangler/neptune/client.py
@@ -4,16 +4,15 @@
 from botocore.awsrequest import AWSRequest
 import requests
 from typing import Dict, Optional, Any
-import nest_asyncio
 from 
gremlin_python.driver import client from gremlin_python.structure.graph import Path from gremlin_python.structure.graph import Vertex from gremlin_python.structure.graph import Edge from gremlin_python.structure.graph import VertexProperty from gremlin_python.structure.graph import Property +from gremlin_python.structure.graph import Graph import logging - _logger: logging.Logger = logging.getLogger(__name__) DEFAULT_PORT = 8182 @@ -34,6 +33,7 @@ def __init__(self, host: str, port: int = DEFAULT_PORT, iam_enabled: bool = Fals self.region = region self._http_session = requests.Session() + def __get_region_from_session(self) -> str: """Extract region from session.""" region: Optional[str] = self.boto3_session.region_name @@ -99,10 +99,15 @@ def read_opencypher(self, query: str, headers:Dict[str, Any] = None) -> Dict[str return res.json()['results'] - def read_gremlin(self, query, headers:Dict[str, Any] = None, nest_event_loop:bool=False) -> Dict[str, Any]: + def read_gremlin(self, query) -> Dict[str, Any]: + return self._execute_gremlin(query) + + def write_gremlin(self, query) -> bool: + self._execute_gremlin(query) + return True + + def _execute_gremlin(self, query) -> Dict[str, Any]: try: - if nest_event_loop: - nest_asyncio.apply() uri = f'{HTTP_PROTOCOL}://{self.host}:{self.port}/gremlin' request = self._prepare_request('GET', uri) ws_url = f'{WS_PROTOCOL}://{self.host}:{self.port}/gremlin' @@ -143,6 +148,10 @@ def status(self): raise ConnectionError(res.status_code) + def get_graph_traversal_source(self): + return Graph().traversal() + + def _gremlin_results_to_dict(self, result) -> Dict[str, Any]: res=[] diff --git a/awswrangler/neptune/neptune.py b/awswrangler/neptune/neptune.py index b0ede2acf..e8d6aaf45 100644 --- a/awswrangler/neptune/neptune.py +++ b/awswrangler/neptune/neptune.py @@ -1,6 +1,19 @@ +from pyparsing import col from awswrangler.neptune.client import NeptuneClient from typing import Any, Dict import pandas as pd +from awswrangler import exceptions +from gremlin_python.process.graph_traversal import GraphTraversalSource +from gremlin_python.process.anonymous_traversal import AnonymousTraversalSource +from gremlin_python.process.translator import Translator +from gremlin_python.process.traversal import T +from gremlin_python.process.graph_traversal import __ +from gremlin_python.process.traversal import Cardinality + +import logging + + +_logger: logging.Logger = logging.getLogger(__name__) def read_gremlin( @@ -30,7 +43,7 @@ def read_gremlin( >>> client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=False, iam_enabled=False) >>> df = wr.neptune.read_gremlin(client, "g.V().limit(1)") """ - results = client.read_gremlin(query, kwargs) + results = client.read_gremlin(query, **kwargs) df = pd.DataFrame.from_records(results) return df @@ -141,12 +154,76 @@ def to_property_graph( ... 
) """ #check if ~id and ~label column exist and if not throw error + g = client.get_graph_traversal_source() + is_edge_df=False + if '~id' in df.columns and '~label' in df.columns: + if '~to' in df.columns and '~from' in df.columns: + is_edge_df=True + else: + raise exceptions.InvalidArgumentValue( + "Dataframe must contain at least a ~id and a ~label column to be saved to Amazon Neptune" + ) #Loop through items in the DF + for (index, row) in df.iterrows(): # build up a query + if is_edge_df: + g=_build_gremlin_insert_edges(g, row.to_dict()) + else: + g=_build_gremlin_insert_vertices(g, row.to_dict()) # run the query - raise NotImplementedError - + if index > 0 and index % batch_size == 0: + res = _run_gremlin_insert(client, g) + if res: + g = client.get_graph_traversal_source() + else: + raise Exception("Need to fix why this errors") + + return _run_gremlin_insert(client, g) + +def _build_gremlin_insert_vertices(g:GraphTraversalSource, row:Dict) -> str: + g = (g.V(str(row['~id'])). + fold(). + coalesce( + __.unfold(), + __.addV(row['~label']).property(T.id, str(row['~id']))) + ) + for (column, value) in row.items(): + if column not in ['~id', '~label']: + if type(value) is list and len(value) > 0: + for item in value: + g = g.property(Cardinality.set_, column, item) + elif not pd.isna(value) and not pd.isnull(value): + g = g.property(column, value) + + return g + +def _build_gremlin_insert_edges(g:GraphTraversalSource, row:pd.Series) -> str: + g = (g.V(str(row['~from'])). + fold(). + coalesce( + __.unfold(), + _build_gremlin_insert_vertices(__, {"~id": row['~from'], "~label": "Vertex" })). + addE(row['~label']). + to(__.V(str(row['~to'])).fold().coalesce(__.unfold(), _build_gremlin_insert_vertices(__, {"~id": row['~to'], "~label": "Vertex" }))) + ) + for (column, value) in row.items(): + if column not in ['~id', '~label', '~to', '~from']: + if type(value) is list and len(value) > 0: + for item in value: + g = g.property(Cardinality.set_, column, item) + elif not pd.isna(value) and not pd.isnull(value): + g = g.property(column, value) + + return g + +def _run_gremlin_insert(client:NeptuneClient, g:GraphTraversalSource) -> bool: + translator = Translator('g') + s = translator.translate(g.bytecode) + s = s.replace('Cardinality.', '') # hack to fix parser error for set cardinality + _logger.debug(s) + res = client.write_gremlin(s) + return res def to_rdf_graph( client: NeptuneClient, diff --git a/tests/test_neptune.py b/tests/test_neptune.py index ea914d158..d80d11a26 100644 --- a/tests/test_neptune.py +++ b/tests/test_neptune.py @@ -4,6 +4,9 @@ import boto3 import pandas as pd import pytest # type: ignore +import uuid +import random +import string import awswrangler as wr @@ -81,14 +84,6 @@ def test_gremlin_query_vertices(neptune_endpoint, neptune_port) -> Dict[str, Any df = wr.neptune.read_gremlin(client, "g.V().limit(2)") assert isinstance(df, pd.DataFrame) assert df.shape == (2, 2) - - df = wr.neptune.read_gremlin(client, "g.V().limit(1)", nest_event_loop=True) - assert isinstance(df, pd.DataFrame) - assert df.shape == (1, 2) - - df = wr.neptune.read_gremlin(client, "g.V().limit(2)", nest_event_loop=True) - assert isinstance(df, pd.DataFrame) - assert df.shape == (2, 2) def test_gremlin_query_edges(neptune_endpoint, neptune_port) -> Dict[str, Any]: @@ -102,23 +97,12 @@ def test_gremlin_query_edges(neptune_endpoint, neptune_port) -> Dict[str, Any]: assert isinstance(df, pd.DataFrame) assert df.shape == (2, 4) - df = wr.neptune.read_gremlin(client, "g.E().limit(1)", nest_event_loop=True) - 
assert isinstance(df, pd.DataFrame) - assert df.shape == (1, 4) - - df = wr.neptune.read_gremlin(client, "g.E().limit(2)", nest_event_loop=True) - assert isinstance(df, pd.DataFrame) - assert df.shape == (2, 4) - def test_gremlin_query_no_results(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) df = wr.neptune.read_gremlin(client, "g.V('foo').drop()") assert isinstance(df, pd.DataFrame) - - df = wr.neptune.read_gremlin(client, "g.V('foo').drop()", nest_event_loop=True) - assert isinstance(df, pd.DataFrame) def test_sparql_query(neptune_endpoint, neptune_port) -> Dict[str, Any]: @@ -129,4 +113,71 @@ def test_sparql_query(neptune_endpoint, neptune_port) -> Dict[str, Any]: df = wr.neptune.read_sparql(client, "SELECT ?s ?p ?o {?s ?p ?o} LIMIT 2") assert isinstance(df, pd.DataFrame) - assert df.shape == (2,3) \ No newline at end of file + assert df.shape == (2,3) + + +def test_gremlin_write_vertices(neptune_endpoint, neptune_port) -> Dict[str, Any]: + client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) + initial_cnt_df = wr.neptune.read_gremlin(client, "g.V().hasLabel('foo').count()") + data = [_create_dummy_vertex(), _create_dummy_vertex(), _create_dummy_vertex()] + df = pd.DataFrame(data) + res = wr.neptune.to_property_graph(client, df) + assert res + + final_cnt_df = wr.neptune.read_gremlin(client, "g.V().hasLabel('foo').count()") + assert final_cnt_df.iloc[0][0] == initial_cnt_df.iloc[0][0] + 3 + + # check to make sure batch addition of vertices works + data=[] + for i in range(0, 50): + data.append(_create_dummy_vertex()) + + df = pd.DataFrame(data) + res = wr.neptune.to_property_graph(client, df) + assert res + + batch_cnt_df = wr.neptune.read_gremlin(client, "g.V().hasLabel('foo').count()") + assert batch_cnt_df.iloc[0][0] == final_cnt_df.iloc[0][0] + 50 + +def test_gremlin_write_edges(neptune_endpoint, neptune_port) -> Dict[str, Any]: + client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) + initial_cnt_df = wr.neptune.read_gremlin(client, "g.E().hasLabel('bar').count()") + + data = [_create_dummy_edge(), _create_dummy_edge(), _create_dummy_edge()] + df = pd.DataFrame(data) + res = wr.neptune.to_property_graph(client, df) + assert res + + final_cnt_df = wr.neptune.read_gremlin(client, "g.E().hasLabel('bar').count()") + assert final_cnt_df.iloc[0][0] == initial_cnt_df.iloc[0][0] + 3 + + # check to make sure batch addition of edges works + data=[] + for i in range(0, 50): + data.append(_create_dummy_edge()) + + df = pd.DataFrame(data) + res = wr.neptune.to_property_graph(client, df) + assert res + + batch_cnt_df = wr.neptune.read_gremlin(client, "g.E().hasLabel('bar').count()") + assert batch_cnt_df.iloc[0][0] == final_cnt_df.iloc[0][0] + 50 + +def _create_dummy_vertex() -> Dict[str, Any]: + data = dict() + data['~id']=uuid.uuid4() + data['~label']='foo' + data['int'] = random.randint(0, 1000) + data['str'] = ''.join(random.choice(string.ascii_lowercase) for i in range(10)) + data['list'] = [random.randint(0, 1000), random.randint(0, 1000)] + return data + +def _create_dummy_edge() -> Dict[str, Any]: + data = dict() + data['~id']=uuid.uuid4() + data['~label']='bar' + data['~to']=uuid.uuid4() + data['~from']=uuid.uuid4() + data['int'] = random.randint(0, 1000) + data['str'] = ''.join(random.choice(string.ascii_lowercase) for i in range(10)) + return data \ No newline at end of file From b4531c2cf5ca92973576720f47ed1bb2d5aa33ac Mon Sep 17 00:00:00 2001 From: 
Dave Bechberger
Date: Fri, 4 Feb 2022 17:45:01 -0900
Subject: [PATCH 10/32] [skip ci] Refactored shared logic into a utils module
 and moved the Gremlin parsing code out of the client into its own static
 class

---
 awswrangler/neptune/__init__.py       |  15 +-
 awswrangler/neptune/_utils.py         | 119 +++++++++++++++
 awswrangler/neptune/client.py         | 102 +++----------
 awswrangler/neptune/gremlin_parser.py |  64 ++++++++
 awswrangler/neptune/neptune.py        |  97 ++++++------
 tests/test_neptune.py                 |  34 ++---
 tests/test_neptune_parsing.py         | 209 ++++++++++++++++++++++++++
 7 files changed, 486 insertions(+), 154 deletions(-)
 create mode 100644 awswrangler/neptune/_utils.py
 create mode 100644 awswrangler/neptune/gremlin_parser.py
 create mode 100644 tests/test_neptune_parsing.py

diff --git a/awswrangler/neptune/__init__.py b/awswrangler/neptune/__init__.py
index 3e36d5424..758fe3263 100644
--- a/awswrangler/neptune/__init__.py
+++ b/awswrangler/neptune/__init__.py
@@ -1,14 +1,15 @@
 """Utilities Module for Amazon Neptune."""

-from awswrangler.neptune.neptune import read_gremlin, read_opencypher, read_sparql, to_property_graph, to_rdf_graph
-from awswrangler.neptune.client import connect, NeptuneClient
+from awswrangler.neptune.neptune import execute_gremlin, execute_opencypher, execute_sparql, to_property_graph, \
+    to_rdf_graph, connect
+from awswrangler.neptune.gremlin_parser import GremlinParser
+from awswrangler.neptune.client import NeptuneClient

 __all__ = [
-    "read_gremlin",
-    "read_opencypher",
-    "read_sparql",
+    "execute_gremlin",
+    "execute_opencypher",
+    "execute_sparql",
     "to_property_graph",
     "to_rdf_graph",
-    "connect",
-    "get_graph_traversal_source"
+    "connect"
 ]
diff --git a/awswrangler/neptune/_utils.py b/awswrangler/neptune/_utils.py
new file mode 100644
index 000000000..241a5bad6
--- /dev/null
+++ b/awswrangler/neptune/_utils.py
@@ -0,0 +1,119 @@
+from awswrangler.neptune.client import NeptuneClient
+import pandas as pd
+from gremlin_python.process.graph_traversal import GraphTraversalSource
+from gremlin_python.process.traversal import T
+from gremlin_python.process.graph_traversal import __
+from gremlin_python.structure.graph import Graph
+
+from gremlin_python.process.traversal import Cardinality
+from gremlin_python.process.translator import Translator
+from typing import Dict, Any
+from enum import Enum
+
+import logging
+
+_logger: logging.Logger = logging.getLogger(__name__)
+
+
+class WriteDFType(Enum):
+    VERTEX = 1
+    EDGE = 2
+    UPDATE = 3
+
+
+def write_gremlin_df(client: NeptuneClient, df: pd.DataFrame, mode: WriteDFType, batch_size: int) -> bool:
+    """Write the provided dataframe using Gremlin.
+
+    Args:
+        client (NeptuneClient): The Neptune client to write the dataframe
+        df (pd.DataFrame): The dataframe to write
+        mode (WriteDFType): The type of dataframe to write
+        batch_size (int): The size of the batch to write
+
+    Raises:
+        Exception: The underlying write exception is raised
+
+    Returns:
+        (bool): True if the write operation succeeded
+    """
+    g = Graph().traversal()
+    # Loop through items in the DF
+    for (index, row) in df.iterrows():
+        # build up a query
+        if mode == WriteDFType.EDGE:
+            g = _build_gremlin_edges(g, row.to_dict())
+        elif mode == WriteDFType.VERTEX:
+            g = _build_gremlin_vertices(g, row.to_dict())
+        else:
+            g = _build_gremlin_update(g, row.to_dict())
+        # run the query
+        if index > 0 and index % batch_size == 0:
+            res = _run_gremlin_insert(client, g)
+            if res:
+                g = Graph().traversal()
+            else:
+                raise Exception("Need to fix why this errors")
+
+    return 
_run_gremlin_insert(client, g) + + +def _run_gremlin_insert(client: NeptuneClient, g: GraphTraversalSource) -> bool: + translator = Translator('g') + s = translator.translate(g.bytecode) + s = s.replace('Cardinality.', '') # hack to fix parser error for set cardinality + _logger.debug(s) + res = client.write_gremlin(s) + return res + + +def _build_gremlin_update(g: GraphTraversalSource, row: Dict) -> GraphTraversalSource: + g = g.V(str(row['~id'])) + g = _build_gremlin_properties(g, row) + + return g + + +def _build_gremlin_vertices(g: GraphTraversalSource, row: Dict) -> GraphTraversalSource: + g = (g.V(str(row['~id'])). + fold(). + coalesce( + __.unfold(), + __.addV(row['~label']).property(T.id, str(row['~id']))) + ) + g = _build_gremlin_properties(g, row) + + return g + + +def _build_gremlin_edges(g: GraphTraversalSource, row: pd.Series) -> GraphTraversalSource: + g = (g.V(str(row['~from'])). + fold(). + coalesce( + __.unfold(), + _build_gremlin_vertices( + __, {"~id": row['~from'], "~label": "Vertex"})). + addE(row['~label']). + to( + __.V(str(row['~to'])). + fold(). + coalesce( + __.unfold(), + _build_gremlin_vertices( + __, {"~id": row['~to'], "~label": "Vertex"}))) + ) + g = _build_gremlin_properties(g, row) + + return g + + +def _build_gremlin_properties(g, row): + for (column, value) in row.items(): + if column not in ['~id', '~label', '~to', '~from']: + if type(value) is list and len(value) > 0: + for item in value: + g = g.property(Cardinality.set_, column, item) + elif not pd.isna(value) and not pd.isnull(value): + g = g.property(column, value) + return g + + diff --git a/awswrangler/neptune/client.py b/awswrangler/neptune/client.py index e1cb151a6..48dedf31a 100644 --- a/awswrangler/neptune/client.py +++ b/awswrangler/neptune/client.py @@ -5,12 +5,8 @@ import requests from typing import Dict, Optional, Any from gremlin_python.driver import client -from gremlin_python.structure.graph import Path -from gremlin_python.structure.graph import Vertex -from gremlin_python.structure.graph import Edge -from gremlin_python.structure.graph import VertexProperty -from gremlin_python.structure.graph import Property -from gremlin_python.structure.graph import Graph +from awswrangler.neptune.gremlin_parser import GremlinParser + import logging _logger: logging.Logger = logging.getLogger(__name__) @@ -21,8 +17,9 @@ WS_PROTOCOL = 'wss' -class NeptuneClient(): - def __init__(self, host: str, port: int = DEFAULT_PORT, iam_enabled: bool = False, boto3_session: Optional[boto3.Session] = None, region: Optional[str] = None): +class NeptuneClient: + def __init__(self, host: str, port: int = DEFAULT_PORT, iam_enabled: bool = False, + boto3_session: Optional[boto3.Session] = None, region: Optional[str] = None): self.host = host self.port = port self.iam_enabled = iam_enabled @@ -33,7 +30,6 @@ def __init__(self, host: str, port: int = DEFAULT_PORT, iam_enabled: bool = Fals self.region = region self._http_session = requests.Session() - def __get_region_from_session(self) -> str: """Extract region from session.""" region: Optional[str] = self.boto3_session.region_name @@ -41,8 +37,8 @@ def __get_region_from_session(self) -> str: return region raise exceptions.InvalidArgument("There is no region_name defined on boto3, please configure it.") - - def __ensure_session(self, session: boto3.Session = None) -> boto3.Session: + @staticmethod + def __ensure_session(session: boto3.Session = None) -> boto3.Session: """Ensure that a valid boto3.Session will be returned.""" if session is not None: return session @@ 
-51,8 +47,8 @@ def __ensure_session(self, session: boto3.Session = None) -> boto3.Session: else: return boto3.Session() - - def _prepare_request(self, method, url, *, data=None, params=None, headers=None, service=NEPTUNE_SERVICE_NAME) -> requests.PreparedRequest: + def _prepare_request(self, method, url, *, data=None, params=None, headers=None, + service=NEPTUNE_SERVICE_NAME) -> requests.PreparedRequest: request = requests.Request(method=method, url=url, data=data, params=params, headers=headers) if self.boto3_session is not None: aws_request = self._get_aws_request(method=method, url=url, data=data, params=params, headers=headers, @@ -61,8 +57,8 @@ def _prepare_request(self, method, url, *, data=None, params=None, headers=None, return request.prepare() - - def _get_aws_request(self, method, url, *, data=None, params=None, headers=None, service=NEPTUNE_SERVICE_NAME) -> AWSRequest: + def _get_aws_request(self, method, url, *, data=None, params=None, headers=None, + service=NEPTUNE_SERVICE_NAME) -> AWSRequest: req = AWSRequest(method=method, url=url, data=data, params=params, headers=headers) if self.iam_enabled: credentials = self.boto3_session.get_credentials() @@ -81,8 +77,7 @@ def _get_aws_request(self, method, url, *, data=None, params=None, headers=None, else: return req - - def read_opencypher(self, query: str, headers:Dict[str, Any] = None) -> Dict[str, Any]: + def read_opencypher(self, query: str, headers: Dict[str, Any] = None) -> Dict[str, Any]: if headers is None: headers = {} @@ -98,14 +93,13 @@ def read_opencypher(self, query: str, headers:Dict[str, Any] = None) -> Dict[str res = self._http_session.send(req) return res.json()['results'] - - def read_gremlin(self, query) -> Dict[str, Any]: + def read_gremlin(self, query) -> Any: return self._execute_gremlin(query) def write_gremlin(self, query) -> bool: self._execute_gremlin(query) return True - + def _execute_gremlin(self, query) -> Dict[str, Any]: try: uri = f'{HTTP_PROTOCOL}://{self.host}:{self.port}/gremlin' @@ -116,16 +110,15 @@ def _execute_gremlin(self, query) -> Dict[str, Any]: future_results = result.all() results = future_results.result() c.close() - return self._gremlin_results_to_dict(results) + return GremlinParser.gremlin_results_to_dict(results) except Exception as e: c.close() raise e - - def read_sparql(self, query, headers:Dict[str, Any] = None) -> Dict[str, Any]: + def read_sparql(self, query, headers: Dict[str, Any] = None) -> Dict[str, Any]: if headers is None: headers = {} - + data = {'query': query} if 'content-type' not in headers: @@ -136,7 +129,6 @@ def read_sparql(self, query, headers:Dict[str, Any] = None) -> Dict[str, Any]: res = self._http_session.send(req) return res - def status(self): url = f'{HTTP_PROTOCOL}://{self.host}:{self.port}/status' req = self._prepare_request('GET', url, data='') @@ -147,63 +139,3 @@ def status(self): _logger.error("Error connecting to Amazon Neptune cluster. 
Please verify your connection details") raise ConnectionError(res.status_code) - - def get_graph_traversal_source(self): - return Graph().traversal() - - - def _gremlin_results_to_dict(self, result) -> Dict[str, Any]: - res=[] - - # For lists or paths unwind them - if isinstance(result, list) or isinstance(result, Path): - for x in result: - res.append(self._parse_dict(x)) - - # For dictionaries just add them - elif isinstance(result, dict): - res.append(result) - - # For everything else parse them - else: - res.append(self._parse_dict(result)) - return res - - - def _parse_dict(self, data) -> Dict[str, Any]: - d = dict() - - # If this is a list or Path then unwind it - if isinstance(data, list) or isinstance(data, Path): - res=[] - for x in data: - res.append(self._parse_dict(x)) - return res - - # If this is an element then make it a dictionary - elif isinstance(data, Vertex) or isinstance(data,Edge) or isinstance(data, VertexProperty) or isinstance(data, Property): - data=data.__dict__ - - # If this is a scalar then create a Map with it - elif not hasattr(data, "__len__") or isinstance(data, str): - data = {0: data} - - for (k, v) in data.items(): - # If the key is a Vertex or an Edge do special processing - if isinstance(k, Vertex) or isinstance(k, Edge): - k = k.id - - # If the value is a list do special processing to make it a scalar if the list is of length 1 - if isinstance(v, list) and len(v) == 1: - d[k] = v[0] - else: - d[k] = v - - # If the value is a Vertex or Edge do special processing - if isinstance(d[k], Vertex) or isinstance(d[k], Edge) or isinstance(d[k], VertexProperty) or isinstance(d[k], Property): - d[k] = d[k].__dict__ - return d - - -def connect(host: str, port: str, iam_enabled: bool = False, **kwargs: Any) -> NeptuneClient: - return NeptuneClient(host, port, iam_enabled, **kwargs) diff --git a/awswrangler/neptune/gremlin_parser.py b/awswrangler/neptune/gremlin_parser.py new file mode 100644 index 000000000..76873c21e --- /dev/null +++ b/awswrangler/neptune/gremlin_parser.py @@ -0,0 +1,64 @@ +from typing import Dict, Any + +from gremlin_python.structure.graph import Path +from gremlin_python.structure.graph import Vertex +from gremlin_python.structure.graph import Edge +from gremlin_python.structure.graph import VertexProperty +from gremlin_python.structure.graph import Property + + +class GremlinParser: + @staticmethod + def gremlin_results_to_dict(result) -> Dict[str, Any]: + res = [] + + # For lists or paths unwind them + if isinstance(result, list) or isinstance(result, Path): + for x in result: + res.append(GremlinParser._parse_dict(x)) + + # For dictionaries just add them + elif isinstance(result, dict): + res.append(result) + + # For everything else parse them + else: + res.append(GremlinParser._parse_dict(result)) + return res + + @staticmethod + def _parse_dict(data) -> Dict[str, Any]: + d = dict() + + # If this is a list or Path then unwind it + if isinstance(data, list) or isinstance(data, Path): + res = [] + for x in data: + res.append(GremlinParser._parse_dict(x)) + return res + + # If this is an element then make it a dictionary + elif isinstance(data, Vertex) or isinstance(data, Edge) or isinstance(data, VertexProperty) or isinstance(data, + Property): + data = data.__dict__ + + # If this is a scalar then create a Map with it + elif not hasattr(data, "__len__") or isinstance(data, str): + data = {0: data} + + for (k, v) in data.items(): + # If the key is a Vertex or an Edge do special processing + if isinstance(k, Vertex) or isinstance(k, Edge): + k 
= k.id + + # If the value is a list do special processing to make it a scalar if the list is of length 1 + if isinstance(v, list) and len(v) == 1: + d[k] = v[0] + else: + d[k] = v + + # If the value is a Vertex or Edge do special processing + if isinstance(d[k], Vertex) or isinstance(d[k], Edge) or isinstance(d[k], VertexProperty) or isinstance( + d[k], Property): + d[k] = d[k].__dict__ + return d diff --git a/awswrangler/neptune/neptune.py b/awswrangler/neptune/neptune.py index e8d6aaf45..4dec0035a 100644 --- a/awswrangler/neptune/neptune.py +++ b/awswrangler/neptune/neptune.py @@ -1,25 +1,24 @@ from pyparsing import col from awswrangler.neptune.client import NeptuneClient -from typing import Any, Dict +from typing import Dict, Any import pandas as pd from awswrangler import exceptions from gremlin_python.process.graph_traversal import GraphTraversalSource -from gremlin_python.process.anonymous_traversal import AnonymousTraversalSource from gremlin_python.process.translator import Translator from gremlin_python.process.traversal import T from gremlin_python.process.graph_traversal import __ from gremlin_python.process.traversal import Cardinality +from gremlin_python.structure.graph import Graph import logging - _logger: logging.Logger = logging.getLogger(__name__) -def read_gremlin( - client: NeptuneClient, - query: str, - **kwargs +def execute_gremlin( + client: NeptuneClient, + query: str, + **kwargs ) -> pd.DataFrame: """Return results of a Gremlin traversal as pandas dataframe. @@ -41,16 +40,16 @@ def read_gremlin( >>> import awswrangler as wr >>> client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=False, iam_enabled=False) - >>> df = wr.neptune.read_gremlin(client, "g.V().limit(1)") + >>> df = wr.neptune.execute_gremlin(client, "g.V().limit(1)") """ results = client.read_gremlin(query, **kwargs) df = pd.DataFrame.from_records(results) return df -def read_opencypher( - client: NeptuneClient, - query: str +def execute_opencypher( + client: NeptuneClient, + query: str ) -> pd.DataFrame: """Return results of a openCypher traversal as pandas dataframe. @@ -72,16 +71,16 @@ def read_opencypher( >>> import awswrangler as wr >>> client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=True, iam_enabled=False) - >>> resp = wr.neptune.read_opencypher(client, "MATCH (n) RETURN n LIMIT 1") + >>> resp = wr.neptune.execute_opencypher(client, "MATCH (n) RETURN n LIMIT 1") """ resp = client.read_opencypher(query) df = pd.DataFrame.from_dict(resp) return df -def read_sparql( - client: NeptuneClient, - query: str +def execute_sparql( + client: NeptuneClient, + query: str ) -> pd.DataFrame: """Return results of a SPARQL query as pandas dataframe. @@ -103,7 +102,7 @@ def read_sparql( >>> import awswrangler as wr >>> client = wr.neptune.Client(host='NEPTUNE-ENDPOINT') - >>> df = wr.neptune.read_sparql(client, "PREFIX foaf: + >>> df = wr.neptune.execute_sparql(client, "PREFIX foaf: SELECT ?name WHERE { ?person foaf:name ?name . @@ -117,10 +116,10 @@ def read_sparql( def to_property_graph( - client: NeptuneClient, - df: pd.DataFrame, - batch_size: int=50, - **kwargs + client: NeptuneClient, + df: pd.DataFrame, + batch_size: int = 50, + **kwargs ) -> None: """Write records stored in a DataFrame into Amazon Neptune. @@ -153,40 +152,41 @@ def to_property_graph( ... df=df ... 
) """ - #check if ~id and ~label column exist and if not throw error - g = client.get_graph_traversal_source() - is_edge_df=False + # check if ~id and ~label column exist and if not throw error + g = Graph().traversal() + is_edge_df = False if '~id' in df.columns and '~label' in df.columns: if '~to' in df.columns and '~from' in df.columns: - is_edge_df=True + is_edge_df = True else: raise exceptions.InvalidArgumentValue( - "Dataframe must contain at least a ~id and a ~label column to be saved to Amazon Neptune" + "Dataframe must contain at least a ~id and a ~label column to be saved to Amazon Neptune" ) - #Loop through items in the DF + # Loop through items in the DF for (index, row) in df.iterrows(): # build up a query if is_edge_df: - g=_build_gremlin_insert_edges(g, row.to_dict()) + g = _build_gremlin_insert_edges(g, row.to_dict()) else: - g=_build_gremlin_insert_vertices(g, row.to_dict()) + g = _build_gremlin_insert_vertices(g, row.to_dict()) # run the query if index > 0 and index % batch_size == 0: res = _run_gremlin_insert(client, g) if res: - g = client.get_graph_traversal_source() + g = Graph().traversal() else: raise Exception("Need to fix why this errors") return _run_gremlin_insert(client, g) -def _build_gremlin_insert_vertices(g:GraphTraversalSource, row:Dict) -> str: + +def _build_gremlin_insert_vertices(g: GraphTraversalSource, row: Dict) -> str: g = (g.V(str(row['~id'])). fold(). coalesce( - __.unfold(), - __.addV(row['~label']).property(T.id, str(row['~id']))) + __.unfold(), + __.addV(row['~label']).property(T.id, str(row['~id']))) ) for (column, value) in row.items(): if column not in ['~id', '~label']: @@ -198,15 +198,17 @@ def _build_gremlin_insert_vertices(g:GraphTraversalSource, row:Dict) -> str: return g -def _build_gremlin_insert_edges(g:GraphTraversalSource, row:pd.Series) -> str: + +def _build_gremlin_insert_edges(g: GraphTraversalSource, row: pd.Series) -> str: g = (g.V(str(row['~from'])). - fold(). - coalesce( - __.unfold(), - _build_gremlin_insert_vertices(__, {"~id": row['~from'], "~label": "Vertex" })). - addE(row['~label']). - to(__.V(str(row['~to'])).fold().coalesce(__.unfold(), _build_gremlin_insert_vertices(__, {"~id": row['~to'], "~label": "Vertex" }))) - ) + fold(). + coalesce( + __.unfold(), + _build_gremlin_insert_vertices(__, {"~id": row['~from'], "~label": "Vertex"})). + addE(row['~label']). + to(__.V(str(row['~to'])).fold().coalesce(__.unfold(), _build_gremlin_insert_vertices(__, {"~id": row['~to'], + "~label": "Vertex"}))) + ) for (column, value) in row.items(): if column not in ['~id', '~label', '~to', '~from']: if type(value) is list and len(value) > 0: @@ -214,20 +216,22 @@ def _build_gremlin_insert_edges(g:GraphTraversalSource, row:pd.Series) -> str: g = g.property(Cardinality.set_, column, item) elif not pd.isna(value) and not pd.isnull(value): g = g.property(column, value) - + return g -def _run_gremlin_insert(client:NeptuneClient, g:GraphTraversalSource) -> bool: + +def _run_gremlin_insert(client: NeptuneClient, g: GraphTraversalSource) -> bool: translator = Translator('g') s = translator.translate(g.bytecode) - s = s.replace('Cardinality.', '') # hack to fix parser error for set cardinality + s = s.replace('Cardinality.', '') # hack to fix parser error for set cardinality _logger.debug(s) res = client.write_gremlin(s) return res + def to_rdf_graph( - client: NeptuneClient, - df: pd.DataFrame + client: NeptuneClient, + df: pd.DataFrame ) -> None: """Write records stored in a DataFrame into Amazon Neptune. 
@@ -257,3 +261,6 @@ def to_rdf_graph( """ raise NotImplementedError + +def connect(host: str, port: str, iam_enabled: bool = False, **kwargs: Any) -> NeptuneClient: + return NeptuneClient(host, port, iam_enabled, **kwargs) diff --git a/tests/test_neptune.py b/tests/test_neptune.py index d80d11a26..e83e66a0d 100644 --- a/tests/test_neptune.py +++ b/tests/test_neptune.py @@ -54,19 +54,19 @@ def test_connection_neptune_https_iam(neptune_endpoint, neptune_port): def test_opencypher_query(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) - df = wr.neptune.read_opencypher(client, "MATCH (n) RETURN n LIMIT 1") + df = wr.neptune.execute_opencypher(client, "MATCH (n) RETURN n LIMIT 1") assert isinstance(df, pd.DataFrame) assert df.shape == (1, 1) assert isinstance(df, pd.DataFrame) - df = wr.neptune.read_opencypher(client, "MATCH (n) RETURN n LIMIT 2") + df = wr.neptune.execute_opencypher(client, "MATCH (n) RETURN n LIMIT 2") assert df.shape == (2, 1) - df = wr.neptune.read_opencypher(client, "MATCH p=(n)-[r]->(d) RETURN p LIMIT 1") + df = wr.neptune.execute_opencypher(client, "MATCH p=(n)-[r]->(d) RETURN p LIMIT 1") assert isinstance(df, pd.DataFrame) assert df.shape == (1, 1) - df = wr.neptune.read_opencypher(client, "MATCH (n) RETURN id(n), labels(n) LIMIT 1") + df = wr.neptune.execute_opencypher(client, "MATCH (n) RETURN id(n), labels(n) LIMIT 1") assert isinstance(df, pd.DataFrame) assert df.shape == (1, 2) row = df.iloc[0] @@ -77,11 +77,11 @@ def test_opencypher_query(neptune_endpoint, neptune_port) -> Dict[str, Any]: def test_gremlin_query_vertices(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) - df = wr.neptune.read_gremlin(client, "g.V().limit(1)") + df = wr.neptune.execute_gremlin(client, "g.V().limit(1)") assert isinstance(df, pd.DataFrame) assert df.shape == (1, 2) - df = wr.neptune.read_gremlin(client, "g.V().limit(2)") + df = wr.neptune.execute_gremlin(client, "g.V().limit(2)") assert isinstance(df, pd.DataFrame) assert df.shape == (2, 2) @@ -89,11 +89,11 @@ def test_gremlin_query_vertices(neptune_endpoint, neptune_port) -> Dict[str, Any def test_gremlin_query_edges(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) - df = wr.neptune.read_gremlin(client, "g.E().limit(1)") + df = wr.neptune.execute_gremlin(client, "g.E().limit(1)") assert isinstance(df, pd.DataFrame) assert df.shape == (1, 4) - df = wr.neptune.read_gremlin(client, "g.E().limit(2)") + df = wr.neptune.execute_gremlin(client, "g.E().limit(2)") assert isinstance(df, pd.DataFrame) assert df.shape == (2, 4) @@ -101,30 +101,30 @@ def test_gremlin_query_edges(neptune_endpoint, neptune_port) -> Dict[str, Any]: def test_gremlin_query_no_results(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) - df = wr.neptune.read_gremlin(client, "g.V('foo').drop()") + df = wr.neptune.execute_gremlin(client, "g.V('foo').drop()") assert isinstance(df, pd.DataFrame) def test_sparql_query(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) - df = wr.neptune.read_sparql(client, "SELECT ?s ?p ?o {?s ?p ?o} LIMIT 1") + df = wr.neptune.execute_sparql(client, "SELECT ?s ?p ?o {?s ?p ?o} LIMIT 1") assert isinstance(df, pd.DataFrame) assert df.shape == (1,3) - 
df = wr.neptune.read_sparql(client, "SELECT ?s ?p ?o {?s ?p ?o} LIMIT 2") + df = wr.neptune.execute_sparql(client, "SELECT ?s ?p ?o {?s ?p ?o} LIMIT 2") assert isinstance(df, pd.DataFrame) assert df.shape == (2,3) def test_gremlin_write_vertices(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) - initial_cnt_df = wr.neptune.read_gremlin(client, "g.V().hasLabel('foo').count()") + initial_cnt_df = wr.neptune.execute_gremlin(client, "g.V().hasLabel('foo').count()") data = [_create_dummy_vertex(), _create_dummy_vertex(), _create_dummy_vertex()] df = pd.DataFrame(data) res = wr.neptune.to_property_graph(client, df) assert res - final_cnt_df = wr.neptune.read_gremlin(client, "g.V().hasLabel('foo').count()") + final_cnt_df = wr.neptune.execute_gremlin(client, "g.V().hasLabel('foo').count()") assert final_cnt_df.iloc[0][0] == initial_cnt_df.iloc[0][0] + 3 # check to make sure batch addition of vertices works @@ -136,19 +136,19 @@ def test_gremlin_write_vertices(neptune_endpoint, neptune_port) -> Dict[str, Any res = wr.neptune.to_property_graph(client, df) assert res - batch_cnt_df = wr.neptune.read_gremlin(client, "g.V().hasLabel('foo').count()") + batch_cnt_df = wr.neptune.execute_gremlin(client, "g.V().hasLabel('foo').count()") assert batch_cnt_df.iloc[0][0] == final_cnt_df.iloc[0][0] + 50 def test_gremlin_write_edges(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) - initial_cnt_df = wr.neptune.read_gremlin(client, "g.E().hasLabel('bar').count()") + initial_cnt_df = wr.neptune.execute_gremlin(client, "g.E().hasLabel('bar').count()") data = [_create_dummy_edge(), _create_dummy_edge(), _create_dummy_edge()] df = pd.DataFrame(data) res = wr.neptune.to_property_graph(client, df) assert res - final_cnt_df = wr.neptune.read_gremlin(client, "g.E().hasLabel('bar').count()") + final_cnt_df = wr.neptune.execute_gremlin(client, "g.E().hasLabel('bar').count()") assert final_cnt_df.iloc[0][0] == initial_cnt_df.iloc[0][0] + 3 # check to make sure batch addition of edges works @@ -160,7 +160,7 @@ def test_gremlin_write_edges(neptune_endpoint, neptune_port) -> Dict[str, Any]: res = wr.neptune.to_property_graph(client, df) assert res - batch_cnt_df = wr.neptune.read_gremlin(client, "g.E().hasLabel('bar').count()") + batch_cnt_df = wr.neptune.execute_gremlin(client, "g.E().hasLabel('bar').count()") assert batch_cnt_df.iloc[0][0] == final_cnt_df.iloc[0][0] + 50 def _create_dummy_vertex() -> Dict[str, Any]: diff --git a/tests/test_neptune_parsing.py b/tests/test_neptune_parsing.py new file mode 100644 index 000000000..02bf3cdf3 --- /dev/null +++ b/tests/test_neptune_parsing.py @@ -0,0 +1,209 @@ +import logging + +import pandas as pd +import pytest # type: ignore +from gremlin_python.structure.graph import Path +from gremlin_python.structure.graph import Vertex +from gremlin_python.structure.graph import Edge +from gremlin_python.structure.graph import VertexProperty +from gremlin_python.structure.graph import Property +from gremlin_python.process.traversal import T + +import awswrangler as wr + +logging.getLogger("awswrangler").setLevel(logging.DEBUG) + +@pytest.fixture(scope="session") +def gremlin_parser() -> wr.neptune.GremlinParser: + c = object.__new__(wr.neptune.GremlinParser) + return c + + +#parse Vertex elements +def test_parse_gremlin_vertex_elements(gremlin_parser): + # parse vertex elements + v = Vertex("foo") + input = [v] + out = 
gremlin_parser.gremlin_results_to_dict(input) + df = pd.DataFrame.from_records(out) + row = df.iloc[0] + assert df.shape == (1,2) + assert row['id'] == 'foo' + assert row['label'] == 'vertex' + + # parse multiple vertex elements + v1 = Vertex("bar") + input = [v, v1] + out = gremlin_parser.gremlin_results_to_dict(input) + df = pd.DataFrame.from_records(out) + row = df.iloc[1] + assert df.shape == (2,2) + assert row['id'] == 'bar' + assert row['label'] == 'vertex' + + +#parse Edge elements +def test_parse_gremlin_edge_elements(gremlin_parser): + # parse edge elements + v = Edge("foo", 'out1', 'label', 'in1') + input = [v] + out = gremlin_parser.gremlin_results_to_dict(input) + df = pd.DataFrame.from_records(out) + row = df.iloc[0] + assert df.shape == (1,4) + assert row['id'] == 'foo' + assert row['outV'] == 'out1' + assert row['label'] == 'label' + assert row['inV'] == 'in1' + + # parse multiple edge elements + v1 = Edge("bar", 'out1', 'label', 'in2') + input = [v, v1] + out = gremlin_parser.gremlin_results_to_dict(input) + df = pd.DataFrame.from_records(out) + row = df.iloc[1] + assert df.shape == (2,4) + assert row['id'] == 'bar' + assert row['outV'] == 'out1' + assert row['label'] == 'label' + assert row['inV'] == 'in2' + + +#parse Property elements +def test_parse_gremlin_property_elements(gremlin_parser): + # parse VertexProperty elements + v = VertexProperty("foo", 'name', 'bar', 'v1') + input = [v] + out = gremlin_parser.gremlin_results_to_dict(input) + df = pd.DataFrame.from_records(out) + row = df.iloc[0] + assert df.shape == (1,5) + assert row['id'] == 'foo' + assert row['label'] == 'name' + assert row['value'] == 'bar' + assert row['key'] == 'name' + assert row['vertex'] == 'v1' + + v = Property("foo", 'name', 'bar') + input = [v] + out = gremlin_parser.gremlin_results_to_dict(input) + df = pd.DataFrame.from_records(out) + row = df.iloc[0] + assert df.shape == (1,3) + assert row['element'] == 'bar' + assert row['value'] == 'name' + assert row['key'] == 'foo' + + +#parse Path elements +def test_parse_gremlin_path_elements(gremlin_parser): + #parse path with elements + v = Vertex("foo") + v2 = Vertex("bar") + e1 = Edge("e1", 'foo', 'label', 'bar') + p = Path(labels=["vertex", "label", "vertex"], objects=[v, e1, v2]) + out = gremlin_parser.gremlin_results_to_dict([p]) + df = pd.DataFrame.from_records(out) + row = df.iloc[0] + assert df.shape == (1,3) + assert row[0] == {'id': 'foo', 'label': 'vertex'} + assert row[1] == {'id': 'e1', 'label': 'label', 'outV': 'foo', 'inV': 'bar'} + assert row[2] == {'id': 'bar', 'label': 'vertex'} + + #parse path with multiple elements + e2 = Edge("bar", 'out1', 'label', 'in2') + v3 = Vertex("in2") + p1 = Path(labels=["vertex", "label", "vertex"], objects=[v2, e2, v3]) + out = gremlin_parser.gremlin_results_to_dict([p, p1]) + df = pd.DataFrame.from_records(out) + row = df.iloc[1] + assert df.shape == (2,3) + assert row[0] == {'id': 'bar', 'label': 'vertex'} + assert row[1] == {'id': 'bar', 'label': 'label', 'outV': 'out1', 'inV': 'in2'} + assert row[2] == {'id': 'in2', 'label': 'vertex'} + + #parse path with maps + p = Path(labels=["vertex", "label", "vertex"], objects=[{'name': 'foo', 'age': 29}, {'dist': 32}, {'name': 'bar', 'age': 40}]) + out = gremlin_parser.gremlin_results_to_dict([p]) + df = pd.DataFrame.from_records(out) + row = df.iloc[0] + assert df.shape == (1,3) + assert row[0]['name'] == 'foo' + assert row[0]['age'] == 29 + assert row[1]['dist'] == 32 + assert row[2]['name'] == 'bar' + assert row[2]['age'] == 40 + + #parse path with 
mixed elements and maps + p = Path(labels=["vertex", "label", "vertex"], objects=[{'name': 'foo', 'age': 29}, + Edge("bar", 'out1', 'label', 'in2'), {'name': 'bar', 'age': 40}]) + out = gremlin_parser.gremlin_results_to_dict([p]) + df = pd.DataFrame.from_records(out) + row = df.iloc[0] + assert df.shape == (1,3) + assert row[0]['name'] == 'foo' + assert row[0]['age'] == 29 + assert row[1] == {'id': 'bar', 'label': 'label', 'outV': 'out1', 'inV': 'in2'} + assert row[2]['name'] == 'bar' + assert row[2]['age'] == 40 + + +#parse vertex valueMap +def test_parse_gremlin_maps(gremlin_parser): + # parse map + m = {'name': 'foo', 'age': 29} + input = [m] + out = gremlin_parser.gremlin_results_to_dict(input) + df = pd.DataFrame.from_records(out) + row = df.iloc[0] + assert df.shape == (1,2) + assert row['name'] == 'foo' + assert row['age'] == 29 + + # parse multiple maps with T + m1 = {'name': ['foo'], T.id: '2', 'age': [40], T.label: 'vertex'} + input = [m, m1] + out = gremlin_parser.gremlin_results_to_dict(input) + df = pd.DataFrame.from_records(out) + row = df.iloc[1] + assert df.shape == (2,4) + assert row['name'] == 'foo' + assert row['age'] == 40 + assert row[T.id] == '2' + assert row[T.label] == 'vertex' + m2 = {'name': ['foo', 'bar'], T.id: '2', T.label: 'vertex'} + input = [m, m1, m2] + out = gremlin_parser.gremlin_results_to_dict(input) + df = pd.DataFrame.from_records(out) + row = df.iloc[2] + assert df.shape == (3,4) + assert row['name'] == ['foo', 'bar'] + assert row[T.id] == '2' + assert row[T.label] == 'vertex' + + +#parse scalar +def test_parse_gremlin_scalar(gremlin_parser): + # parse map + m = 12 + n = "Foo" + input = [m, n] + out = gremlin_parser.gremlin_results_to_dict(input) + df = pd.DataFrame.from_records(out) + row = df.iloc[0] + assert df.shape == (2,1) + assert row[0] == 12 + row = df.iloc[1] + assert row[0] == "Foo" + + +#parse subgraph +def test_parse_gremlin_subgraph(gremlin_parser): + m = {'@type': 'tinker:graph', '@value': {'vertices': ['v[45]', 'v[9]'], 'edges': ['e[3990][9-route->45]']}} + input = [m] + out = gremlin_parser.gremlin_results_to_dict(input) + df = pd.DataFrame.from_records(out) + row = df.iloc[0] + assert df.shape == (1,2) + assert row['@type'] == 'tinker:graph' + assert row['@value'] == {'vertices': ['v[45]', 'v[9]'], 'edges': ['e[3990][9-route->45]']} From 9df814c2659b6d9bec931dd183571686827d2821 Mon Sep 17 00:00:00 2001 From: Dave Bechberger Date: Thu, 10 Feb 2022 14:34:43 -0900 Subject: [PATCH 11/32] [skip ci] Added SPARQL write functionality as well as added neptune to database infra scripting --- awswrangler/neptune/client.py | 20 +- awswrangler/neptune/neptune.py | 44 +- test_infra/poetry.lock | 689 ++++++++++++++------------- test_infra/pyproject.toml | 1 + test_infra/stacks/databases_stack.py | 15 + tests/test_neptune.py | 65 ++- 6 files changed, 498 insertions(+), 336 deletions(-) diff --git a/awswrangler/neptune/client.py b/awswrangler/neptune/client.py index 48dedf31a..6721692a9 100644 --- a/awswrangler/neptune/client.py +++ b/awswrangler/neptune/client.py @@ -30,6 +30,7 @@ def __init__(self, host: str, port: int = DEFAULT_PORT, iam_enabled: bool = Fals self.region = region self._http_session = requests.Session() + def __get_region_from_session(self) -> str: """Extract region from session.""" region: Optional[str] = self.boto3_session.region_name @@ -47,6 +48,7 @@ def __ensure_session(session: boto3.Session = None) -> boto3.Session: else: return boto3.Session() + def _prepare_request(self, method, url, *, data=None, params=None, 
headers=None, service=NEPTUNE_SERVICE_NAME) -> requests.PreparedRequest: request = requests.Request(method=method, url=url, data=data, params=params, headers=headers) @@ -57,6 +59,7 @@ def _prepare_request(self, method, url, *, data=None, params=None, headers=None, return request.prepare() + def _get_aws_request(self, method, url, *, data=None, params=None, headers=None, service=NEPTUNE_SERVICE_NAME) -> AWSRequest: req = AWSRequest(method=method, url=url, data=data, params=params, headers=headers) @@ -77,6 +80,7 @@ def _get_aws_request(self, method, url, *, data=None, params=None, headers=None, else: return req + def read_opencypher(self, query: str, headers: Dict[str, Any] = None) -> Dict[str, Any]: if headers is None: headers = {} @@ -116,11 +120,23 @@ def _execute_gremlin(self, query) -> Dict[str, Any]: raise e def read_sparql(self, query, headers: Dict[str, Any] = None) -> Dict[str, Any]: + return self._execute_sparql(query, headers) + + + def write_sparql(self, query, headers: Dict[str, Any] = None) -> Dict[str, Any]: + self._execute_sparql(query, headers, is_update=True) + return True + + + def _execute_sparql(self, query, headers, is_update=False): if headers is None: headers = {} - data = {'query': query} - + if is_update: + data = {'update': query} + else: + data = {'query': query} + if 'content-type' not in headers: headers['content-type'] = 'application/x-www-form-urlencoded' diff --git a/awswrangler/neptune/neptune.py b/awswrangler/neptune/neptune.py index 4dec0035a..49cabda66 100644 --- a/awswrangler/neptune/neptune.py +++ b/awswrangler/neptune/neptune.py @@ -1,4 +1,3 @@ -from pyparsing import col from awswrangler.neptune.client import NeptuneClient from typing import Dict, Any import pandas as pd @@ -18,7 +17,7 @@ def execute_gremlin( client: NeptuneClient, query: str, - **kwargs + **kwargs: str ) -> pd.DataFrame: """Return results of a Gremlin traversal as pandas dataframe. @@ -118,8 +117,7 @@ def execute_sparql( def to_property_graph( client: NeptuneClient, df: pd.DataFrame, - batch_size: int = 50, - **kwargs + batch_size: int = 50 ) -> None: """Write records stored in a DataFrame into Amazon Neptune. @@ -175,8 +173,6 @@ def to_property_graph( res = _run_gremlin_insert(client, g) if res: g = Graph().traversal() - else: - raise Exception("Need to fix why this errors") return _run_gremlin_insert(client, g) @@ -231,11 +227,17 @@ def _run_gremlin_insert(client: NeptuneClient, g: GraphTraversalSource) -> bool: def to_rdf_graph( client: NeptuneClient, - df: pd.DataFrame + df: pd.DataFrame, + batch_size: int = 50, + subject_column:str = 's', + predicate_column:str = 'p', + object_column:str = 'o', + graph_column:str = 'g' ) -> None: """Write records stored in a DataFrame into Amazon Neptune. - The DataFrame must consist of triples with column names of s, p, and o. + The DataFrame must consist of triples with column names for the subject, predicate, and object specified. + If you want to add data into a named graph then you will also need the graph column. Parameters ---------- @@ -259,7 +261,31 @@ def to_rdf_graph( ... df=df ... 
) """ - raise NotImplementedError + is_quads = False + if pd.Series([subject_column, object_column, predicate_column]).isin(df.columns).all(): + if graph_column in df.columns: + is_quads = True + else: + raise exceptions.InvalidArgumentValue( + "Dataframe must contain at least the subject, predicate, and object columns defined or the defaults (s, p, o) to be saved to Amazon Neptune" + ) + + query = "" + # Loop through items in the DF + for (index, row) in df.iterrows(): + # build up a query + if is_quads: + insert = f"INSERT DATA {{ GRAPH <{row[graph_column]}> {{<{row[subject_column]}> <{str(row[predicate_column])}> <{row[object_column]}> . }} }}; " + query = query + insert + else: + insert = f"INSERT DATA {{ <{row[subject_column]}> <{str(row[predicate_column])}> <{row[object_column]}> . }}; " + query = query + insert + # run the query + if index > 0 and index % batch_size == 0: + res = client.write_sparql(query) + if res: + query="" + return client.write_sparql(query) def connect(host: str, port: str, iam_enabled: bool = False, **kwargs: Any) -> NeptuneClient: diff --git a/test_infra/poetry.lock b/test_infra/poetry.lock index 164515b8b..1f9d516c9 100644 --- a/test_infra/poetry.lock +++ b/test_infra/poetry.lock @@ -14,639 +14,674 @@ tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (> [[package]] name = "aws-cdk.assets" -version = "1.130.0" +version = "1.144.0" description = "This module is deprecated. All types are now available under the core module" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.core" = "1.130.0" -"aws-cdk.cx-api" = "1.130.0" +"aws-cdk.core" = "1.144.0" +"aws-cdk.cx-api" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" +publication = ">=0.0.3" + +[[package]] +name = "aws-cdk.aws-acmpca" +version = "1.144.0" +description = "The CDK Construct Library for AWS::ACMPCA" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +"aws-cdk.core" = "1.144.0" +constructs = ">=3.3.69,<4.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-applicationautoscaling" -version = "1.130.0" +version = "1.144.0" description = "The CDK Construct Library for AWS::ApplicationAutoScaling" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-autoscaling-common" = "1.130.0" -"aws-cdk.aws-cloudwatch" = "1.130.0" -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.core" = "1.130.0" +"aws-cdk.aws-autoscaling-common" = "1.144.0" +"aws-cdk.aws-cloudwatch" = "1.144.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.core" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-autoscaling-common" -version = "1.130.0" +version = "1.144.0" description = "Common implementation package for @aws-cdk/aws-autoscaling and @aws-cdk/aws-applicationautoscaling" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.core" = "1.130.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.core" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-certificatemanager" -version = "1.130.0" +version = "1.144.0" description = "The CDK Construct Library for AWS::CertificateManager" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] 
-"aws-cdk.aws-cloudwatch" = "1.130.0" -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.aws-lambda" = "1.130.0" -"aws-cdk.aws-route53" = "1.130.0" -"aws-cdk.core" = "1.130.0" +"aws-cdk.aws-acmpca" = "1.144.0" +"aws-cdk.aws-cloudwatch" = "1.144.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.aws-lambda" = "1.144.0" +"aws-cdk.aws-route53" = "1.144.0" +"aws-cdk.core" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-cloudformation" -version = "1.130.0" +version = "1.144.0" description = "The CDK Construct Library for AWS::CloudFormation" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.aws-lambda" = "1.130.0" -"aws-cdk.aws-s3" = "1.130.0" -"aws-cdk.aws-sns" = "1.130.0" -"aws-cdk.core" = "1.130.0" -"aws-cdk.cx-api" = "1.130.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.aws-lambda" = "1.144.0" +"aws-cdk.aws-s3" = "1.144.0" +"aws-cdk.aws-sns" = "1.144.0" +"aws-cdk.core" = "1.144.0" +"aws-cdk.cx-api" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-cloudwatch" -version = "1.130.0" +version = "1.144.0" description = "The CDK Construct Library for AWS::CloudWatch" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.core" = "1.130.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.core" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-codeguruprofiler" -version = "1.130.0" +version = "1.144.0" description = "The CDK Construct Library for AWS::CodeGuruProfiler" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.core" = "1.130.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.core" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-codestarnotifications" -version = "1.130.0" +version = "1.144.0" description = "The CDK Construct Library for AWS::CodeStarNotifications" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.core" = "1.130.0" +"aws-cdk.core" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-ec2" -version = "1.130.0" +version = "1.144.0" description = "The CDK Construct Library for AWS::EC2" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-cloudwatch" = "1.130.0" -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.aws-kms" = "1.130.0" -"aws-cdk.aws-logs" = "1.130.0" -"aws-cdk.aws-s3" = "1.130.0" -"aws-cdk.aws-s3-assets" = "1.130.0" -"aws-cdk.aws-ssm" = "1.130.0" -"aws-cdk.cloud-assembly-schema" = "1.130.0" -"aws-cdk.core" = "1.130.0" -"aws-cdk.cx-api" = "1.130.0" -"aws-cdk.region-info" = "1.130.0" +"aws-cdk.aws-cloudwatch" = "1.144.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.aws-kms" = "1.144.0" +"aws-cdk.aws-logs" = "1.144.0" +"aws-cdk.aws-s3" = "1.144.0" +"aws-cdk.aws-s3-assets" = "1.144.0" +"aws-cdk.aws-ssm" = "1.144.0" +"aws-cdk.cloud-assembly-schema" = "1.144.0" +"aws-cdk.core" = "1.144.0" +"aws-cdk.cx-api" = "1.144.0" +"aws-cdk.region-info" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = 
">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-ecr" -version = "1.130.0" +version = "1.144.0" description = "The CDK Construct Library for AWS::ECR" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-events" = "1.130.0" -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.core" = "1.130.0" +"aws-cdk.aws-events" = "1.144.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.aws-kms" = "1.144.0" +"aws-cdk.core" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-ecr-assets" -version = "1.130.0" +version = "1.144.0" description = "Docker image assets deployed to ECR" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.assets" = "1.130.0" -"aws-cdk.aws-ecr" = "1.130.0" -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.aws-s3" = "1.130.0" -"aws-cdk.core" = "1.130.0" -"aws-cdk.cx-api" = "1.130.0" +"aws-cdk.assets" = "1.144.0" +"aws-cdk.aws-ecr" = "1.144.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.aws-s3" = "1.144.0" +"aws-cdk.core" = "1.144.0" +"aws-cdk.cx-api" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-efs" -version = "1.130.0" +version = "1.144.0" description = "The CDK Construct Library for AWS::EFS" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-ec2" = "1.130.0" -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.aws-kms" = "1.130.0" -"aws-cdk.cloud-assembly-schema" = "1.130.0" -"aws-cdk.core" = "1.130.0" -"aws-cdk.cx-api" = "1.130.0" +"aws-cdk.aws-ec2" = "1.144.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.aws-kms" = "1.144.0" +"aws-cdk.cloud-assembly-schema" = "1.144.0" +"aws-cdk.core" = "1.144.0" +"aws-cdk.cx-api" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-events" -version = "1.130.0" +version = "1.144.0" description = "Amazon EventBridge Construct Library" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.core" = "1.130.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.core" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-glue" -version = "1.130.0" +version = "1.144.0" description = "The CDK Construct Library for AWS::Glue" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.assets" = "1.130.0" -"aws-cdk.aws-cloudwatch" = "1.130.0" -"aws-cdk.aws-ec2" = "1.130.0" -"aws-cdk.aws-events" = "1.130.0" -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.aws-kms" = "1.130.0" -"aws-cdk.aws-logs" = "1.130.0" -"aws-cdk.aws-s3" = "1.130.0" -"aws-cdk.aws-s3-assets" = "1.130.0" -"aws-cdk.core" = "1.130.0" +"aws-cdk.assets" = "1.144.0" +"aws-cdk.aws-cloudwatch" = "1.144.0" +"aws-cdk.aws-ec2" = "1.144.0" +"aws-cdk.aws-events" = "1.144.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.aws-kms" = "1.144.0" +"aws-cdk.aws-logs" = "1.144.0" +"aws-cdk.aws-s3" = "1.144.0" +"aws-cdk.aws-s3-assets" = "1.144.0" +"aws-cdk.core" = "1.144.0" +"aws-cdk.custom-resources" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-iam" -version = "1.130.0" +version = "1.144.0" description = "CDK routines 
for easily assigning correct and minimal IAM permissions" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.core" = "1.130.0" -"aws-cdk.region-info" = "1.130.0" +"aws-cdk.core" = "1.144.0" +"aws-cdk.region-info" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-kms" -version = "1.130.0" +version = "1.144.0" description = "The CDK Construct Library for AWS::KMS" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.cloud-assembly-schema" = "1.130.0" -"aws-cdk.core" = "1.130.0" -"aws-cdk.cx-api" = "1.130.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.cloud-assembly-schema" = "1.144.0" +"aws-cdk.core" = "1.144.0" +"aws-cdk.cx-api" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-lakeformation" -version = "1.130.0" +version = "1.144.0" description = "The CDK Construct Library for AWS::LakeFormation" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.core" = "1.130.0" +"aws-cdk.core" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-lambda" -version = "1.130.0" +version = "1.144.0" description = "The CDK Construct Library for AWS::Lambda" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-applicationautoscaling" = "1.130.0" -"aws-cdk.aws-cloudwatch" = "1.130.0" -"aws-cdk.aws-codeguruprofiler" = "1.130.0" -"aws-cdk.aws-ec2" = "1.130.0" -"aws-cdk.aws-ecr" = "1.130.0" -"aws-cdk.aws-ecr-assets" = "1.130.0" -"aws-cdk.aws-efs" = "1.130.0" -"aws-cdk.aws-events" = "1.130.0" -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.aws-kms" = "1.130.0" -"aws-cdk.aws-logs" = "1.130.0" -"aws-cdk.aws-s3" = "1.130.0" -"aws-cdk.aws-s3-assets" = "1.130.0" -"aws-cdk.aws-signer" = "1.130.0" -"aws-cdk.aws-sqs" = "1.130.0" -"aws-cdk.core" = "1.130.0" -"aws-cdk.cx-api" = "1.130.0" -"aws-cdk.region-info" = "1.130.0" +"aws-cdk.aws-applicationautoscaling" = "1.144.0" +"aws-cdk.aws-cloudwatch" = "1.144.0" +"aws-cdk.aws-codeguruprofiler" = "1.144.0" +"aws-cdk.aws-ec2" = "1.144.0" +"aws-cdk.aws-ecr" = "1.144.0" +"aws-cdk.aws-ecr-assets" = "1.144.0" +"aws-cdk.aws-efs" = "1.144.0" +"aws-cdk.aws-events" = "1.144.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.aws-kms" = "1.144.0" +"aws-cdk.aws-logs" = "1.144.0" +"aws-cdk.aws-s3" = "1.144.0" +"aws-cdk.aws-s3-assets" = "1.144.0" +"aws-cdk.aws-signer" = "1.144.0" +"aws-cdk.aws-sqs" = "1.144.0" +"aws-cdk.core" = "1.144.0" +"aws-cdk.cx-api" = "1.144.0" +"aws-cdk.region-info" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-logs" -version = "1.130.0" +version = "1.144.0" description = "The CDK Construct Library for AWS::Logs" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-cloudwatch" = "1.130.0" -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.aws-kms" = "1.130.0" -"aws-cdk.aws-s3-assets" = "1.130.0" -"aws-cdk.core" = "1.130.0" +"aws-cdk.aws-cloudwatch" = "1.144.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.aws-kms" = "1.144.0" +"aws-cdk.aws-s3-assets" = "1.144.0" +"aws-cdk.core" = "1.144.0" +"aws-cdk.cx-api" = "1.144.0" +constructs = ">=3.3.69,<4.0.0" +jsii = 
">=1.52.1,<2.0.0" +publication = ">=0.0.3" + +[[package]] +name = "aws-cdk.aws-neptune" +version = "1.144.0" +description = "The CDK Construct Library for AWS::Neptune" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +"aws-cdk.aws-ec2" = "1.144.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.aws-kms" = "1.144.0" +"aws-cdk.core" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-opensearchservice" -version = "1.130.0" +version = "1.144.0" description = "The CDK Construct Library for AWS::OpenSearchService" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-certificatemanager" = "1.130.0" -"aws-cdk.aws-cloudwatch" = "1.130.0" -"aws-cdk.aws-ec2" = "1.130.0" -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.aws-kms" = "1.130.0" -"aws-cdk.aws-logs" = "1.130.0" -"aws-cdk.aws-route53" = "1.130.0" -"aws-cdk.aws-secretsmanager" = "1.130.0" -"aws-cdk.core" = "1.130.0" -"aws-cdk.custom-resources" = "1.130.0" +"aws-cdk.aws-certificatemanager" = "1.144.0" +"aws-cdk.aws-cloudwatch" = "1.144.0" +"aws-cdk.aws-ec2" = "1.144.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.aws-kms" = "1.144.0" +"aws-cdk.aws-logs" = "1.144.0" +"aws-cdk.aws-route53" = "1.144.0" +"aws-cdk.aws-secretsmanager" = "1.144.0" +"aws-cdk.core" = "1.144.0" +"aws-cdk.custom-resources" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-rds" -version = "1.130.0" +version = "1.144.0" description = "The CDK Construct Library for AWS::RDS" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-cloudwatch" = "1.130.0" -"aws-cdk.aws-ec2" = "1.130.0" -"aws-cdk.aws-events" = "1.130.0" -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.aws-kms" = "1.130.0" -"aws-cdk.aws-logs" = "1.130.0" -"aws-cdk.aws-s3" = "1.130.0" -"aws-cdk.aws-secretsmanager" = "1.130.0" -"aws-cdk.core" = "1.130.0" -"aws-cdk.cx-api" = "1.130.0" +"aws-cdk.aws-cloudwatch" = "1.144.0" +"aws-cdk.aws-ec2" = "1.144.0" +"aws-cdk.aws-events" = "1.144.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.aws-kms" = "1.144.0" +"aws-cdk.aws-logs" = "1.144.0" +"aws-cdk.aws-s3" = "1.144.0" +"aws-cdk.aws-secretsmanager" = "1.144.0" +"aws-cdk.core" = "1.144.0" +"aws-cdk.cx-api" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-redshift" -version = "1.130.0" +version = "1.144.0" description = "The CDK Construct Library for AWS::Redshift" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-ec2" = "1.130.0" -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.aws-kms" = "1.130.0" -"aws-cdk.aws-lambda" = "1.130.0" -"aws-cdk.aws-s3" = "1.130.0" -"aws-cdk.aws-secretsmanager" = "1.130.0" -"aws-cdk.core" = "1.130.0" -"aws-cdk.custom-resources" = "1.130.0" +"aws-cdk.aws-ec2" = "1.144.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.aws-kms" = "1.144.0" +"aws-cdk.aws-lambda" = "1.144.0" +"aws-cdk.aws-s3" = "1.144.0" +"aws-cdk.aws-secretsmanager" = "1.144.0" +"aws-cdk.core" = "1.144.0" +"aws-cdk.custom-resources" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-route53" -version = "1.130.0" +version = "1.144.0" description = "The CDK Construct Library for AWS::Route53" category = 
"main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-ec2" = "1.130.0" -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.aws-logs" = "1.130.0" -"aws-cdk.cloud-assembly-schema" = "1.130.0" -"aws-cdk.core" = "1.130.0" -"aws-cdk.custom-resources" = "1.130.0" +"aws-cdk.aws-ec2" = "1.144.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.aws-logs" = "1.144.0" +"aws-cdk.cloud-assembly-schema" = "1.144.0" +"aws-cdk.core" = "1.144.0" +"aws-cdk.custom-resources" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-s3" -version = "1.130.0" +version = "1.144.0" description = "The CDK Construct Library for AWS::S3" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-events" = "1.130.0" -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.aws-kms" = "1.130.0" -"aws-cdk.core" = "1.130.0" -"aws-cdk.cx-api" = "1.130.0" +"aws-cdk.aws-events" = "1.144.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.aws-kms" = "1.144.0" +"aws-cdk.core" = "1.144.0" +"aws-cdk.cx-api" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-s3-assets" -version = "1.130.0" +version = "1.144.0" description = "Deploy local files and directories to S3" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.assets" = "1.130.0" -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.aws-kms" = "1.130.0" -"aws-cdk.aws-s3" = "1.130.0" -"aws-cdk.core" = "1.130.0" -"aws-cdk.cx-api" = "1.130.0" +"aws-cdk.assets" = "1.144.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.aws-kms" = "1.144.0" +"aws-cdk.aws-s3" = "1.144.0" +"aws-cdk.core" = "1.144.0" +"aws-cdk.cx-api" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-sam" -version = "1.130.0" +version = "1.144.0" description = "The CDK Construct Library for the AWS Serverless Application Model (SAM) resources" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.core" = "1.130.0" +"aws-cdk.core" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-secretsmanager" -version = "1.130.0" +version = "1.144.0" description = "The CDK Construct Library for AWS::SecretsManager" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-ec2" = "1.130.0" -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.aws-kms" = "1.130.0" -"aws-cdk.aws-lambda" = "1.130.0" -"aws-cdk.aws-sam" = "1.130.0" -"aws-cdk.core" = "1.130.0" -"aws-cdk.cx-api" = "1.130.0" +"aws-cdk.aws-ec2" = "1.144.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.aws-kms" = "1.144.0" +"aws-cdk.aws-lambda" = "1.144.0" +"aws-cdk.aws-sam" = "1.144.0" +"aws-cdk.core" = "1.144.0" +"aws-cdk.cx-api" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-signer" -version = "1.130.0" +version = "1.144.0" description = "The CDK Construct Library for AWS::Signer" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.core" = "1.130.0" +"aws-cdk.core" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-sns" -version 
= "1.130.0" +version = "1.144.0" description = "The CDK Construct Library for AWS::SNS" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-cloudwatch" = "1.130.0" -"aws-cdk.aws-codestarnotifications" = "1.130.0" -"aws-cdk.aws-events" = "1.130.0" -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.aws-kms" = "1.130.0" -"aws-cdk.aws-sqs" = "1.130.0" -"aws-cdk.core" = "1.130.0" +"aws-cdk.aws-cloudwatch" = "1.144.0" +"aws-cdk.aws-codestarnotifications" = "1.144.0" +"aws-cdk.aws-events" = "1.144.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.aws-kms" = "1.144.0" +"aws-cdk.aws-sqs" = "1.144.0" +"aws-cdk.core" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-sqs" -version = "1.130.0" +version = "1.144.0" description = "The CDK Construct Library for AWS::SQS" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-cloudwatch" = "1.130.0" -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.aws-kms" = "1.130.0" -"aws-cdk.core" = "1.130.0" +"aws-cdk.aws-cloudwatch" = "1.144.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.aws-kms" = "1.144.0" +"aws-cdk.core" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-ssm" -version = "1.130.0" +version = "1.144.0" description = "The CDK Construct Library for AWS::SSM" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.aws-kms" = "1.130.0" -"aws-cdk.cloud-assembly-schema" = "1.130.0" -"aws-cdk.core" = "1.130.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.aws-kms" = "1.144.0" +"aws-cdk.cloud-assembly-schema" = "1.144.0" +"aws-cdk.core" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.cloud-assembly-schema" -version = "1.130.0" +version = "1.144.0" description = "Cloud Assembly Schema" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.core" -version = "1.130.0" +version = "1.144.0" description = "AWS Cloud Development Kit Core Library" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.cloud-assembly-schema" = "1.130.0" -"aws-cdk.cx-api" = "1.130.0" -"aws-cdk.region-info" = "1.130.0" +"aws-cdk.cloud-assembly-schema" = "1.144.0" +"aws-cdk.cx-api" = "1.144.0" +"aws-cdk.region-info" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.custom-resources" -version = "1.130.0" +version = "1.144.0" description = "Constructs for implementing CDK custom resources" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-cloudformation" = "1.130.0" -"aws-cdk.aws-ec2" = "1.130.0" -"aws-cdk.aws-iam" = "1.130.0" -"aws-cdk.aws-lambda" = "1.130.0" -"aws-cdk.aws-logs" = "1.130.0" -"aws-cdk.aws-sns" = "1.130.0" -"aws-cdk.core" = "1.130.0" +"aws-cdk.aws-cloudformation" = "1.144.0" +"aws-cdk.aws-ec2" = "1.144.0" +"aws-cdk.aws-iam" = "1.144.0" +"aws-cdk.aws-lambda" = "1.144.0" +"aws-cdk.aws-logs" = "1.144.0" +"aws-cdk.aws-sns" = "1.144.0" +"aws-cdk.core" = "1.144.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" 
publication = ">=0.0.3" [[package]] name = "aws-cdk.cx-api" -version = "1.130.0" +version = "1.144.0" description = "Cloud executable protocol" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.cloud-assembly-schema" = "1.130.0" -jsii = ">=1.41.0,<2.0.0" +"aws-cdk.cloud-assembly-schema" = "1.144.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.region-info" -version = "1.130.0" +version = "1.144.0" description = "AWS region information, such as service principal names" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -jsii = ">=1.41.0,<2.0.0" +jsii = ">=1.52.1,<2.0.0" publication = ">=0.0.3" [[package]] @@ -703,7 +738,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [[package]] name = "jsii" -version = "1.42.0" +version = "1.52.1" description = "Python client for jsii runtime" category = "main" optional = false @@ -713,11 +748,11 @@ python-versions = "~=3.6" attrs = ">=21.2,<22.0" cattrs = [ {version = ">=1.0.0,<1.1.0", markers = "python_version < \"3.7\""}, - {version = ">=1.8.0,<1.9.0", markers = "python_version >= \"3.7\""}, + {version = ">=1.8,<1.11", markers = "python_version >= \"3.7\""}, ] importlib-resources = {version = "*", markers = "python_version < \"3.7\""} python-dateutil = "*" -typing-extensions = ">=3.7,<4.0" +typing-extensions = ">=3.7,<5.0" [[package]] name = "publication" @@ -768,8 +803,8 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytes [metadata] lock-version = "1.1" -python-versions = ">=3.6.2, <3.10" -content-hash = "6d22ad86171a44206a94d9e9d051c12bb4caf0215a7af535ae5e7d371011afc1" +python-versions = ">=3.6.2, <3.11" +content-hash = "1638e85d2b87fdbdb239ed675da0b745fe125e87dee9e84c09385b5dce05513f" [metadata.files] attrs = [ @@ -777,148 +812,156 @@ attrs = [ {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, ] "aws-cdk.assets" = [ - {file = "aws-cdk.assets-1.130.0.tar.gz", hash = "sha256:89628550ecfd4f2b3713cc515c5937ee766cc68cd39fc65dc15095a4fc92140f"}, - {file = "aws_cdk.assets-1.130.0-py3-none-any.whl", hash = "sha256:88ee75118c7b34506acac8a3390e0f6360227f95764749ecf0cb8160532fef8d"}, + {file = "aws-cdk.assets-1.144.0.tar.gz", hash = "sha256:4269651cb98e24d0c866fc00e2db07d2bbedbefa9da5639cff88ac0e1bba2bd4"}, + {file = "aws_cdk.assets-1.144.0-py3-none-any.whl", hash = "sha256:c43c00f62f4c17a53ef694054bbe025773ac6cf439c95ced115434f654418f5a"}, +] +"aws-cdk.aws-acmpca" = [ + {file = "aws-cdk.aws-acmpca-1.144.0.tar.gz", hash = "sha256:a807164e3f2025357b88d4cdab65b84f8781b4517aa536e5b556e4a5b2a41b08"}, + {file = "aws_cdk.aws_acmpca-1.144.0-py3-none-any.whl", hash = "sha256:4b4a43a6d8bf646118d786071f48f420e4333bc518f0c927b95cab430008039d"}, ] "aws-cdk.aws-applicationautoscaling" = [ - {file = "aws-cdk.aws-applicationautoscaling-1.130.0.tar.gz", hash = "sha256:c60000e6a2b86392efcfb32066207cff19adbdbe0b68e1ee4281cf5b52255b29"}, - {file = "aws_cdk.aws_applicationautoscaling-1.130.0-py3-none-any.whl", hash = "sha256:0bed99bbc03ae733450e03bd0c6b075fadc13ae0d9363fffa047e6de0d68be60"}, + {file = "aws-cdk.aws-applicationautoscaling-1.144.0.tar.gz", hash = "sha256:d8c05eb9516b705710a415ddc3922353cca0f137489a88524f99618a6980ea3c"}, + {file = "aws_cdk.aws_applicationautoscaling-1.144.0-py3-none-any.whl", hash = "sha256:d2b9ac828db47bdc6e6e4644693f92ef7e528ae10646367239cabbe6ab5b24d0"}, ] "aws-cdk.aws-autoscaling-common" = [ - {file = 
"aws-cdk.aws-autoscaling-common-1.130.0.tar.gz", hash = "sha256:bdc5eee7f30163daf0a40b78e888d356da9815057153791daa6bc2b3d1288541"}, - {file = "aws_cdk.aws_autoscaling_common-1.130.0-py3-none-any.whl", hash = "sha256:46bd7dffa2ff4bcb2c3ee86e4881d7994924f23f76b59fb346cbc48a7e5b90e4"}, + {file = "aws-cdk.aws-autoscaling-common-1.144.0.tar.gz", hash = "sha256:58ebdc1200bff4b14688376cbe910ed20eadb04eee59d940434ba47ccfd647f0"}, + {file = "aws_cdk.aws_autoscaling_common-1.144.0-py3-none-any.whl", hash = "sha256:5de9739de50921c71efa3a1d85e6856072225f14c2e2567afa5608180250ddce"}, ] "aws-cdk.aws-certificatemanager" = [ - {file = "aws-cdk.aws-certificatemanager-1.130.0.tar.gz", hash = "sha256:e95cb1c48e5b37afa10ff0bdac0c0793d276f0c00d26140c6707a1fb0db74dc8"}, - {file = "aws_cdk.aws_certificatemanager-1.130.0-py3-none-any.whl", hash = "sha256:23ceb0486f5e17ed230a651401ce9807faf3efffe793b0e1cef7e224f6ed25c9"}, + {file = "aws-cdk.aws-certificatemanager-1.144.0.tar.gz", hash = "sha256:73dbc3811ab67baf3b89f21bbaf8640f2437609bfa25033349044a8db28d13ed"}, + {file = "aws_cdk.aws_certificatemanager-1.144.0-py3-none-any.whl", hash = "sha256:95a568df2a9779c641c3da10c4f958d4336c07ae87859bc627167308957d844b"}, ] "aws-cdk.aws-cloudformation" = [ - {file = "aws-cdk.aws-cloudformation-1.130.0.tar.gz", hash = "sha256:c2176461dfd6bf46ad3143f9ca270e209e74d6a1f8d52e2260f4893b4b9ae228"}, - {file = "aws_cdk.aws_cloudformation-1.130.0-py3-none-any.whl", hash = "sha256:0509fc5b6b6a6bae3752fe04a4b3f24776254e28bc5688238602b904852bd2ec"}, + {file = "aws-cdk.aws-cloudformation-1.144.0.tar.gz", hash = "sha256:d6aa003c0ac6b933f2d83784f167179c7a3f6247dbac4218a82ff0c42f2ea728"}, + {file = "aws_cdk.aws_cloudformation-1.144.0-py3-none-any.whl", hash = "sha256:f31770f0eb5bf9531566473a39618e6fb8f319641e3f504bedef02ae6b318d62"}, ] "aws-cdk.aws-cloudwatch" = [ - {file = "aws-cdk.aws-cloudwatch-1.130.0.tar.gz", hash = "sha256:1034ca75148e8292d014927911ba45cb18fad459371988ed32afd4b9de999449"}, - {file = "aws_cdk.aws_cloudwatch-1.130.0-py3-none-any.whl", hash = "sha256:10cbd1b7047267a6a1f566e7f1bfd1e85932a709bbca5419226d1b84b7e0a0ee"}, + {file = "aws-cdk.aws-cloudwatch-1.144.0.tar.gz", hash = "sha256:6368a80900c58f675b6e9180697e328f03f1be5ab0a31e15d77340835bb3ab31"}, + {file = "aws_cdk.aws_cloudwatch-1.144.0-py3-none-any.whl", hash = "sha256:ef12cd5dcd9d11a4ab718263c04ba2e282f09fee293f3dbecc7934b6f2b4c51b"}, ] "aws-cdk.aws-codeguruprofiler" = [ - {file = "aws-cdk.aws-codeguruprofiler-1.130.0.tar.gz", hash = "sha256:b9d9473a3e052e3164759c3f1ee694b7fc9d604c92b4a3df36c31a1a92306917"}, - {file = "aws_cdk.aws_codeguruprofiler-1.130.0-py3-none-any.whl", hash = "sha256:0462eb79554b407bed707eda3f840956ec81d442ddfad4a1e93da20c89152835"}, + {file = "aws-cdk.aws-codeguruprofiler-1.144.0.tar.gz", hash = "sha256:20e039a32e38770628b9d19bbb164fa3a0de0902d5908adbf090a34006803a05"}, + {file = "aws_cdk.aws_codeguruprofiler-1.144.0-py3-none-any.whl", hash = "sha256:c09d6ecb7f40c85ce55067397a5220800e16cc870b69fa6add72208748a3b758"}, ] "aws-cdk.aws-codestarnotifications" = [ - {file = "aws-cdk.aws-codestarnotifications-1.130.0.tar.gz", hash = "sha256:3c7f66d4c377e4f509b2719be4a2b1ac6efdbc4ab416eb56947a57ddd9290e27"}, - {file = "aws_cdk.aws_codestarnotifications-1.130.0-py3-none-any.whl", hash = "sha256:89b8a5374616e732475f374acb1f8b26de20721e0d939dda733f7135754848e3"}, + {file = "aws-cdk.aws-codestarnotifications-1.144.0.tar.gz", hash = "sha256:afa8e2ea5c09b330e81fe9d52c5a14d69e2b88435ff5222ac48629f8fb902fc9"}, + {file = 
"aws_cdk.aws_codestarnotifications-1.144.0-py3-none-any.whl", hash = "sha256:5d0baa762350f0313fb6d308e059e8f4e1c9ceffdcc39f3d949d1111c5b4f5e9"}, ] "aws-cdk.aws-ec2" = [ - {file = "aws-cdk.aws-ec2-1.130.0.tar.gz", hash = "sha256:e0220bc03d44ad4e7f04c8efacd65c52c32faeac3d62a752d114e5606c47a6c2"}, - {file = "aws_cdk.aws_ec2-1.130.0-py3-none-any.whl", hash = "sha256:fde2b2252debcbdd309a74bf7f3c1b7aaa83a671511eb9f753105687e59cafc3"}, + {file = "aws-cdk.aws-ec2-1.144.0.tar.gz", hash = "sha256:19984399f8c444bee71f843c082ced51676b76df10ef5e7b6924d94c051799ed"}, + {file = "aws_cdk.aws_ec2-1.144.0-py3-none-any.whl", hash = "sha256:a8c77b256e89214d90b4a34e882369731c2271570f96455d77661a483f761284"}, ] "aws-cdk.aws-ecr" = [ - {file = "aws-cdk.aws-ecr-1.130.0.tar.gz", hash = "sha256:0c3aad603cc3f8e7cf2901d9a1365fe5110ff46f7d739b89333691219b186b92"}, - {file = "aws_cdk.aws_ecr-1.130.0-py3-none-any.whl", hash = "sha256:7a2f8720d2f23c3578979c53f486c66a8449e0fd8135c6e4f82d4bd653151ce9"}, + {file = "aws-cdk.aws-ecr-1.144.0.tar.gz", hash = "sha256:673cc596b60708525a0f4a7c6489f05ff66aa8b8c41aba12c5403cbe0944e3ca"}, + {file = "aws_cdk.aws_ecr-1.144.0-py3-none-any.whl", hash = "sha256:334f0397e7da4abdbacc341b351f46be8106031a6f91639eef1503d4849c35b8"}, ] "aws-cdk.aws-ecr-assets" = [ - {file = "aws-cdk.aws-ecr-assets-1.130.0.tar.gz", hash = "sha256:40ca779cde59bdc3fcd979385a2b87b8e5cb052e1a4ef76e43bd781458ea5ce3"}, - {file = "aws_cdk.aws_ecr_assets-1.130.0-py3-none-any.whl", hash = "sha256:ddf5078a87529b4e5c2216bb71579fc0489b4dcdab6e7d5246dd1e1d10263e29"}, + {file = "aws-cdk.aws-ecr-assets-1.144.0.tar.gz", hash = "sha256:5a9a94137da9756982e39e2ccc1499811c42d873b1c617c9cfc47b973b66191a"}, + {file = "aws_cdk.aws_ecr_assets-1.144.0-py3-none-any.whl", hash = "sha256:e226da9046db86cf1d70efc1147b08f129e7e767204eb63a033b5132eeef0d26"}, ] "aws-cdk.aws-efs" = [ - {file = "aws-cdk.aws-efs-1.130.0.tar.gz", hash = "sha256:8ed017fe4599bbfaa03dac74aa41cded39984813c8a6b14e280896aed0c8a39a"}, - {file = "aws_cdk.aws_efs-1.130.0-py3-none-any.whl", hash = "sha256:ccf15abb0711725620d478f7b53e58f2f6109b77f6c47c5878dc00d70e196827"}, + {file = "aws-cdk.aws-efs-1.144.0.tar.gz", hash = "sha256:47ac47fd80a5f9cb6b774c37f8171504ebf2cd67ff7b13326dd2a0cef9662805"}, + {file = "aws_cdk.aws_efs-1.144.0-py3-none-any.whl", hash = "sha256:02f233eb82111eb5c22c118e26801cde52122ff813ea1bfcc8fc0e708acb0e94"}, ] "aws-cdk.aws-events" = [ - {file = "aws-cdk.aws-events-1.130.0.tar.gz", hash = "sha256:6ee24457c50eeda8c9c241596cfa6b123bb50ea2138787fef3e4bb54e4b47f13"}, - {file = "aws_cdk.aws_events-1.130.0-py3-none-any.whl", hash = "sha256:df91c72843d9734a49017040090b1615be41de020906a57e6c708d860e8a4139"}, + {file = "aws-cdk.aws-events-1.144.0.tar.gz", hash = "sha256:6fd7ab316d3efff508e3dd232dacb73729deaf4a537f4b02e2540447f05c5519"}, + {file = "aws_cdk.aws_events-1.144.0-py3-none-any.whl", hash = "sha256:9f15ac7967ad3b03da819484112851b47b376b071b9fe70fe1e1432fa4564c5f"}, ] "aws-cdk.aws-glue" = [ - {file = "aws-cdk.aws-glue-1.130.0.tar.gz", hash = "sha256:4ddda00ad580ffe207f2241a3cc66ab6c5a225580a9daa6adcd03c3299017d9a"}, - {file = "aws_cdk.aws_glue-1.130.0-py3-none-any.whl", hash = "sha256:93f136d74b866619bd3aec2086b5a2c2b930acfaad7cc23cfa2f0b2a2eb85f90"}, + {file = "aws-cdk.aws-glue-1.144.0.tar.gz", hash = "sha256:f9abf47d096196e8a04caae969ef637d155f8d986b4f19149754bdb582a7ab4e"}, + {file = "aws_cdk.aws_glue-1.144.0-py3-none-any.whl", hash = "sha256:6c30c8b649fa82f217d0850111ab2335125c7087ef1ab6e464fd2bdadac70b4d"}, ] "aws-cdk.aws-iam" = [ - {file = 
"aws-cdk.aws-iam-1.130.0.tar.gz", hash = "sha256:d2bf02a2d3f2bd81c1b9598e7b4424b0dc0d4694b57338d7efac43a89fb6409c"}, - {file = "aws_cdk.aws_iam-1.130.0-py3-none-any.whl", hash = "sha256:3a3272745da9363177ebd8b138f42ce9407439f909ed9177c226e584022f4ff0"}, + {file = "aws-cdk.aws-iam-1.144.0.tar.gz", hash = "sha256:f9c46e54c3fc10bb4246b8df41d4470c74850d7a1ca9a8094e552e0adbde2964"}, + {file = "aws_cdk.aws_iam-1.144.0-py3-none-any.whl", hash = "sha256:e32ced31e07af61fd0bd7a50d75b72afc224c8fd106fc3c37a60a288120961ab"}, ] "aws-cdk.aws-kms" = [ - {file = "aws-cdk.aws-kms-1.130.0.tar.gz", hash = "sha256:1ece4b6753b0271d9164b32c0c94919e2f2a587677b19c554c2a990b5b0803b7"}, - {file = "aws_cdk.aws_kms-1.130.0-py3-none-any.whl", hash = "sha256:de50127ab5f5f3838b6e4e549696ccfcd2cf18f7edd50616f82b1a0ddcd10075"}, + {file = "aws-cdk.aws-kms-1.144.0.tar.gz", hash = "sha256:55cd1ee139de85747d6acc1911b80892888535f7a0b5bbf2a32c5cca52feb26c"}, + {file = "aws_cdk.aws_kms-1.144.0-py3-none-any.whl", hash = "sha256:ac27f11fe74ac6d3fcdf12f1c2514ddd7fad330b4cd08ba4baf4aa392fae2f4e"}, ] "aws-cdk.aws-lakeformation" = [ - {file = "aws-cdk.aws-lakeformation-1.130.0.tar.gz", hash = "sha256:bdf37b0047ed48c4fa70c5a9398b596f278a73abf4b912b6eb289fa8aeb96ca7"}, - {file = "aws_cdk.aws_lakeformation-1.130.0-py3-none-any.whl", hash = "sha256:5bcd04992577dc2b67d437e0d73b3367e3b57315859a5c9426f15501db049151"}, + {file = "aws-cdk.aws-lakeformation-1.144.0.tar.gz", hash = "sha256:c4da9098e61361c5b334d399282d8f21351548aa3fbbc47b76d1fd017100d11b"}, + {file = "aws_cdk.aws_lakeformation-1.144.0-py3-none-any.whl", hash = "sha256:66fdcb11f4bab3237d16c47d400c73ec01393333d644ba6d58a81ce9f0feb8ca"}, ] "aws-cdk.aws-lambda" = [ - {file = "aws-cdk.aws-lambda-1.130.0.tar.gz", hash = "sha256:c3ee7c637f1a590ead83e75803865f58c0c18193ff841d94b0a0b51ea1e9d6fb"}, - {file = "aws_cdk.aws_lambda-1.130.0-py3-none-any.whl", hash = "sha256:6c8dec3aad5d3900888aab52b0a844d3c05e94f977ff04ec26083302cc76edc8"}, + {file = "aws-cdk.aws-lambda-1.144.0.tar.gz", hash = "sha256:d653487b9c8bbd16063a1b61ec533d06fe8ab1b10d1eb367ff2057ffb983609e"}, + {file = "aws_cdk.aws_lambda-1.144.0-py3-none-any.whl", hash = "sha256:357c59f8eaad933b7f9903b46047488d5eab4d2f85fdb6dad4e1d55ea9dcbd8a"}, ] "aws-cdk.aws-logs" = [ - {file = "aws-cdk.aws-logs-1.130.0.tar.gz", hash = "sha256:d022ec78f953f1276d710e903ee75857fe86a05b1f44f1610ac4d52b8652ddfc"}, - {file = "aws_cdk.aws_logs-1.130.0-py3-none-any.whl", hash = "sha256:da8ff0e9ed334bb4bc34cac698ad46ae8e815c7e9018e3754c9a342b84f26bbb"}, + {file = "aws-cdk.aws-logs-1.144.0.tar.gz", hash = "sha256:5212b0adc155dd5fc244aaec8ffed74912bc4f06c0e0281d83d40e51e7e438a8"}, + {file = "aws_cdk.aws_logs-1.144.0-py3-none-any.whl", hash = "sha256:98253c9c5e1175866ea13886534ec3d091406be9386d01173388304bf5a26b71"}, +] +"aws-cdk.aws-neptune" = [ + {file = "aws-cdk.aws-neptune-1.144.0.tar.gz", hash = "sha256:254d2f9bade919f915d6ead2ec2a34364f3554086b51bad599cb973c07a39926"}, + {file = "aws_cdk.aws_neptune-1.144.0-py3-none-any.whl", hash = "sha256:81822ccb35b45d1e2cf905f9fd5dcf8108d63fa646767cdb0cd7a740eb1364f3"}, ] "aws-cdk.aws-opensearchservice" = [ - {file = "aws-cdk.aws-opensearchservice-1.130.0.tar.gz", hash = "sha256:4194f91d28b50a4dc7b97d773871798a79bd93774146cfb8d2fe0ad30030328b"}, - {file = "aws_cdk.aws_opensearchservice-1.130.0-py3-none-any.whl", hash = "sha256:b4bb3b0a80f883aeeae79417ef45c5fc1f46abd05dfa9c46bd02476d5083af39"}, + {file = "aws-cdk.aws-opensearchservice-1.144.0.tar.gz", hash = 
"sha256:70ca76b67e7731009cf14649992b98d7556becd728dd3c3ce946a38ecb776e1e"}, + {file = "aws_cdk.aws_opensearchservice-1.144.0-py3-none-any.whl", hash = "sha256:1624a09871302779ad4a130671c6254064bf59a1ad314ff7f3d07922543c328f"}, ] "aws-cdk.aws-rds" = [ - {file = "aws-cdk.aws-rds-1.130.0.tar.gz", hash = "sha256:316abaa5786703bf1459f538d8d1bcc02f5b4c75df320fe2e9d62821f92fa7f4"}, - {file = "aws_cdk.aws_rds-1.130.0-py3-none-any.whl", hash = "sha256:a781ca1b945f655797f06106eb72142be4d1d6b9278e707a29a7e75d7e8dea73"}, + {file = "aws-cdk.aws-rds-1.144.0.tar.gz", hash = "sha256:ad26c542c4fe6adb776d75b52f2891f90ace6adaa8b2ffc043eddf50dba86004"}, + {file = "aws_cdk.aws_rds-1.144.0-py3-none-any.whl", hash = "sha256:959ba156ac8ffcaf6421e164eb78d00feb7561518a7944e74f8a368735761e18"}, ] "aws-cdk.aws-redshift" = [ - {file = "aws-cdk.aws-redshift-1.130.0.tar.gz", hash = "sha256:7447af727af2ff2014aad2d04a96ef70ffc6e65142d575dffb762cd147067e06"}, - {file = "aws_cdk.aws_redshift-1.130.0-py3-none-any.whl", hash = "sha256:e60832a9a042eaeeb646769a40753a82b807dc1154df58c20d524010e361c5b0"}, + {file = "aws-cdk.aws-redshift-1.144.0.tar.gz", hash = "sha256:9bb00eb16c77f5a0c1fc01708b4b5220853a53d6fe962050a040790c833ba37e"}, + {file = "aws_cdk.aws_redshift-1.144.0-py3-none-any.whl", hash = "sha256:0340befcbabeec3f4d91cd6db4dd53bb410abf88c8c29ca48af897a4e3747531"}, ] "aws-cdk.aws-route53" = [ - {file = "aws-cdk.aws-route53-1.130.0.tar.gz", hash = "sha256:6d1a209505e794922718cbf2f8f432f8d51b305da63ad4f10008b8f1f535f526"}, - {file = "aws_cdk.aws_route53-1.130.0-py3-none-any.whl", hash = "sha256:270877be4a1469f84c3022300baba2b982cd1644b4ea01d65fb0522adcf9b822"}, + {file = "aws-cdk.aws-route53-1.144.0.tar.gz", hash = "sha256:fabc01b6976a8f755bd48142c6f166f32d5c87f7309b146cecd9a6dba86d8963"}, + {file = "aws_cdk.aws_route53-1.144.0-py3-none-any.whl", hash = "sha256:524dc3f4b676c75a6beaf3943fb8ac5392de918d55edc8f4938f33db09e1ee7d"}, ] "aws-cdk.aws-s3" = [ - {file = "aws-cdk.aws-s3-1.130.0.tar.gz", hash = "sha256:940bcb081783937e774cf4f44f77ba7a8211ebe9440cca2d7225b310f4272f79"}, - {file = "aws_cdk.aws_s3-1.130.0-py3-none-any.whl", hash = "sha256:9fac2a150adf92700c05a02c603d0ff1185894235443980fafc874354c380f52"}, + {file = "aws-cdk.aws-s3-1.144.0.tar.gz", hash = "sha256:88737f283e49ae970786d9c059bd50f43160acaeac510aec473dfda7ef5597f6"}, + {file = "aws_cdk.aws_s3-1.144.0-py3-none-any.whl", hash = "sha256:1d72191f899415de71a3640917be4f5da0a37ceeb4659a773b2a5a2fd8d1105e"}, ] "aws-cdk.aws-s3-assets" = [ - {file = "aws-cdk.aws-s3-assets-1.130.0.tar.gz", hash = "sha256:db33b348222895ad14cb9d52d5582b1e80d0e9ff008f8c10ea912499ab7c14f1"}, - {file = "aws_cdk.aws_s3_assets-1.130.0-py3-none-any.whl", hash = "sha256:01a5b0f2c759a88176929569c6f69d0efb8901452fe112cfd3b3f4782fec12ab"}, + {file = "aws-cdk.aws-s3-assets-1.144.0.tar.gz", hash = "sha256:f27be123c988fe1f1637a2352952be831ade5f6b490e5530007fd564bbfe8018"}, + {file = "aws_cdk.aws_s3_assets-1.144.0-py3-none-any.whl", hash = "sha256:5d329fac1be62240fae51b15dd8e26e78e962b088c44d5ccd112b8678a3aba13"}, ] "aws-cdk.aws-sam" = [ - {file = "aws-cdk.aws-sam-1.130.0.tar.gz", hash = "sha256:564877af10684b99a76d7ae83b888f9dfc1f7894caed81d5349a059f51430836"}, - {file = "aws_cdk.aws_sam-1.130.0-py3-none-any.whl", hash = "sha256:dbd38e5e52b5f94aff76bc18640e8ba11ae0d0b183867f747942c753935bf326"}, + {file = "aws-cdk.aws-sam-1.144.0.tar.gz", hash = "sha256:2c20bc68e51a024ce3dc0e0cabed79772f37d20a8de1b61e228461f4951d1aad"}, + {file = "aws_cdk.aws_sam-1.144.0-py3-none-any.whl", hash = 
"sha256:96563a6a64126096ecd60d2dc8dc36471f04a18ae9d7371532767020f18a8cc3"}, ] "aws-cdk.aws-secretsmanager" = [ - {file = "aws-cdk.aws-secretsmanager-1.130.0.tar.gz", hash = "sha256:96e52bd3e6523b22f1d60aadeb0b6f435a5276a1ec794e4cfe2294f8ac26259a"}, - {file = "aws_cdk.aws_secretsmanager-1.130.0-py3-none-any.whl", hash = "sha256:a929ef9fea760b37d5306a1ee9deeecbac2530ab2ea7ec1fc1085544e6af1ca0"}, + {file = "aws-cdk.aws-secretsmanager-1.144.0.tar.gz", hash = "sha256:d4f73db193f52f4f405b48b7f8be454011b11164ba7e81c212c1f89c078e5fde"}, + {file = "aws_cdk.aws_secretsmanager-1.144.0-py3-none-any.whl", hash = "sha256:d60af58aeb26a7f217d80e4e2d161a3692a196598f69840a4dde8f2979eb3895"}, ] "aws-cdk.aws-signer" = [ - {file = "aws-cdk.aws-signer-1.130.0.tar.gz", hash = "sha256:f453d608a491dd0ff7d97fa597f17480d3bf43a0eaedd975e0846bf03de0ab0d"}, - {file = "aws_cdk.aws_signer-1.130.0-py3-none-any.whl", hash = "sha256:10a5981156c83c8725f565931167b376db24c08d43b325a8ad0e4a10559b32df"}, + {file = "aws-cdk.aws-signer-1.144.0.tar.gz", hash = "sha256:bd0199b83178d5852b58670cc62e8a36c8a970bbd217825f77c8ded140725637"}, + {file = "aws_cdk.aws_signer-1.144.0-py3-none-any.whl", hash = "sha256:939f99d364a28758d478b9646b35b4e83d2a514f21bb1aa66742399077cb87ce"}, ] "aws-cdk.aws-sns" = [ - {file = "aws-cdk.aws-sns-1.130.0.tar.gz", hash = "sha256:a2494dd42513b870ef94c0f013e734473fb8a02042b21da5864e3b8bd6609963"}, - {file = "aws_cdk.aws_sns-1.130.0-py3-none-any.whl", hash = "sha256:7b6dfc5c50cdc0005caac683731772502a9d26d6ef415256f21746bef0b7b444"}, + {file = "aws-cdk.aws-sns-1.144.0.tar.gz", hash = "sha256:c261aa28669d036d58f9e6999ed979e6a1843258969536076a420bb5a3981828"}, + {file = "aws_cdk.aws_sns-1.144.0-py3-none-any.whl", hash = "sha256:ed4f3e464721aeb803e876037b26ad107f9754858b8cbf09152f5ae16649ebd4"}, ] "aws-cdk.aws-sqs" = [ - {file = "aws-cdk.aws-sqs-1.130.0.tar.gz", hash = "sha256:baef9bfc74c33ad5e9ff65a4d48477f68fb503950d58d21e9cc657e8a9914c0f"}, - {file = "aws_cdk.aws_sqs-1.130.0-py3-none-any.whl", hash = "sha256:bd40f528012fd38398dd7cc6a8c91c62da634e2e620ecfa6530ae43a5d1890b5"}, + {file = "aws-cdk.aws-sqs-1.144.0.tar.gz", hash = "sha256:4b54adbc1da6a91fd575845fa6e7fbca4de5e858df21caf81636086b256a00b3"}, + {file = "aws_cdk.aws_sqs-1.144.0-py3-none-any.whl", hash = "sha256:9b57c1a402865251df9f41f12341d2a38647b6bd0948c760a686d357b3edcf27"}, ] "aws-cdk.aws-ssm" = [ - {file = "aws-cdk.aws-ssm-1.130.0.tar.gz", hash = "sha256:2c0a2e400b82864233e76973020dc16e88afc35aa0ef4dd5250d0404e1236de0"}, - {file = "aws_cdk.aws_ssm-1.130.0-py3-none-any.whl", hash = "sha256:dd84d306f4794433b921f75081d3db41dfe6fdc6078bfa377a096a1457adc9a9"}, + {file = "aws-cdk.aws-ssm-1.144.0.tar.gz", hash = "sha256:7ead4b75103261a5356bacd9b70f7eb680e30b5d2f9cd7977b89f012c3919cd2"}, + {file = "aws_cdk.aws_ssm-1.144.0-py3-none-any.whl", hash = "sha256:87ff66213ebe0d5ea7adbc5909887c8bbe4024d88037fd5231523f81053f834f"}, ] "aws-cdk.cloud-assembly-schema" = [ - {file = "aws-cdk.cloud-assembly-schema-1.130.0.tar.gz", hash = "sha256:31231d1fa14037f2af0a0a27657c7e603103c876464868bb8a5731698dba9d7f"}, - {file = "aws_cdk.cloud_assembly_schema-1.130.0-py3-none-any.whl", hash = "sha256:3eadde99a914ca53e101e66a403b554537435a29e1954cb13e94cdc9305da48a"}, + {file = "aws-cdk.cloud-assembly-schema-1.144.0.tar.gz", hash = "sha256:838610bcbb098ea5f700a032ce3bad8b630295f72b2cbb67fe25ea413394049d"}, + {file = "aws_cdk.cloud_assembly_schema-1.144.0-py3-none-any.whl", hash = "sha256:ca29af10806f5bf33563daebb01ec221862c6981de200605ff911a8d70aec81a"}, ] "aws-cdk.core" = [ - 
{file = "aws-cdk.core-1.130.0.tar.gz", hash = "sha256:d07b98dad35b18481e46b92b6fde7061b76730ac9d1111849db321e519ebdc52"}, - {file = "aws_cdk.core-1.130.0-py3-none-any.whl", hash = "sha256:7b3f1d0e9f83263763694cfb814346c38984041226180fe298056670fa5a5bd9"}, + {file = "aws-cdk.core-1.144.0.tar.gz", hash = "sha256:165a20f2d5cc3d6a41d97b5dfd66f9b6e9021e6657c20d282486803195fe73b0"}, + {file = "aws_cdk.core-1.144.0-py3-none-any.whl", hash = "sha256:d179cb4f7deb5f68436c67efb025372e095c5da0659d113476a3635a5352f8d1"}, ] "aws-cdk.custom-resources" = [ - {file = "aws-cdk.custom-resources-1.130.0.tar.gz", hash = "sha256:c212447b64f79d3605db6e072d23acc6fa1135e5399162a8cd258bc1d22e03e2"}, - {file = "aws_cdk.custom_resources-1.130.0-py3-none-any.whl", hash = "sha256:07c8a6c99bfe53d251303a7cf50b109fa974ddfd2fdbd22f3e94534271a2f666"}, + {file = "aws-cdk.custom-resources-1.144.0.tar.gz", hash = "sha256:18639c531ff0c871c7394a728bdfc06dcb910ad6670811500be61f6c663f7b2c"}, + {file = "aws_cdk.custom_resources-1.144.0-py3-none-any.whl", hash = "sha256:b30702665d7810118fde9ef86541a9c8a537d6975dcfca24284cc7b2acb37fe2"}, ] "aws-cdk.cx-api" = [ - {file = "aws-cdk.cx-api-1.130.0.tar.gz", hash = "sha256:3640cdc3c34566bbd0f32fd899fd5ea969d266d0efcd14f67784e557d2c7192c"}, - {file = "aws_cdk.cx_api-1.130.0-py3-none-any.whl", hash = "sha256:26b425e11e0718f531b6578e0621f141089ec1946ccfa124f929ae932f8340a6"}, + {file = "aws-cdk.cx-api-1.144.0.tar.gz", hash = "sha256:ecfe58cee0fbad6ee225b1658f8b717e8df5c0edb8e05b2ed905e3a4c0fe8b99"}, + {file = "aws_cdk.cx_api-1.144.0-py3-none-any.whl", hash = "sha256:eaf6730a615bf275af3cd283708c7bafa731cb32279baf253f35a42d4735c2fa"}, ] "aws-cdk.region-info" = [ - {file = "aws-cdk.region-info-1.130.0.tar.gz", hash = "sha256:f5534c3c02cc25215cca2d74aee4dc70cd34b35d86550415a085db65851b135e"}, - {file = "aws_cdk.region_info-1.130.0-py3-none-any.whl", hash = "sha256:2d4110779dd87f405270bfb31c73f315898698af04ec23b8069cc444d0bd896e"}, + {file = "aws-cdk.region-info-1.144.0.tar.gz", hash = "sha256:83fd5b133a8ce0d7a39da3d8c66ee90cf139fdfe825ea14af7ef52dbd4c944db"}, + {file = "aws_cdk.region_info-1.144.0-py3-none-any.whl", hash = "sha256:5236ede32213866591fe3f96484913654df350d7f6633b2b1d61d6ca5b35bde5"}, ] cattrs = [ {file = "cattrs-1.0.0-py2.py3-none-any.whl", hash = "sha256:616972ae3dfa6e623a40ad3cb845420e64942989152774ab055e5c2b2f89f997"}, @@ -935,8 +978,8 @@ importlib-resources = [ {file = "importlib_resources-5.4.0.tar.gz", hash = "sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b"}, ] jsii = [ - {file = "jsii-1.42.0-py3-none-any.whl", hash = "sha256:29a4c87c8e1ad7eb67b65b03775f37bdd2212088a1eb854e84f5b541b9eaceb4"}, - {file = "jsii-1.42.0.tar.gz", hash = "sha256:44a1874464c3c9b48417523d5a4790ee792dab6e6f522bc6e6e2c84e42417323"}, + {file = "jsii-1.52.1-py3-none-any.whl", hash = "sha256:c1477f17275d62fbda28ea25c1f0a6cd1d95c1b73dda70835f33e34f0b70ac52"}, + {file = "jsii-1.52.1.tar.gz", hash = "sha256:32f886e99c06a23943986e9580f553860aabeb91dc3a3520d5b76fef1e631c04"}, ] publication = [ {file = "publication-0.0.3-py2.py3-none-any.whl", hash = "sha256:0248885351febc11d8a1098d5c8e3ab2dabcf3e8c0c96db1e17ecd12b53afbe6"}, diff --git a/test_infra/pyproject.toml b/test_infra/pyproject.toml index 1f58359e4..b5e030255 100644 --- a/test_infra/pyproject.toml +++ b/test_infra/pyproject.toml @@ -20,3 +20,4 @@ python = ">=3.6.2, <3.11" "aws-cdk.aws-ssm" = "^1.124.0" "aws-cdk.aws-opensearchservice" = "^1.124.0" "aws-cdk.aws-lakeformation" = "^1.124.0" +"aws-cdk.aws-neptune" = "^1.144.0" diff 
--git a/test_infra/stacks/databases_stack.py b/test_infra/stacks/databases_stack.py index 0f7c0454d..8c0cde255 100644 --- a/test_infra/stacks/databases_stack.py +++ b/test_infra/stacks/databases_stack.py @@ -11,6 +11,7 @@ from aws_cdk import aws_secretsmanager as secrets from aws_cdk import aws_ssm as ssm from aws_cdk import core as cdk +from aws_cdk import aws_neptune as neptune class DatabasesStack(cdk.Stack): # type: ignore @@ -40,6 +41,7 @@ def __init__( self._setup_mysql() self._setup_mysql_serverless() self._setup_sqlserver() + self._setup_neptune() def _set_db_infra(self) -> None: self.db_username = "test" @@ -562,3 +564,16 @@ def _setup_sqlserver(self) -> None: cdk.CfnOutput(self, "SqlServerPort", value=str(port)) cdk.CfnOutput(self, "SqlServerDatabase", value=database) cdk.CfnOutput(self, "SqlServerSchema", value=schema) + + + def _setup_neptune(self, iam_enabled=False, port=8182) -> None: + cluster = neptune.DatabaseCluster(self, "DataWrangler", + vpc=self.vpc, + instance_type=neptune.InstanceType.R5_LARGE, + iam_authentication=iam_enabled + ) + + cdk.CfnOutput(self, "NeptuneClusterEndpoint", value=cluster.cluster_endpoint.hostname) + cdk.CfnOutput(self, "NeptuneReaderEndpoint", value=cluster.cluster_read_endpoint.hostname) + cdk.CfnOutput(self, "NeptunePort", value=str(port)) + cdk.CfnOutput(self, "NeptuneIAMEnabled", value=str(iam_enabled)) \ No newline at end of file diff --git a/tests/test_neptune.py b/tests/test_neptune.py index e83e66a0d..e7d09e0c5 100644 --- a/tests/test_neptune.py +++ b/tests/test_neptune.py @@ -1,7 +1,6 @@ import logging from typing import Any, Dict -import boto3 import pandas as pd import pytest # type: ignore import uuid @@ -17,7 +16,6 @@ @pytest.fixture(scope="session") def cloudformation_outputs(): outputs = {} - outputs['cluster_resource_id']='XXX' outputs['endpoint'] = 'air-routes-oc.cluster-cei5pmtr7fqq.us-west-2.neptune.amazonaws.com' outputs['read_endpoint'] = 'air-routes-oc.cluster-cei5pmtr7fqq.us-west-2.neptune.amazonaws.com' outputs['port'] = 8182 @@ -162,6 +160,56 @@ def test_gremlin_write_edges(neptune_endpoint, neptune_port) -> Dict[str, Any]: batch_cnt_df = wr.neptune.execute_gremlin(client, "g.E().hasLabel('bar').count()") assert batch_cnt_df.iloc[0][0] == final_cnt_df.iloc[0][0] + 50 + + +def test_sparql_write_triples(neptune_endpoint, neptune_port) -> Dict[str, Any]: + client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) + initial_df = wr.neptune.execute_sparql(client, "SELECT ?p ?o WHERE { <foo> ?p ?o .}") + + data = [_create_dummy_triple(), _create_dummy_triple(), _create_dummy_triple()] + df = pd.DataFrame(data) + res = wr.neptune.to_rdf_graph(client, df) + assert res + + final_df = wr.neptune.execute_sparql(client, "SELECT ?p ?o WHERE { <foo> ?p ?o .}") + assert len(final_df.index) == len(initial_df.index) + 3 + + # check to make sure batch addition of triples works + data=[] + for i in range(0, 50): + data.append(_create_dummy_triple()) + + df = pd.DataFrame(data) + res = wr.neptune.to_rdf_graph(client, df) + assert res + + batch_df = wr.neptune.execute_sparql(client, "SELECT ?p ?o WHERE { <foo> ?p ?o .}") + assert len(batch_df.index) == len(final_df.index) + 50 +
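For reference, a minimal sketch of what these tests feed to wr.neptune.to_rdf_graph: one triple per row under the default s/p/o column names (the literal values below are placeholders, and client is assumed to come from wr.neptune.connect as in the tests above):

>>> import pandas as pd
>>> df = pd.DataFrame([{"s": "foo", "p": "bar", "o": 1}])
>>> wr.neptune.to_rdf_graph(client, df)  # each row is turned into one INSERT DATA statement
True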
+def test_sparql_write_quads(neptune_endpoint, neptune_port) -> Dict[str, Any]: + client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) + initial_df = wr.neptune.execute_sparql(client, "SELECT ?p ?o FROM <bar> WHERE { <foo> ?p ?o .}") + + data = [_create_dummy_quad(), _create_dummy_quad(), _create_dummy_quad()] + df = pd.DataFrame(data) + res = wr.neptune.to_rdf_graph(client, df) + assert res + + final_df = wr.neptune.execute_sparql(client, "SELECT ?p ?o FROM <bar> WHERE { <foo> ?p ?o .}") + assert len(final_df.index) == len(initial_df.index) + 3 + + # check to make sure batch addition of quads works + data=[] + for i in range(0, 50): + data.append(_create_dummy_quad()) + + df = pd.DataFrame(data) + res = wr.neptune.to_rdf_graph(client, df) + assert res + + batch_df = wr.neptune.execute_sparql(client, "SELECT ?p ?o FROM <bar> WHERE { <foo> ?p ?o .}") + assert len(batch_df.index) == len(final_df.index) + 50 + def _create_dummy_vertex() -> Dict[str, Any]: data = dict() @@ -180,4 +228,17 @@ def _create_dummy_edge() -> Dict[str, Any]: data['~from']=uuid.uuid4() data['int'] = random.randint(0, 1000) data['str'] = ''.join(random.choice(string.ascii_lowercase) for i in range(10)) + return data + + +def _create_dummy_triple() -> Dict[str, Any]: + data = dict() + data['s']='foo' + data['p']=uuid.uuid4() + data['o'] = random.randint(0, 1000) + return data + +def _create_dummy_quad() -> Dict[str, Any]: + data = _create_dummy_triple() + data['g']='bar' return data \ No newline at end of file From d768d85cfeeefc2655b8f6e849f699cf14479baa Mon Sep 17 00:00:00 2001 From: Dave Bechberger Date: Thu, 10 Feb 2022 14:56:51 -0900 Subject: [PATCH 12/32] [skip ci] Readded nested asyncio in order for it to work in Jupyter as well as added update option for property graph data --- awswrangler/neptune/client.py | 2 ++ awswrangler/neptune/neptune.py | 25 +++++++++++++++++++++---- poetry.lock | 2 +- pyproject.toml | 1 + tests/test_neptune.py | 15 +++++++++++++++ 5 files changed, 40 insertions(+), 5 deletions(-) diff --git a/awswrangler/neptune/client.py b/awswrangler/neptune/client.py index 6721692a9..d6bc6bd06 100644 --- a/awswrangler/neptune/client.py +++ b/awswrangler/neptune/client.py @@ -6,6 +6,7 @@ from typing import Dict, Optional, Any from gremlin_python.driver import client from awswrangler.neptune.gremlin_parser import GremlinParser +import nest_asyncio import logging @@ -106,6 +107,7 @@ def write_gremlin(self, query) -> bool: def _execute_gremlin(self, query) -> Dict[str, Any]: try: + nest_asyncio.apply() uri = f'{HTTP_PROTOCOL}://{self.host}:{self.port}/gremlin' request = self._prepare_request('GET', uri) ws_url = f'{WS_PROTOCOL}://{self.host}:{self.port}/gremlin' diff --git a/awswrangler/neptune/neptune.py b/awswrangler/neptune/neptune.py index 49cabda66..5d461cfe8 100644 --- a/awswrangler/neptune/neptune.py +++ b/awswrangler/neptune/neptune.py @@ -153,9 +153,12 @@ def to_property_graph( # check if ~id and ~label column exist and if not throw error g = Graph().traversal() is_edge_df = False - if '~id' in df.columns and '~label' in df.columns: - if '~to' in df.columns and '~from' in df.columns: - is_edge_df = True + is_update_df=True + if '~id' in df.columns: + if '~label' in df.columns: + is_update_df = False + if '~to' in df.columns and '~from' in df.columns: + is_edge_df = True else: raise exceptions.InvalidArgumentValue( "Dataframe must contain at least a ~id and a ~label column to be saved to Amazon Neptune" @@ -164,7 +167,9 @@ def to_property_graph( # Loop through items in the DF for (index, row) in df.iterrows(): # build up a query - if is_edge_df: + if is_update_df: + g = _build_gremlin_update(g, row) + elif is_edge_df: g = _build_gremlin_insert_edges(g, row.to_dict()) else: g = _build_gremlin_insert_vertices(g, row.to_dict()) @@ -177,6 +182,18 @@ def to_property_graph( return _run_gremlin_insert(client, g) +def _build_gremlin_update(g: GraphTraversalSource, 
row: Dict) -> str: + g = g.V(str(row['~id'])) + for (column, value) in row.items(): + if column not in ['~id', '~label']: + if type(value) is list and len(value) > 0: + for item in value: + g = g.property(Cardinality.set_, column, item) + elif not pd.isna(value) and not pd.isnull(value): + g = g.property(column, value) + + return g + def _build_gremlin_insert_vertices(g: GraphTraversalSource, row: Dict) -> str: g = (g.V(str(row['~id'])). fold(). diff --git a/poetry.lock b/poetry.lock index fff454d84..0c3527b42 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2276,7 +2276,7 @@ sqlserver = ["pyodbc"] [metadata] lock-version = "1.1" python-versions = ">=3.6.2, <3.11" -content-hash = "b4872d5d0dd8e77eb7e6039feef8d2135bb496fe7d82c8341f8df0f12e89e8b0" +content-hash = "d1a32ec225985689e4adc9d5393a557b295dadabd1afa710c6e21326c199436d" [metadata.files] aenum = [ diff --git a/pyproject.toml b/pyproject.toml index d5e0765e7..645ba9e7a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,6 +52,7 @@ xlwt = { version = "^1.3.0", python = "~3.6" } pyodbc = { version = "~4.0.32", optional = true } gremlinpython = "^3.5.2" backoff = "^1.11.1" +nest-asyncio = "^1.5.4" [tool.poetry.extras] sqlserver = ["pyodbc"] diff --git a/tests/test_neptune.py b/tests/test_neptune.py index e7d09e0c5..04fe6e36b 100644 --- a/tests/test_neptune.py +++ b/tests/test_neptune.py @@ -114,6 +114,20 @@ def test_sparql_query(neptune_endpoint, neptune_port) -> Dict[str, Any]: assert df.shape == (2,3) +def test_gremlin_write_updates(neptune_endpoint, neptune_port) -> Dict[str, Any]: + client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) + id=uuid.uuid4() + wr.neptune.execute_gremlin(client, f"g.addV().property(T.id, '{str(id)}')") + + data=[{'~id': id, 'age': 50}] + df = pd.DataFrame(data) + res = wr.neptune.to_property_graph(client, df) + assert res + + final_df = wr.neptune.execute_gremlin(client, f"g.V('{str(id)}').values('age')") + assert final_df.iloc[0][0] == 50 + + def test_gremlin_write_vertices(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) initial_cnt_df = wr.neptune.execute_gremlin(client, "g.V().hasLabel('foo').count()") @@ -137,6 +151,7 @@ def test_gremlin_write_vertices(neptune_endpoint, neptune_port) -> Dict[str, Any batch_cnt_df = wr.neptune.execute_gremlin(client, "g.V().hasLabel('foo').count()") assert batch_cnt_df.iloc[0][0] == final_cnt_df.iloc[0][0] + 50 + def test_gremlin_write_edges(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) initial_cnt_df = wr.neptune.execute_gremlin(client, "g.E().hasLabel('bar').count()") From 42575465b8551e71d014bc3a077583321b614946 Mon Sep 17 00:00:00 2001 From: Dave Bechberger Date: Fri, 11 Feb 2022 16:07:27 -0900 Subject: [PATCH 13/32] Working version of all MVP features as well as changes to make validation pass locally --- awswrangler/neptune/__init__.py | 22 +-- awswrangler/neptune/_utils.py | 73 +++---- awswrangler/neptune/client.py | 215 +++++++++++++------- awswrangler/neptune/gremlin_parser.py | 37 ++-- awswrangler/neptune/neptune.py | 274 +++++++++++++------------- poetry.lock | 46 ++++- pyproject.toml | 1 + test_infra/stacks/databases_stack.py | 17 +- tests/test_neptune.py | 146 +++++++------- tests/test_neptune_parsing.py | 192 +++++++++--------- 10 files changed, 577 insertions(+), 446 deletions(-) diff --git a/awswrangler/neptune/__init__.py b/awswrangler/neptune/__init__.py index 
758fe3263..0819559e2 100644 --- a/awswrangler/neptune/__init__.py +++ b/awswrangler/neptune/__init__.py @@ -1,15 +1,11 @@ """Utilities Module for Amazon Neptune.""" +from awswrangler.neptune.neptune import ( + connect, + execute_gremlin, + execute_opencypher, + execute_sparql, + to_property_graph, + to_rdf_graph, +) -from awswrangler.neptune.neptune import execute_gremlin, execute_opencypher, execute_sparql, to_property_graph, \ - to_rdf_graph, connect -from awswrangler.neptune.gremlin_parser import GremlinParser -from awswrangler.neptune.client import NeptuneClient - -__all__ = [ - "execute_gremlin", - "execute_opencypher", - "execute_sparql", - "to_property_graph", - "to_rdf_graph", - "connect" -] +__all__ = ["execute_gremlin", "execute_opencypher", "execute_sparql", "to_property_graph", "to_rdf_graph", "connect"] diff --git a/awswrangler/neptune/_utils.py b/awswrangler/neptune/_utils.py index 241a5bad6..a2e744ab4 100644 --- a/awswrangler/neptune/_utils.py +++ b/awswrangler/neptune/_utils.py @@ -1,21 +1,23 @@ -from awswrangler.neptune.client import NeptuneClient -import pandas as pd -from gremlin_python.process.graph_traversal import GraphTraversalSource -from gremlin_python.process.traversal import T -from gremlin_python.process.graph_traversal import __ -from gremlin_python.structure.graph import Graph +"""Amazon Neptune Utils Module (PRIVATE)""" -from gremlin_python.process.traversal import Cardinality -from gremlin_python.process.translator import Translator -from typing import Dict, Any +import logging from enum import Enum +from typing import Any -import logging +import pandas as pd +from gremlin_python.process.graph_traversal import GraphTraversalSource, __ +from gremlin_python.process.translator import Translator +from gremlin_python.process.traversal import Cardinality, T +from gremlin_python.structure.graph import Graph + +from awswrangler.neptune.client import NeptuneClient _logger: logging.Logger = logging.getLogger(__name__) class WriteDFType(Enum): + """Dataframe type enum""" + VERTEX = 1 EDGE = 2 UPDATE = 3 @@ -39,7 +41,7 @@ def write_gremlin_df(client: NeptuneClient, df: pd.DataFrame, mode: WriteDFType, g = Graph().traversal() # Loop through items in the DF for (index, row) in df.iterrows(): - # build up a query + # build up a query if mode == WriteDFType.EDGE: g = _build_gremlin_edges(g, row.to_dict()) elif mode == WriteDFType.VERTEX: @@ -58,62 +60,51 @@ def write_gremlin_df(client: NeptuneClient, df: pd.DataFrame, mode: WriteDFType, def _run_gremlin_insert(client: NeptuneClient, g: GraphTraversalSource) -> bool: - translator = Translator('g') + translator = Translator("g") s = translator.translate(g.bytecode) - s = s.replace('Cardinality.', '') # hack to fix parser error for set cardinality + s = s.replace("Cardinality.", "") # hack to fix parser error for set cardinality _logger.debug(s) res = client.write_gremlin(s) return res -def _build_gremlin_update(g: GraphTraversalSource, row: Dict) -> GraphTraversalSource: - g = g.V(str(row['~id'])) +def _build_gremlin_update(g: GraphTraversalSource, row: Any) -> GraphTraversalSource: + g = g.V(str(row["~id"])) g = _build_gremlin_properties(g, row) return g -def _build_gremlin_vertices(g: GraphTraversalSource, row: Dict) -> GraphTraversalSource: - g = (g.V(str(row['~id'])). - fold(). 
- coalesce( - __.unfold(), - __.addV(row['~label']).property(T.id, str(row['~id']))) - ) +def _build_gremlin_vertices(g: GraphTraversalSource, row: Any) -> GraphTraversalSource: + g = g.V(str(row["~id"])).fold().coalesce(__.unfold(), __.addV(row["~label"]).property(T.id, str(row["~id"]))) g = _build_gremlin_properties(g, row) return g def _build_gremlin_edges(g: GraphTraversalSource, row: pd.Series) -> GraphTraversalSource: - g = (g.V(str(row['~from'])). - fold(). - coalesce( - __.unfold(), - _build_gremlin_vertices( - __, {"~id": row['~from'], "~label": "Vertex"})). - addE(row['~label']). - to( - __.V(str(row['~to'])). - fold(). - coalesce( - __.unfold(), - _build_gremlin_vertices( - __, {"~id": row['~to'], "~label": "Vertex"}))) + g = ( + g.V(str(row["~from"])) + .fold() + .coalesce(__.unfold(), _build_gremlin_vertices(__, {"~id": row["~from"], "~label": "Vertex"})) + .addE(row["~label"]) + .to( + __.V(str(row["~to"])) + .fold() + .coalesce(__.unfold(), _build_gremlin_vertices(__, {"~id": row["~to"], "~label": "Vertex"})) + ) ) g = _build_gremlin_properties(g, row) return g -def _build_gremlin_properties(g, row): +def _build_gremlin_properties(g: GraphTraversalSource, row: Any) -> GraphTraversalSource: for (column, value) in row.items(): - if column not in ['~id', '~label', '~to', '~from']: - if type(value) is list and len(value) > 0: + if column not in ["~id", "~label", "~to", "~from"]: + if isinstance(value, list) and len(value) > 0: for item in value: g = g.property(Cardinality.set_, column, item) elif not pd.isna(value) and not pd.isnull(value): g = g.property(column, value) return g - - diff --git a/awswrangler/neptune/client.py b/awswrangler/neptune/client.py index d6bc6bd06..807e0c3b2 100644 --- a/awswrangler/neptune/client.py +++ b/awswrangler/neptune/client.py @@ -1,26 +1,38 @@ -from awswrangler import exceptions +"""Amazon NeptuneClient Module""" + +import logging +from typing import Any, Optional + import boto3 +import nest_asyncio +import requests from botocore.auth import SigV4Auth from botocore.awsrequest import AWSRequest -import requests -from typing import Dict, Optional, Any from gremlin_python.driver import client -from awswrangler.neptune.gremlin_parser import GremlinParser -import nest_asyncio +from SPARQLWrapper import SPARQLWrapper -import logging +from awswrangler import exceptions +from awswrangler.neptune.gremlin_parser import GremlinParser _logger: logging.Logger = logging.getLogger(__name__) DEFAULT_PORT = 8182 -NEPTUNE_SERVICE_NAME = 'neptune-db' -HTTP_PROTOCOL = 'https' -WS_PROTOCOL = 'wss' +NEPTUNE_SERVICE_NAME = "neptune-db" +HTTP_PROTOCOL = "https" +WS_PROTOCOL = "wss" class NeptuneClient: - def __init__(self, host: str, port: int = DEFAULT_PORT, iam_enabled: bool = False, - boto3_session: Optional[boto3.Session] = None, region: Optional[str] = None): + """This object represents a Neptune cluster connection.""" + + def __init__( + self, + host: str, + port: int = DEFAULT_PORT, + iam_enabled: bool = False, + boto3_session: Optional[boto3.Session] = None, + region: Optional[str] = None, + ): self.host = host self.port = port self.iam_enabled = iam_enabled @@ -31,7 +43,6 @@ def __init__(self, host: str, port: int = DEFAULT_PORT, iam_enabled: bool = Fals self.region = region self._http_session = requests.Session() - def __get_region_from_session(self) -> str: """Extract region from session.""" region: Optional[str] = self.boto3_session.region_name @@ -46,23 +57,38 @@ def __ensure_session(session: boto3.Session = None) -> boto3.Session: return session elif 
boto3.DEFAULT_SESSION: return boto3.DEFAULT_SESSION - else: - return boto3.Session() - - def _prepare_request(self, method, url, *, data=None, params=None, headers=None, - service=NEPTUNE_SERVICE_NAME) -> requests.PreparedRequest: + return boto3.Session() + + def _prepare_request( + self, + method: str, + url: str, + *, + data: Any = None, + params: Any = None, + headers: Any = None, + service: str = NEPTUNE_SERVICE_NAME, + ) -> requests.PreparedRequest: request = requests.Request(method=method, url=url, data=data, params=params, headers=headers) if self.boto3_session is not None: - aws_request = self._get_aws_request(method=method, url=url, data=data, params=params, headers=headers, - service=service) + aws_request = self._get_aws_request( + method=method, url=url, data=data, params=params, headers=headers, service=service + ) request.headers = dict(aws_request.headers) return request.prepare() - - def _get_aws_request(self, method, url, *, data=None, params=None, headers=None, - service=NEPTUNE_SERVICE_NAME) -> AWSRequest: + def _get_aws_request( + self, + method: str, + url: str, + *, + data: Any = None, + params: Any = None, + headers: Any = None, + service: str = NEPTUNE_SERVICE_NAME, + ) -> AWSRequest: req = AWSRequest(method=method, url=url, data=data, params=params, headers=headers) if self.iam_enabled: credentials = self.boto3_session.get_credentials() @@ -70,10 +96,14 @@ def _get_aws_request(self, method, url, *, data=None, params=None, headers=None, frozen_creds = credentials.get_frozen_credentials() except AttributeError: print("Could not find valid IAM credentials in any the following locations:\n") - print("env, assume-role, assume-role-with-web-identity, sso, shared-credential-file, custom-process, " - "config-file, ec2-credentials-file, boto-config, container-role, iam-role\n") - print("Go to https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html for more " - "details on configuring your IAM credentials.") + print( + "env, assume-role, assume-role-with-web-identity, sso, shared-credential-file, custom-process, " + "config-file, ec2-credentials-file, boto-config, container-role, iam-role\n" + ) + print( + "Go to https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html for more " + "details on configuring your IAM credentials." + ) return req SigV4Auth(frozen_creds, service, self.region).add_auth(req) prepared_iam_req = req.prepare() @@ -81,37 +111,61 @@ def _get_aws_request(self, method, url, *, data=None, params=None, headers=None, else: return req + def read_opencypher(self, query: str, headers: Any = None) -> Any: + """Executes the provided openCypher query - def read_opencypher(self, query: str, headers: Dict[str, Any] = None) -> Dict[str, Any]: + Args: + query (str): The query to execute + headers (Any, optional): Any additional headers that should be associated with the query. Defaults to None. 
+ + Returns: + Any: The result of the query + """ if headers is None: headers = {} - if 'content-type' not in headers: - headers['content-type'] = 'application/x-www-form-urlencoded' + if "content-type" not in headers: + headers["content-type"] = "application/x-www-form-urlencoded" - url = f'{HTTP_PROTOCOL}://{self.host}:{self.port}/openCypher' - data = { - 'query': query - } + url = f"{HTTP_PROTOCOL}://{self.host}:{self.port}/openCypher" + data = {"query": query} - req = self._prepare_request('POST', url, data=data, headers=headers) + req = self._prepare_request("POST", url, data=data, headers=headers) res = self._http_session.send(req) - return res.json()['results'] + _logger.debug(res) + return res.json()["results"] + + def read_gremlin(self, query: str) -> Any: + """Executes the provided Gremlin traversal and returns the results - def read_gremlin(self, query) -> Any: + Args: + query (str): The Gremlin query + + Returns: + Any: The results of the Gremlin traversal + """ return self._execute_gremlin(query) - def write_gremlin(self, query) -> bool: - self._execute_gremlin(query) + def write_gremlin(self, query: str) -> bool: + """Executes a Gremlin write query + + Args: + query (str): The query to execute + + Returns: + bool: The success of the Gremlin write query + """ + res = self._execute_gremlin(query) + _logger.debug(res) return True
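As a quick usage sketch of the client methods above (the endpoint host below is a placeholder; a healthy status response mirrors the integration tests later in this series):

>>> import awswrangler as wr
>>> client = wr.neptune.connect("my-cluster.cluster-xxx.us-east-1.neptune.amazonaws.com", 8182)
>>> client.status()["status"]
'healthy'
>>> client.read_opencypher("MATCH (n) RETURN n LIMIT 1")  # parsed JSON results
>>> client.read_gremlin("g.V().limit(1)")  # results parsed by GremlinParser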
- def _execute_gremlin(self, query) -> Dict[str, Any]: + def _execute_gremlin(self, query: str) -> Any: try: + nest_asyncio.apply() - uri = f'{HTTP_PROTOCOL}://{self.host}:{self.port}/gremlin' - request = self._prepare_request('GET', uri) - ws_url = f'{WS_PROTOCOL}://{self.host}:{self.port}/gremlin' - c = client.Client(ws_url, 'g', headers=dict(request.headers)) + uri = f"{HTTP_PROTOCOL}://{self.host}:{self.port}/gremlin" + request = self._prepare_request("GET", uri) + ws_url = f"{WS_PROTOCOL}://{self.host}:{self.port}/gremlin" + c = client.Client(ws_url, "g", headers=dict(request.headers)) result = c.submit(query) future_results = result.all() results = future_results.result() @@ -119,41 +173,64 @@ def _execute_gremlin(self, query) -> Dict[str, Any]: return GremlinParser.gremlin_results_to_dict(results) except Exception as e: c.close() + _logger.error(e) raise e - def read_sparql(self, query, headers: Dict[str, Any] = None) -> Dict[str, Any]: - return self._execute_sparql(query, headers) - - - def write_sparql(self, query, headers: Dict[str, Any] = None) -> Dict[str, Any]: - self._execute_sparql(query, headers, is_update=True) + def read_sparql(self, query: str, headers: Any = None) -> Any: + """Executes the given query and returns the results + + Args: + query (str): The SPARQL query to execute + headers (Any, optional): Any additional headers to include with the request. Defaults to None. + + Returns: + Any: The results of the SPARQL query + """ + res = self._execute_sparql(query, headers) + _logger.debug(res) + return res + + def write_sparql(self, query: str, headers: Any = None) -> bool: + """Executes the specified SPARQL write statements + + Args: + query (str): The SPARQL query to execute + headers (Any, optional): Any additional headers to include with the request. Defaults to None. + + Returns: + bool: The success of the query + """ + self._execute_sparql(query, headers) return True - - def _execute_sparql(self, query, headers, is_update=False): + def _execute_sparql(self, query: str, headers: Any) -> Any: if headers is None: headers = {} - if is_update: - data = {'update': query} + s = SPARQLWrapper("") + s.setQuery(query) + query_type = s.queryType.upper() + if query_type in ["SELECT", "CONSTRUCT", "ASK", "DESCRIBE"]: + data = {"query": query} else: - data = {'query': query} - - if 'content-type' not in headers: - headers['content-type'] = 'application/x-www-form-urlencoded' + data = {"update": query} - uri = f'{HTTP_PROTOCOL}://{self.host}:{self.port}/sparql' - req = self._prepare_request('POST', uri, data=data, headers=headers) - res = self._http_session.send(req) - return res + if "content-type" not in headers: + headers["content-type"] = "application/x-www-form-urlencoded" - def status(self): - url = f'{HTTP_PROTOCOL}://{self.host}:{self.port}/status' - req = self._prepare_request('GET', url, data='') + uri = f"{HTTP_PROTOCOL}://{self.host}:{self.port}/sparql" + req = self._prepare_request("POST", uri, data=data, headers=headers) res = self._http_session.send(req) - if res.status_code == 200: - return res.json() - else: - _logger.error("Error connecting to Amazon Neptune cluster. Please verify your connection details") - raise ConnectionError(res.status_code) + _logger.debug(res) + return res.json() + def status(self) -> Any: + """Returns the status of the Neptune cluster + + Returns: + str: The result of the call to the status API for the Neptune cluster + """ + url = f"{HTTP_PROTOCOL}://{self.host}:{self.port}/status" + req = self._prepare_request("GET", url, data="") + res = self._http_session.send(req) + return res.json()
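One non-obvious trick in _execute_sparql above: a SPARQLWrapper built with an empty endpoint URL is used purely as a parser, to decide whether the statement should be sent in the "query" or the "update" form field. A standalone sketch of that classification (no Neptune endpoint is contacted):

>>> from SPARQLWrapper import SPARQLWrapper
>>> s = SPARQLWrapper("")  # the endpoint URL is never used here
>>> s.setQuery("SELECT ?s WHERE { ?s ?p ?o }")
>>> s.queryType
'SELECT'
>>> s.setQuery("INSERT DATA { <a> <b> <c> }")
>>> s.queryType
'INSERT'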
diff --git a/awswrangler/neptune/gremlin_parser.py b/awswrangler/neptune/gremlin_parser.py index 76873c21e..60c4eb8e6 100644 --- a/awswrangler/neptune/gremlin_parser.py +++ b/awswrangler/neptune/gremlin_parser.py @@ -1,19 +1,26 @@ -from typing import Dict, Any +"""Amazon Neptune GremlinParser Module (PRIVATE).""" +from typing import Any, Dict -from gremlin_python.structure.graph import Path -from gremlin_python.structure.graph import Vertex -from gremlin_python.structure.graph import Edge -from gremlin_python.structure.graph import VertexProperty -from gremlin_python.structure.graph import Property +from gremlin_python.structure.graph import Edge, Path, Property, Vertex, VertexProperty class GremlinParser: + """This represents a parser for returning Gremlin results as a dictionary.""" + @staticmethod - def gremlin_results_to_dict(result) -> Dict[str, Any]: + def gremlin_results_to_dict(result: Any) -> Any: + """Takes a Gremlin ResultSet and returns a dictionary + + Args: + result (Any): The Gremlin resultset to convert + + Returns: + Any: A dictionary of the results + """ res = [] # For lists or paths unwind them - if isinstance(result, list) or isinstance(result, Path): + if isinstance(result, (list, Path)): for x in result: res.append(GremlinParser._parse_dict(x)) @@ -27,19 +34,18 @@ def gremlin_results_to_dict(result) -> Dict[str, Any]: return res @staticmethod - def _parse_dict(data) -> Dict[str, Any]: - d = dict() + def _parse_dict(data: Any) -> Any: + d: Dict[str, Any] = {} # If this is a list or Path then unwind it - if isinstance(data, list) or isinstance(data, Path): + if isinstance(data, (list, Path)): res = [] for x in data: res.append(GremlinParser._parse_dict(x)) return res # If this is an element then make it a dictionary - elif isinstance(data, Vertex) or isinstance(data, Edge) or isinstance(data, VertexProperty) or isinstance(data, - Property): + elif isinstance(data, (Vertex, Edge, VertexProperty, Property)): data = data.__dict__ # If this is a scalar then create a Map with it @@ -48,7 +54,7 @@ def _parse_dict(data) -> Dict[str, Any]: for (k, v) in data.items(): # If the key is a Vertex or an Edge do special processing - if isinstance(k, Vertex) or isinstance(k, Edge): + if isinstance(k, (Vertex, Edge)): k = k.id # If the value is a list do special processing to make it a scalar if the list is of length 1 @@ -58,7 +64,6 @@ def _parse_dict(data) -> Dict[str, Any]: d[k] = v # If the value is a Vertex or Edge do special processing - if isinstance(d[k], Vertex) or isinstance(d[k], Edge) or isinstance(d[k], VertexProperty) or isinstance( - d[k], Property): + if isinstance(d[k], (Vertex, Edge, VertexProperty, Property)): d[k] = d[k].__dict__ return d diff --git a/awswrangler/neptune/neptune.py b/awswrangler/neptune/neptune.py index 5d461cfe8..245ac9c56 100644 --- a/awswrangler/neptune/neptune.py +++ b/awswrangler/neptune/neptune.py @@ -1,24 +1,21 @@ -from awswrangler.neptune.client import NeptuneClient -from typing import Dict, Any +"""Amazon Neptune Module""" + +import logging +from typing import Any + import pandas as pd -from awswrangler import exceptions -from gremlin_python.process.graph_traversal import GraphTraversalSource +from gremlin_python.process.graph_traversal import GraphTraversalSource, __ from gremlin_python.process.translator import Translator -from gremlin_python.process.traversal import T -from gremlin_python.process.graph_traversal import __ -from gremlin_python.process.traversal import Cardinality +from gremlin_python.process.traversal import Cardinality, T from gremlin_python.structure.graph import Graph -import logging +from awswrangler import exceptions +from awswrangler.neptune.client import NeptuneClient _logger: logging.Logger = logging.getLogger(__name__) -def execute_gremlin( - client: NeptuneClient, - query: str, - **kwargs: str -) -> pd.DataFrame: +def execute_gremlin(client: NeptuneClient, query: str) -> pd.DataFrame: """Return results of a Gremlin traversal as pandas dataframe. Parameters @@ -38,18 +35,15 @@ Run a Gremlin Query >>> import awswrangler as wr - >>> client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=False, iam_enabled=False) + >>> client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) >>> df = wr.neptune.execute_gremlin(client, "g.V().limit(1)") """ - results = client.read_gremlin(query, **kwargs) + results = client.read_gremlin(query) df = pd.DataFrame.from_records(results) return df
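To see what those parsed records look like before pandas gets them, a small standalone sketch (no cluster needed; Vertex is the gremlinpython type, and the resulting columns mirror the parsing tests at the end of this series):

>>> import pandas as pd
>>> from gremlin_python.structure.graph import Vertex
>>> from awswrangler.neptune.gremlin_parser import GremlinParser
>>> recs = GremlinParser.gremlin_results_to_dict([Vertex("foo"), Vertex("bar")])
>>> pd.DataFrame.from_records(recs).columns.tolist()
['id', 'label']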
-def execute_opencypher( - client: NeptuneClient, - query: str -) -> pd.DataFrame: +def execute_opencypher(client: NeptuneClient, query: str) -> pd.DataFrame: """Return results of an openCypher traversal as pandas dataframe. Parameters @@ -69,7 +63,7 @@ Run an openCypher query >>> import awswrangler as wr - >>> client = wr.neptune.connect(neptune_endpoint, neptune_port, ssl=True, iam_enabled=False) + >>> client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) >>> resp = wr.neptune.execute_opencypher(client, "MATCH (n) RETURN n LIMIT 1") """ resp = client.read_opencypher(query) @@ -77,10 +71,7 @@ return df -def execute_sparql( - client: NeptuneClient, - query: str -) -> pd.DataFrame: +def execute_sparql(client: NeptuneClient, query: str) -> pd.DataFrame: """Return results of a SPARQL query as pandas dataframe. Parameters @@ -100,33 +91,31 @@ Run a SPARQL query >>> import awswrangler as wr - >>> client = wr.neptune.Client(host='NEPTUNE-ENDPOINT') + >>> client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) >>> df = wr.neptune.execute_sparql(client, "PREFIX foaf: <http://xmlns.com/foaf/0.1/> SELECT ?name WHERE { ?person foaf:name ?name . - }") """ - resp = client.read_sparql(query) - data = resp.json() - df = pd.DataFrame(data['results']['bindings']) - df.applymap(lambda x: x['value']) - return df + data = client.read_sparql(query) + if "results" in data and "bindings" in data["results"]: + df = pd.DataFrame(data["results"]["bindings"]) + df.applymap(lambda x: x["value"]) + return df + else: + return pd.DataFrame(data) + +def to_property_graph(client: NeptuneClient, df: pd.DataFrame, batch_size: int = 50) -> bool: + """Write records stored in a DataFrame into Amazon Neptune. -def to_property_graph( - client: NeptuneClient, - df: pd.DataFrame, - batch_size: int = 50 -) -> None: - """Write records stored in a DataFrame into Amazon Neptune. - - If writing to a property graph then DataFrames for vertices and edges must be written separately. - DataFrames for vertices must have a ~label column with the label and a ~id column for the vertex id. - If the ~id column does not exist, the specified id does not exists, or is empty then a new vertex will be added. - If no ~label column exists an exception will be thrown. - DataFrames for edges must have a ~id, ~label, ~to, and ~from column. If the ~id column does not exist, - the specified id does not exists, or is empty then a new edge will be added. If no ~label, ~to, or ~from column exists an exception will be thrown. + If writing to a property graph then DataFrames for vertices and edges must be written separately. + DataFrames for vertices must have a ~label column with the label and a ~id column for the vertex id. + If the ~id column does not exist, the specified id does not exist, or is empty then a new vertex will be added. + If no ~label column exists an exception will be thrown. + DataFrames for edges must have a ~id, ~label, ~to, and ~from column. If the ~id column does not exist, + the specified id does not exist, or is empty then a new edge will be added. If no ~label, ~to, or ~from column + exists an exception will be thrown. Parameters ---------- @@ -142,10 +131,10 @@ Examples -------- - Writing to Amazon Neptune + Writing to Amazon Neptune >>> import awswrangler as wr - >>> client = wr.neptune.Client(host='NEPTUNE-ENDPOINT') + >>> client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) >>> wr.neptune.to_property_graph( ... df=df ... 
) @@ -153,11 +142,11 @@ def to_property_graph( # check if ~id and ~label column exist and if not throw error g = Graph().traversal() is_edge_df = False - is_update_df=True - if '~id' in df.columns: - if '~label' in df.columns: + is_update_df = True + if "~id" in df.columns: + if "~label" in df.columns: is_update_df = False - if '~to' in df.columns and '~from' in df.columns: + if "~to" in df.columns and "~from" in df.columns: is_edge_df = True else: raise exceptions.InvalidArgumentValue( @@ -166,7 +155,7 @@ def to_property_graph( # Loop through items in the DF for (index, row) in df.iterrows(): - # build up a query + # build up a query if is_update_df: g = _build_gremlin_update(g, row) elif is_edge_df: @@ -182,86 +171,34 @@ def to_property_graph( return _run_gremlin_insert(client, g)
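For concreteness, a minimal sketch of the three row shapes this function routes (the ids, labels, and client value below are placeholders; client comes from wr.neptune.connect as in the docstring above):

>>> vertices = pd.DataFrame([{"~id": "v1", "~label": "person", "name": "alice"}])  # insert vertex
>>> edges = pd.DataFrame([{"~id": "e1", "~label": "knows", "~from": "v1", "~to": "v2"}])  # insert edge
>>> updates = pd.DataFrame([{"~id": "v1", "age": 30}])  # no ~label column, so the update path is taken
>>> wr.neptune.to_property_graph(client, vertices)
True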
-def _build_gremlin_update(g: GraphTraversalSource, row: Dict) -> str: - g = g.V(str(row['~id'])) - for (column, value) in row.items(): - if column not in ['~id', '~label']: - if type(value) is list and len(value) > 0: - for item in value: - g = g.property(Cardinality.set_, column, item) - elif not pd.isna(value) and not pd.isnull(value): - g = g.property(column, value) - - return g - -def _build_gremlin_insert_vertices(g: GraphTraversalSource, row: Dict) -> str: - g = (g.V(str(row['~id'])). - fold(). - coalesce( - __.unfold(), - __.addV(row['~label']).property(T.id, str(row['~id']))) - ) - for (column, value) in row.items(): - if column not in ['~id', '~label']: - if type(value) is list and len(value) > 0: - for item in value: - g = g.property(Cardinality.set_, column, item) - elif not pd.isna(value) and not pd.isnull(value): - g = g.property(column, value) - - return g - - -def _build_gremlin_insert_edges(g: GraphTraversalSource, row: pd.Series) -> str: - g = (g.V(str(row['~from'])). - fold(). - coalesce( - __.unfold(), - _build_gremlin_insert_vertices(__, {"~id": row['~from'], "~label": "Vertex"})). - addE(row['~label']). - to(__.V(str(row['~to'])).fold().coalesce(__.unfold(), _build_gremlin_insert_vertices(__, {"~id": row['~to'], - "~label": "Vertex"}))) - ) - for (column, value) in row.items(): - if column not in ['~id', '~label', '~to', '~from']: - if type(value) is list and len(value) > 0: - for item in value: - g = g.property(Cardinality.set_, column, item) - elif not pd.isna(value) and not pd.isnull(value): - g = g.property(column, value) - - return g - - -def _run_gremlin_insert(client: NeptuneClient, g: GraphTraversalSource) -> bool: - translator = Translator('g') - s = translator.translate(g.bytecode) - s = s.replace('Cardinality.', '') # hack to fix parser error for set cardinality - _logger.debug(s) - res = client.write_gremlin(s) - return res - - def to_rdf_graph( - client: NeptuneClient, - df: pd.DataFrame, - batch_size: int = 50, - subject_column:str = 's', - predicate_column:str = 'p', - object_column:str = 'o', - graph_column:str = 'g' -) -> None: - """Write records stored in a DataFrame into Amazon Neptune. - - The DataFrame must consist of triples with column names for the subject, predicate, and object specified. + client: NeptuneClient, + df: pd.DataFrame, + batch_size: int = 50, + subject_column: str = "s", + predicate_column: str = "p", + object_column: str = "o", + graph_column: str = "g", +) -> bool: + """Write records stored in a DataFrame into Amazon Neptune. + + The DataFrame must consist of triples with column names for the subject, predicate, and object specified. If you want to add data into a named graph then you will also need the graph column. Parameters ---------- - client : NeptuneClient + client (NeptuneClient) : instance of the neptune client to use - df : pandas.DataFrame + df (pandas.DataFrame) : Pandas DataFrame https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html + subject_column (str, optional) : + The column name in the dataframe for the subject. Defaults to 's' + predicate_column (str, optional) : + The column name in the dataframe for the predicate. Defaults to 'p' + object_column (str, optional) : + The column name in the dataframe for the object. Defaults to 'o' + graph_column (str, optional) : + The column name in the dataframe for the graph if sending across quads. Defaults to 'g' Returns ------- @@ -270,10 +207,10 @@ Examples -------- - Writing to Amazon Neptune + Writing to Amazon Neptune >>> import awswrangler as wr - >>> client = wr.neptune.Client(host='NEPTUNE-ENDPOINT') + >>> client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) >>> wr.neptune.to_rdf_graph( ... df=df ... ) @@ -284,26 +221,97 @@ is_quads = True else: raise exceptions.InvalidArgumentValue( - "Dataframe must contain at least the subject, predicate, and object columns defined or the defaults (s, p, o) to be saved to Amazon Neptune" + """Dataframe must contain at least the subject, predicate, and object columns defined or the defaults + (s, p, o) to be saved to Amazon Neptune""" ) query = "" # Loop through items in the DF for (index, row) in df.iterrows(): - # build up a query - if is_quads: - insert = f"INSERT DATA {{ GRAPH <{row[graph_column]}> {{<{row[subject_column]}> <{str(row[predicate_column])}> <{row[object_column]}> . }} }}; " + # build up a query + if is_quads: + insert = f"""INSERT DATA {{ GRAPH <{row[graph_column]}> {{<{row[subject_column]}> + <{str(row[predicate_column])}> <{row[object_column]}> . }} }}; """ query = query + insert else: - insert = f"INSERT DATA {{ <{row[subject_column]}> <{str(row[predicate_column])}> <{row[object_column]}> . }}; " + insert = f"""INSERT DATA {{ <{row[subject_column]}> <{str(row[predicate_column])}> + <{row[object_column]}> . }}; """ query = query + insert # run the query if index > 0 and index % batch_size == 0: res = client.write_sparql(query) if res: - query="" + query = "" return client.write_sparql(query)
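As a worked example of the SPARQL this loop emits, the per-row f-string logic above can be reproduced standalone (the row values are placeholders):

>>> row = {"s": "foo", "p": "bar", "o": "baz", "g": "g1"}
>>> f"INSERT DATA {{ GRAPH <{row['g']}> {{<{row['s']}> <{row['p']}> <{row['o']}> . }} }}; "
'INSERT DATA { GRAPH <g1> {<foo> <bar> <baz> . } }; '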
-def connect(host: str, port: str, iam_enabled: bool = False, **kwargs: Any) -> NeptuneClient: +def connect(host: str, port: int, iam_enabled: bool = False, **kwargs: Any) -> NeptuneClient: + """Creates a connection to a Neptune cluster + + Args: + host (str): The host endpoint to connect to + port (int): The port endpoint to connect to + iam_enabled (bool, optional): set to True if IAM is enabled on the cluster. Defaults to False. + + Returns: + NeptuneClient: A client connected to the specified Neptune cluster + """ return NeptuneClient(host, port, iam_enabled, **kwargs) + + +def _build_gremlin_update(g: GraphTraversalSource, row: Any) -> GraphTraversalSource: + g = g.V(str(row["~id"])) + for (column, value) in row.items(): + if column not in ["~id", "~label"]: + if isinstance(value, list) and len(value) > 0: + for item in value: + g = g.property(Cardinality.set_, column, item) + elif not pd.isna(value) and not pd.isnull(value): + g = g.property(column, value) + + return g + + +def _build_gremlin_insert_vertices(g: GraphTraversalSource, row: Any) -> GraphTraversalSource: + g = g.V(str(row["~id"])).fold().coalesce(__.unfold(), __.addV(row["~label"]).property(T.id, str(row["~id"]))) + for (column, value) in row.items(): + if column not in ["~id", "~label"]: + if isinstance(value, list) and len(value) > 0: + for item in value: + g = g.property(Cardinality.set_, column, item) + elif not pd.isna(value) and not pd.isnull(value): + g = g.property(column, value) + + return g + + +def _build_gremlin_insert_edges(g: GraphTraversalSource, row: pd.Series) -> GraphTraversalSource: + g = ( + g.V(str(row["~from"])) + .fold() + .coalesce(__.unfold(), _build_gremlin_insert_vertices(__, {"~id": row["~from"], "~label": "Vertex"})) + .addE(row["~label"]) + .to( + __.V(str(row["~to"])) + .fold() + .coalesce(__.unfold(), _build_gremlin_insert_vertices(__, {"~id": row["~to"], "~label": "Vertex"})) + ) + ) + for (column, value) in row.items(): + if column not in ["~id", "~label", "~to", "~from"]: + if isinstance(value, list) and len(value) > 0: + for item in value: + g = g.property(Cardinality.set_, column, item) + elif not pd.isna(value) and not pd.isnull(value): + g = g.property(column, value) + + return g + + +def _run_gremlin_insert(client: NeptuneClient, g: GraphTraversalSource) -> bool: + translator = Translator("g") + s = translator.translate(g.bytecode) + s = s.replace("Cardinality.", "") # hack to fix parser error for set cardinality + _logger.debug(s) + res = client.write_gremlin(s) + return res diff --git a/poetry.lock b/poetry.lock index 0c3527b42..3ae52d7c2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1741,6 +1741,25 @@ python-versions = ">=3.6" cffi = {version = "*", markers = "implementation_name == \"pypy\""} py = {version = "*", markers = "implementation_name == \"pypy\""} +[[package]] +name = "rdflib" +version = "5.0.0" +description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information."
+category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +isodate = "*" +pyparsing = "*" +six = "*" + +[package.extras] +docs = ["sphinx (<3)", "sphinxcontrib-apidoc"] +html = ["html5lib"] +sparql = ["requests"] +tests = ["html5lib", "networkx", "nose", "doctest-ignore-unicode"] + [[package]] name = "redshift-connector" version = "2.0.903" @@ -1904,6 +1923,20 @@ category = "main" optional = false python-versions = ">=3.6" +[[package]] +name = "sparqlwrapper" +version = "1.8.5" +description = "SPARQL Endpoint interface to Python" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +rdflib = ">=4.0" + +[package.extras] +keepalive = ["keepalive (>=0.5)"] + [[package]] name = "sphinx" version = "4.3.2" @@ -2276,7 +2309,7 @@ sqlserver = ["pyodbc"] [metadata] lock-version = "1.1" python-versions = ">=3.6.2, <3.11" -content-hash = "d1a32ec225985689e4adc9d5393a557b295dadabd1afa710c6e21326c199436d" +content-hash = "fca6373d76d552d83a2aa8814a90486feccb67b19b6371d0b92bcc0cb2aa0d3a" [metadata.files] aenum = [ @@ -3616,6 +3649,10 @@ pyzmq = [ {file = "pyzmq-22.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:d6157793719de168b199194f6b6173f0ccd3bf3499e6870fac17086072e39115"}, {file = "pyzmq-22.3.0.tar.gz", hash = "sha256:8eddc033e716f8c91c6a2112f0a8ebc5e00532b4a6ae1eb0ccc48e027f9c671c"}, ] +rdflib = [ + {file = "rdflib-5.0.0-py3-none-any.whl", hash = "sha256:88208ea971a87886d60ae2b1a4b2cdc263527af0454c422118d43fe64b357877"}, + {file = "rdflib-5.0.0.tar.gz", hash = "sha256:78149dd49d385efec3b3adfbd61c87afaf1281c30d3fcaf1b323b34f603fb155"}, +] redshift-connector = [ {file = "redshift_connector-2.0.903-py3-none-any.whl", hash = "sha256:cbd34a1d19ecfbbe06bc9359e2c81c8e8a1ed9496dcc24c3e945b29e4a321deb"}, ] @@ -3666,6 +3703,13 @@ soupsieve = [ {file = "soupsieve-2.3.1-py3-none-any.whl", hash = "sha256:1a3cca2617c6b38c0343ed661b1fa5de5637f257d4fe22bd9f1338010a1efefb"}, {file = "soupsieve-2.3.1.tar.gz", hash = "sha256:b8d49b1cd4f037c7082a9683dfa1801aa2597fb11c3a1155b7a5b94829b4f1f9"}, ] +sparqlwrapper = [ + {file = "SPARQLWrapper-1.8.5-py2-none-any.whl", hash = "sha256:357ee8a27bc910ea13d77836dbddd0b914991495b8cc1bf70676578155e962a8"}, + {file = "SPARQLWrapper-1.8.5-py2.7.egg", hash = "sha256:17ec44b08b8ae2888c801066249f74fe328eec25d90203ce7eadaf82e64484c7"}, + {file = "SPARQLWrapper-1.8.5-py3-none-any.whl", hash = "sha256:c7f9c9d8ebb13428771bc3b6dee54197422507dcc3dea34e30d5dcfc53478dec"}, + {file = "SPARQLWrapper-1.8.5-py3.4.egg", hash = "sha256:8cf6c21126ed76edc85c5c232fd6f77b9f61f8ad1db90a7147cdde2104aff145"}, + {file = "SPARQLWrapper-1.8.5.tar.gz", hash = "sha256:d6a66b5b8cda141660e07aeb00472db077a98d22cb588c973209c7336850fb3c"}, +] sphinx = [ {file = "Sphinx-4.3.2-py3-none-any.whl", hash = "sha256:6a11ea5dd0bdb197f9c2abc2e0ce73e01340464feaece525e64036546d24c851"}, {file = "Sphinx-4.3.2.tar.gz", hash = "sha256:0a8836751a68306b3fe97ecbe44db786f8479c3bf4b80e3a7f5c838657b4698c"}, diff --git a/pyproject.toml b/pyproject.toml index 645ba9e7a..9439ea3eb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,6 +53,7 @@ pyodbc = { version = "~4.0.32", optional = true } gremlinpython = "^3.5.2" backoff = "^1.11.1" nest-asyncio = "^1.5.4" +SPARQLWrapper = "^1.8.5" [tool.poetry.extras] sqlserver = ["pyodbc"] diff --git a/test_infra/stacks/databases_stack.py b/test_infra/stacks/databases_stack.py index 8c0cde255..b06294531 100644 --- a/test_infra/stacks/databases_stack.py +++ b/test_infra/stacks/databases_stack.py @@ -5,13 +5,13 @@ from aws_cdk import 
aws_iam as iam from aws_cdk import aws_kms as kms from aws_cdk import aws_lakeformation as lf +from aws_cdk import aws_neptune as neptune from aws_cdk import aws_rds as rds from aws_cdk import aws_redshift as redshift from aws_cdk import aws_s3 as s3 from aws_cdk import aws_secretsmanager as secrets from aws_cdk import aws_ssm as ssm from aws_cdk import core as cdk -from aws_cdk import aws_neptune as neptune class DatabasesStack(cdk.Stack): # type: ignore @@ -565,15 +565,16 @@ def _setup_sqlserver(self) -> None: cdk.CfnOutput(self, "SqlServerDatabase", value=database) cdk.CfnOutput(self, "SqlServerSchema", value=schema) - - def _setup_neptune(self, iam_enabled=False, port=8182) -> None: - cluster = neptune.DatabaseCluster(self, "DataWrangler", + def _setup_neptune(self, iam_enabled=False, port=8182) -> None: + cluster = neptune.DatabaseCluster( + self, + "DataWrangler", vpc=self.vpc, - instance_type=neptune.InstanceType.R5_LARGE, - iam_authentication=iam_enabled + instance_type=neptune.InstanceType.R5_LARGE, + iam_authentication=iam_enabled, ) - + cdk.CfnOutput(self, "NeptuneClusterEndpoint", value=cluster.cluster_endpoint.hostname) cdk.CfnOutput(self, "NeptuneReaderEndpoint", value=cluster.cluster_read_endpoint.hostname) cdk.CfnOutput(self, "NeptunePort", value=str(port)) - cdk.CfnOutput(self, "NeptuneIAMEnabled", value=str(iam_enabled)) \ No newline at end of file + cdk.CfnOutput(self, "NeptuneIAMEnabled", value=str(iam_enabled)) diff --git a/tests/test_neptune.py b/tests/test_neptune.py index 04fe6e36b..f5e56333b 100644 --- a/tests/test_neptune.py +++ b/tests/test_neptune.py @@ -1,11 +1,11 @@ import logging +import random +import string +import uuid from typing import Any, Dict import pandas as pd import pytest # type: ignore -import uuid -import random -import string import awswrangler as wr @@ -13,45 +13,42 @@ logging.getLogger("awswrangler").setLevel(logging.DEBUG) + @pytest.fixture(scope="session") def cloudformation_outputs(): - outputs = {} - outputs['endpoint'] = 'air-routes-oc.cluster-cei5pmtr7fqq.us-west-2.neptune.amazonaws.com' - outputs['read_endpoint'] = 'air-routes-oc.cluster-cei5pmtr7fqq.us-west-2.neptune.amazonaws.com' - outputs['port'] = 8182 - outputs['iam_enabled'] = False - return outputs + return extract_cloudformation_outputs() @pytest.fixture(scope="session") def neptune_endpoint(cloudformation_outputs) -> str: - return cloudformation_outputs["endpoint"] + return cloudformation_outputs["NeptuneClusterEndpoint"] @pytest.fixture(scope="session") -def neptune_read_endpoint(cloudformation_outputs) -> str: - return cloudformation_outputs["read_endpoint"] +def neptune_port(cloudformation_outputs) -> int: + return cloudformation_outputs["NeptunePort"] @pytest.fixture(scope="session") -def neptune_port(cloudformation_outputs) -> int: - return cloudformation_outputs["port"] +def neptune_iam_enabled(cloudformation_outputs) -> int: + return cloudformation_outputs["NeptuneIAMEnabled"] -def test_connection_neptune_https(neptune_endpoint, neptune_port): - client = wr.neptune.connect(host=neptune_endpoint, port=neptune_port, iam_enabled=False) +def test_connection_neptune_https(neptune_endpoint, neptune_port, neptune_iam_enabled): + client = wr.neptune.connect(host=neptune_endpoint, port=neptune_port, iam_enabled=neptune_iam_enabled) resp = client.status() - assert resp['status'] == 'healthy' + assert resp["status"] == "healthy" def test_connection_neptune_https_iam(neptune_endpoint, neptune_port): client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=True) resp 
= client.status() - assert resp['status'] == 'healthy' + assert resp["status"] == "healthy" def test_opencypher_query(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) + wr.neptune.execute_opencypher(client, "create (a { name: 'foo' })-[:TEST]->(b {name : 'bar'})") df = wr.neptune.execute_opencypher(client, "MATCH (n) RETURN n LIMIT 1") assert isinstance(df, pd.DataFrame) assert df.shape == (1, 1) @@ -68,17 +65,18 @@ def test_opencypher_query(neptune_endpoint, neptune_port) -> Dict[str, Any]: assert isinstance(df, pd.DataFrame) assert df.shape == (1, 2) row = df.iloc[0] - assert row['id(n)'] - assert row['labels(n)'] + assert row["id(n)"] + assert row["labels(n)"] def test_gremlin_query_vertices(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) - + + wr.neptune.execute_gremlin(client, f"g.addV().property(T.id, '{uuid.uuid4()}')") df = wr.neptune.execute_gremlin(client, "g.V().limit(1)") assert isinstance(df, pd.DataFrame) assert df.shape == (1, 2) - + df = wr.neptune.execute_gremlin(client, "g.V().limit(2)") assert isinstance(df, pd.DataFrame) assert df.shape == (2, 2) @@ -86,11 +84,12 @@ def test_gremlin_query_vertices(neptune_endpoint, neptune_port) -> Dict[str, Any def test_gremlin_query_edges(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) - + + wr.neptune.execute_gremlin(client, "g.addE('bar').from(addV('foo')).to(addV('foo'))") df = wr.neptune.execute_gremlin(client, "g.E().limit(1)") assert isinstance(df, pd.DataFrame) assert df.shape == (1, 4) - + df = wr.neptune.execute_gremlin(client, "g.E().limit(2)") assert isinstance(df, pd.DataFrame) assert df.shape == (2, 4) @@ -98,162 +97,169 @@ def test_gremlin_query_edges(neptune_endpoint, neptune_port) -> Dict[str, Any]: def test_gremlin_query_no_results(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) - + df = wr.neptune.execute_gremlin(client, "g.V('foo').drop()") assert isinstance(df, pd.DataFrame) def test_sparql_query(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) + df = wr.neptune.execute_sparql(client, "INSERT DATA { <test> <test> <test> }") + df = wr.neptune.execute_sparql(client, "INSERT DATA { <test1> <test1> <test1> }") df = wr.neptune.execute_sparql(client, "SELECT ?s ?p ?o {?s ?p ?o} LIMIT 1") assert isinstance(df, pd.DataFrame) - assert df.shape == (1,3) + assert df.shape == (1, 3) df = wr.neptune.execute_sparql(client, "SELECT ?s ?p ?o {?s ?p ?o} LIMIT 2") assert isinstance(df, pd.DataFrame) - assert df.shape == (2,3) + assert df.shape == (2, 3) def test_gremlin_write_updates(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) - id=uuid.uuid4() + id = uuid.uuid4() wr.neptune.execute_gremlin(client, f"g.addV().property(T.id, '{str(id)}')") - data=[{'~id': id, 'age': 50}] + data = [{"~id": id, "age": 50}] df = pd.DataFrame(data) res = wr.neptune.to_property_graph(client, df) assert res - final_df = wr.neptune.execute_gremlin(client, f"g.V('{str(id)}').values('age')") + final_df = wr.neptune.execute_gremlin(client, f"g.V('{str(id)}').values('age')") assert final_df.iloc[0][0] == 50 - +
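The update test above exercises the no-~label path of to_property_graph; a condensed sketch of that round trip (the vertex id below is a placeholder, and list values get set cardinality per _build_gremlin_update):

>>> df = pd.DataFrame([{"~id": "v1", "age": 50, "tags": ["a", "b"]}])  # no ~label, so vertex v1 is updated
>>> wr.neptune.to_property_graph(client, df)
True
>>> wr.neptune.execute_gremlin(client, "g.V('v1').values('age')").iloc[0][0]
50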
def test_gremlin_write_vertices(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) + wr.neptune.execute_gremlin(client, "g.addV('foo')") initial_cnt_df = wr.neptune.execute_gremlin(client, "g.V().hasLabel('foo').count()") data = [_create_dummy_vertex(), _create_dummy_vertex(), _create_dummy_vertex()] df = pd.DataFrame(data) res = wr.neptune.to_property_graph(client, df) assert res - final_cnt_df = wr.neptune.execute_gremlin(client, "g.V().hasLabel('foo').count()") + final_cnt_df = wr.neptune.execute_gremlin(client, "g.V().hasLabel('foo').count()") assert final_cnt_df.iloc[0][0] == initial_cnt_df.iloc[0][0] + 3 # check to make sure batch addition of vertices works - data=[] + data = [] for i in range(0, 50): data.append(_create_dummy_vertex()) - + df = pd.DataFrame(data) res = wr.neptune.to_property_graph(client, df) assert res - batch_cnt_df = wr.neptune.execute_gremlin(client, "g.V().hasLabel('foo').count()") + batch_cnt_df = wr.neptune.execute_gremlin(client, "g.V().hasLabel('foo').count()") assert batch_cnt_df.iloc[0][0] == final_cnt_df.iloc[0][0] + 50 def test_gremlin_write_edges(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) + initial_cnt_df = wr.neptune.execute_gremlin(client, "g.E().hasLabel('bar').count()") - data = [_create_dummy_edge(), _create_dummy_edge(), _create_dummy_edge()] df = pd.DataFrame(data) res = wr.neptune.to_property_graph(client, df) assert res - final_cnt_df = wr.neptune.execute_gremlin(client, "g.E().hasLabel('bar').count()") + final_cnt_df = wr.neptune.execute_gremlin(client, "g.E().hasLabel('bar').count()") assert final_cnt_df.iloc[0][0] == initial_cnt_df.iloc[0][0] + 3 # check to make sure batch addition of edges works - data=[] + data = [] for i in range(0, 50): data.append(_create_dummy_edge()) - + df = pd.DataFrame(data) res = wr.neptune.to_property_graph(client, df) assert res - batch_cnt_df = wr.neptune.execute_gremlin(client, "g.E().hasLabel('bar').count()") + batch_cnt_df = wr.neptune.execute_gremlin(client, "g.E().hasLabel('bar').count()") assert batch_cnt_df.iloc[0][0] == final_cnt_df.iloc[0][0] + 50 - + def test_sparql_write_triples(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) initial_df = wr.neptune.execute_sparql(client, "SELECT ?p ?o WHERE { <foo> ?p ?o .}") - + data = [_create_dummy_triple(), _create_dummy_triple(), _create_dummy_triple()] df = pd.DataFrame(data) res = wr.neptune.to_rdf_graph(client, df) assert res - final_df = wr.neptune.execute_sparql(client, "SELECT ?p ?o WHERE { <foo> ?p ?o .}") + final_df = wr.neptune.execute_sparql(client, "SELECT ?p ?o WHERE { <foo> ?p ?o .}") assert len(final_df.index) == len(initial_df.index) + 3 - + # check to make sure batch addition of triples works - data=[] + data = [] for i in range(0, 50): data.append(_create_dummy_triple()) - + df = pd.DataFrame(data) res = wr.neptune.to_rdf_graph(client, df) assert res - batch_df = wr.neptune.execute_sparql(client, "SELECT ?p ?o WHERE { <foo> ?p ?o .}") + batch_df = wr.neptune.execute_sparql(client, "SELECT ?p ?o WHERE { <foo> ?p ?o .}") assert len(batch_df.index) == len(final_df.index) + 50 - + + def test_sparql_write_quads(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) initial_df = wr.neptune.execute_sparql(client, "SELECT ?p ?o FROM <bar> WHERE { <foo> ?p ?o .}") - + data = [_create_dummy_quad(), _create_dummy_quad(), _create_dummy_quad()] df = pd.DataFrame(data)
res = wr.neptune.to_rdf_graph(client, df) assert res - final_df = wr.neptune.execute_sparql(client, "SELECT ?p ?o FROM <bar> WHERE { <foo> ?p ?o .}") + final_df = wr.neptune.execute_sparql(client, "SELECT ?p ?o FROM <bar> WHERE { <foo> ?p ?o .}") assert len(final_df.index) == len(initial_df.index) + 3 - + # check to make sure batch addition of quads works - data=[] + data = [] for i in range(0, 50): data.append(_create_dummy_quad()) - + df = pd.DataFrame(data) res = wr.neptune.to_rdf_graph(client, df) assert res - batch_df = wr.neptune.execute_sparql(client, "SELECT ?p ?o FROM <bar> WHERE { <foo> ?p ?o .}") + batch_df = wr.neptune.execute_sparql(client, "SELECT ?p ?o FROM <bar> WHERE { <foo> ?p ?o .}") assert len(batch_df.index) == len(final_df.index) + 50 def _create_dummy_vertex() -> Dict[str, Any]: data = dict() - data['~id']=uuid.uuid4() - data['~label']='foo' - data['int'] = random.randint(0, 1000) - data['str'] = ''.join(random.choice(string.ascii_lowercase) for i in range(10)) - data['list'] = [random.randint(0, 1000), random.randint(0, 1000)] + data["~id"] = uuid.uuid4() + data["~label"] = "foo" + data["int"] = random.randint(0, 1000) + data["str"] = "".join(random.choice(string.ascii_lowercase) for i in range(10)) + data["list"] = [random.randint(0, 1000), random.randint(0, 1000)] return data + def _create_dummy_edge() -> Dict[str, Any]: data = dict() - data['~id']=uuid.uuid4() - data['~label']='bar' - data['~to']=uuid.uuid4() - data['~from']=uuid.uuid4() - data['int'] = random.randint(0, 1000) - data['str'] = ''.join(random.choice(string.ascii_lowercase) for i in range(10)) + data["~id"] = uuid.uuid4() + data["~label"] = "bar" + data["~to"] = uuid.uuid4() + data["~from"] = uuid.uuid4() + data["int"] = random.randint(0, 1000) + data["str"] = "".join(random.choice(string.ascii_lowercase) for i in range(10)) return data def _create_dummy_triple() -> Dict[str, Any]: data = dict() - data['s']='foo' - data['p']=uuid.uuid4() - data['o'] = random.randint(0, 1000) + data["s"] = "foo" + data["p"] = uuid.uuid4() + data["o"] = random.randint(0, 1000) return data + def _create_dummy_quad() -> Dict[str, Any]: data = _create_dummy_triple() - data['g']='bar' - return data \ No newline at end of file + data["g"] = "bar" + return data diff --git a/tests/test_neptune_parsing.py b/tests/test_neptune_parsing.py index 02bf3cdf3..1650ff682 100644 --- a/tests/test_neptune_parsing.py +++ b/tests/test_neptune_parsing.py @@ -2,24 +2,21 @@ import pandas as pd import pytest # type: ignore -from gremlin_python.structure.graph import Path -from gremlin_python.structure.graph import Vertex -from gremlin_python.structure.graph import Edge -from gremlin_python.structure.graph import VertexProperty -from gremlin_python.structure.graph import Property from gremlin_python.process.traversal import T +from gremlin_python.structure.graph import Edge, Path, Property, Vertex, VertexProperty import awswrangler as wr logging.getLogger("awswrangler").setLevel(logging.DEBUG) + @pytest.fixture(scope="session") def gremlin_parser() -> wr.neptune.GremlinParser: c = object.__new__(wr.neptune.GremlinParser) return c -#parse Vertex elements +# parse Vertex elements def test_parse_gremlin_vertex_elements(gremlin_parser): # parse vertex elements v = Vertex("foo") input = [v] out = gremlin_parser.gremlin_results_to_dict(input) df = pd.DataFrame.from_records(out) row = df.iloc[0] - assert df.shape == (1,2) - assert row['id'] == 'foo' - assert row['label'] == 'vertex' + assert df.shape == (1, 2) + assert row["id"] ==
"foo" + assert row["label"] == "vertex" # parse multiple vertex elements v1 = Vertex("bar") @@ -37,152 +34,157 @@ def test_parse_gremlin_vertex_elements(gremlin_parser): out = gremlin_parser.gremlin_results_to_dict(input) df = pd.DataFrame.from_records(out) row = df.iloc[1] - assert df.shape == (2,2) - assert row['id'] == 'bar' - assert row['label'] == 'vertex' + assert df.shape == (2, 2) + assert row["id"] == "bar" + assert row["label"] == "vertex" -#parse Edge elements +# parse Edge elements def test_parse_gremlin_edge_elements(gremlin_parser): # parse edge elements - v = Edge("foo", 'out1', 'label', 'in1') + v = Edge("foo", "out1", "label", "in1") input = [v] out = gremlin_parser.gremlin_results_to_dict(input) df = pd.DataFrame.from_records(out) row = df.iloc[0] - assert df.shape == (1,4) - assert row['id'] == 'foo' - assert row['outV'] == 'out1' - assert row['label'] == 'label' - assert row['inV'] == 'in1' + assert df.shape == (1, 4) + assert row["id"] == "foo" + assert row["outV"] == "out1" + assert row["label"] == "label" + assert row["inV"] == "in1" # parse multiple edge elements - v1 = Edge("bar", 'out1', 'label', 'in2') + v1 = Edge("bar", "out1", "label", "in2") input = [v, v1] out = gremlin_parser.gremlin_results_to_dict(input) df = pd.DataFrame.from_records(out) row = df.iloc[1] - assert df.shape == (2,4) - assert row['id'] == 'bar' - assert row['outV'] == 'out1' - assert row['label'] == 'label' - assert row['inV'] == 'in2' + assert df.shape == (2, 4) + assert row["id"] == "bar" + assert row["outV"] == "out1" + assert row["label"] == "label" + assert row["inV"] == "in2" -#parse Property elements +# parse Property elements def test_parse_gremlin_property_elements(gremlin_parser): # parse VertexProperty elements - v = VertexProperty("foo", 'name', 'bar', 'v1') + v = VertexProperty("foo", "name", "bar", "v1") input = [v] out = gremlin_parser.gremlin_results_to_dict(input) df = pd.DataFrame.from_records(out) row = df.iloc[0] - assert df.shape == (1,5) - assert row['id'] == 'foo' - assert row['label'] == 'name' - assert row['value'] == 'bar' - assert row['key'] == 'name' - assert row['vertex'] == 'v1' - - v = Property("foo", 'name', 'bar') + assert df.shape == (1, 5) + assert row["id"] == "foo" + assert row["label"] == "name" + assert row["value"] == "bar" + assert row["key"] == "name" + assert row["vertex"] == "v1" + + v = Property("foo", "name", "bar") input = [v] out = gremlin_parser.gremlin_results_to_dict(input) df = pd.DataFrame.from_records(out) row = df.iloc[0] - assert df.shape == (1,3) - assert row['element'] == 'bar' - assert row['value'] == 'name' - assert row['key'] == 'foo' + assert df.shape == (1, 3) + assert row["element"] == "bar" + assert row["value"] == "name" + assert row["key"] == "foo" -#parse Path elements +# parse Path elements def test_parse_gremlin_path_elements(gremlin_parser): - #parse path with elements + # parse path with elements v = Vertex("foo") v2 = Vertex("bar") - e1 = Edge("e1", 'foo', 'label', 'bar') + e1 = Edge("e1", "foo", "label", "bar") p = Path(labels=["vertex", "label", "vertex"], objects=[v, e1, v2]) out = gremlin_parser.gremlin_results_to_dict([p]) df = pd.DataFrame.from_records(out) row = df.iloc[0] - assert df.shape == (1,3) - assert row[0] == {'id': 'foo', 'label': 'vertex'} - assert row[1] == {'id': 'e1', 'label': 'label', 'outV': 'foo', 'inV': 'bar'} - assert row[2] == {'id': 'bar', 'label': 'vertex'} + assert df.shape == (1, 3) + assert row[0] == {"id": "foo", "label": "vertex"} + assert row[1] == {"id": "e1", "label": "label", 
"outV": "foo", "inV": "bar"} + assert row[2] == {"id": "bar", "label": "vertex"} - #parse path with multiple elements - e2 = Edge("bar", 'out1', 'label', 'in2') + # parse path with multiple elements + e2 = Edge("bar", "out1", "label", "in2") v3 = Vertex("in2") p1 = Path(labels=["vertex", "label", "vertex"], objects=[v2, e2, v3]) out = gremlin_parser.gremlin_results_to_dict([p, p1]) df = pd.DataFrame.from_records(out) row = df.iloc[1] - assert df.shape == (2,3) - assert row[0] == {'id': 'bar', 'label': 'vertex'} - assert row[1] == {'id': 'bar', 'label': 'label', 'outV': 'out1', 'inV': 'in2'} - assert row[2] == {'id': 'in2', 'label': 'vertex'} - - #parse path with maps - p = Path(labels=["vertex", "label", "vertex"], objects=[{'name': 'foo', 'age': 29}, {'dist': 32}, {'name': 'bar', 'age': 40}]) + assert df.shape == (2, 3) + assert row[0] == {"id": "bar", "label": "vertex"} + assert row[1] == {"id": "bar", "label": "label", "outV": "out1", "inV": "in2"} + assert row[2] == {"id": "in2", "label": "vertex"} + + # parse path with maps + p = Path( + labels=["vertex", "label", "vertex"], + objects=[{"name": "foo", "age": 29}, {"dist": 32}, {"name": "bar", "age": 40}], + ) out = gremlin_parser.gremlin_results_to_dict([p]) df = pd.DataFrame.from_records(out) row = df.iloc[0] - assert df.shape == (1,3) - assert row[0]['name'] == 'foo' - assert row[0]['age'] == 29 - assert row[1]['dist'] == 32 - assert row[2]['name'] == 'bar' - assert row[2]['age'] == 40 - - #parse path with mixed elements and maps - p = Path(labels=["vertex", "label", "vertex"], objects=[{'name': 'foo', 'age': 29}, - Edge("bar", 'out1', 'label', 'in2'), {'name': 'bar', 'age': 40}]) + assert df.shape == (1, 3) + assert row[0]["name"] == "foo" + assert row[0]["age"] == 29 + assert row[1]["dist"] == 32 + assert row[2]["name"] == "bar" + assert row[2]["age"] == 40 + + # parse path with mixed elements and maps + p = Path( + labels=["vertex", "label", "vertex"], + objects=[{"name": "foo", "age": 29}, Edge("bar", "out1", "label", "in2"), {"name": "bar", "age": 40}], + ) out = gremlin_parser.gremlin_results_to_dict([p]) df = pd.DataFrame.from_records(out) row = df.iloc[0] - assert df.shape == (1,3) - assert row[0]['name'] == 'foo' - assert row[0]['age'] == 29 - assert row[1] == {'id': 'bar', 'label': 'label', 'outV': 'out1', 'inV': 'in2'} - assert row[2]['name'] == 'bar' - assert row[2]['age'] == 40 + assert df.shape == (1, 3) + assert row[0]["name"] == "foo" + assert row[0]["age"] == 29 + assert row[1] == {"id": "bar", "label": "label", "outV": "out1", "inV": "in2"} + assert row[2]["name"] == "bar" + assert row[2]["age"] == 40 -#parse vertex valueMap +# parse vertex valueMap def test_parse_gremlin_maps(gremlin_parser): # parse map - m = {'name': 'foo', 'age': 29} + m = {"name": "foo", "age": 29} input = [m] out = gremlin_parser.gremlin_results_to_dict(input) df = pd.DataFrame.from_records(out) row = df.iloc[0] - assert df.shape == (1,2) - assert row['name'] == 'foo' - assert row['age'] == 29 + assert df.shape == (1, 2) + assert row["name"] == "foo" + assert row["age"] == 29 - # parse multiple maps with T - m1 = {'name': ['foo'], T.id: '2', 'age': [40], T.label: 'vertex'} + # parse multiple maps with T + m1 = {"name": ["foo"], T.id: "2", "age": [40], T.label: "vertex"} input = [m, m1] out = gremlin_parser.gremlin_results_to_dict(input) df = pd.DataFrame.from_records(out) row = df.iloc[1] - assert df.shape == (2,4) - assert row['name'] == 'foo' - assert row['age'] == 40 - assert row[T.id] == '2' - assert row[T.label] == 'vertex' - m2 = 
{'name': ['foo', 'bar'], T.id: '2', T.label: 'vertex'} + assert df.shape == (2, 4) + assert row["name"] == "foo" + assert row["age"] == 40 + assert row[T.id] == "2" + assert row[T.label] == "vertex" + m2 = {"name": ["foo", "bar"], T.id: "2", T.label: "vertex"} input = [m, m1, m2] out = gremlin_parser.gremlin_results_to_dict(input) df = pd.DataFrame.from_records(out) row = df.iloc[2] - assert df.shape == (3,4) - assert row['name'] == ['foo', 'bar'] - assert row[T.id] == '2' - assert row[T.label] == 'vertex' + assert df.shape == (3, 4) + assert row["name"] == ["foo", "bar"] + assert row[T.id] == "2" + assert row[T.label] == "vertex" -#parse scalar +# parse scalar def test_parse_gremlin_scalar(gremlin_parser): # parse map m = 12 @@ -191,19 +193,19 @@ def test_parse_gremlin_scalar(gremlin_parser): out = gremlin_parser.gremlin_results_to_dict(input) df = pd.DataFrame.from_records(out) row = df.iloc[0] - assert df.shape == (2,1) + assert df.shape == (2, 1) assert row[0] == 12 row = df.iloc[1] assert row[0] == "Foo" -#parse subgraph +# parse subgraph def test_parse_gremlin_subgraph(gremlin_parser): - m = {'@type': 'tinker:graph', '@value': {'vertices': ['v[45]', 'v[9]'], 'edges': ['e[3990][9-route->45]']}} + m = {"@type": "tinker:graph", "@value": {"vertices": ["v[45]", "v[9]"], "edges": ["e[3990][9-route->45]"]}} input = [m] out = gremlin_parser.gremlin_results_to_dict(input) df = pd.DataFrame.from_records(out) row = df.iloc[0] - assert df.shape == (1,2) - assert row['@type'] == 'tinker:graph' - assert row['@value'] == {'vertices': ['v[45]', 'v[9]'], 'edges': ['e[3990][9-route->45]']} + assert df.shape == (1, 2) + assert row["@type"] == "tinker:graph" + assert row["@value"] == {"vertices": ["v[45]", "v[9]"], "edges": ["e[3990][9-route->45]"]} From 31df062f313e5253798d8ec935af5ec787600efb Mon Sep 17 00:00:00 2001 From: Dave Bechberger Date: Mon, 28 Feb 2022 12:40:49 -0900 Subject: [PATCH 14/32] Added GremlinParser to the init file so that it can be unit tested. 
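
GremlinParser.gremlin_results_to_dict converts the objects returned by the
gremlin-python driver (Vertex, Edge, VertexProperty, Property, Path, maps and
scalars) into plain dictionaries that pandas can load. A rough sketch of the
intended unit-test usage, mirroring the fixture above (object.__new__ skips
__init__, so no Neptune endpoint is needed):

    import pandas as pd
    import awswrangler as wr
    from gremlin_python.structure.graph import Vertex

    parser = object.__new__(wr.neptune.GremlinParser)
    rows = parser.gremlin_results_to_dict([Vertex("foo")])
    df = pd.DataFrame.from_records(rows)  # one row: id="foo", label="vertex"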
--- awswrangler/neptune/__init__.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/awswrangler/neptune/__init__.py b/awswrangler/neptune/__init__.py index 0819559e2..dea86997a 100644 --- a/awswrangler/neptune/__init__.py +++ b/awswrangler/neptune/__init__.py @@ -7,5 +7,14 @@ to_property_graph, to_rdf_graph, ) +from neptune.gremlin_parser import GremlinParser -__all__ = ["execute_gremlin", "execute_opencypher", "execute_sparql", "to_property_graph", "to_rdf_graph", "connect"] +__all__ = [ + "execute_gremlin", + "execute_opencypher", + "execute_sparql", + "to_property_graph", + "to_rdf_graph", + "connect", + "GremlinParser", +] From d61a4c940c45d5c7ab29b81c5de928f9906b55ee Mon Sep 17 00:00:00 2001 From: Dave Bechberger Date: Fri, 4 Mar 2022 14:26:03 -0900 Subject: [PATCH 15/32] Added better error handling on query exceptions for all languages --- awswrangler/neptune/client.py | 12 +++++++++--- tests/test_neptune.py | 23 ++++++++++++++++++++++- 2 files changed, 31 insertions(+), 4 deletions(-) diff --git a/awswrangler/neptune/client.py b/awswrangler/neptune/client.py index 807e0c3b2..0fc1b8195 100644 --- a/awswrangler/neptune/client.py +++ b/awswrangler/neptune/client.py @@ -133,7 +133,10 @@ def read_opencypher(self, query: str, headers: Any = None) -> Any: req = self._prepare_request("POST", url, data=data, headers=headers) res = self._http_session.send(req) _logger.debug(res) - return res.json()["results"] + if res.ok: + return res.json()["results"] + else: + raise exceptions.QueryFailed(f"Status Code: {res.status_code} Reason: {res.reason} Message: {res.text}") def read_gremlin(self, query: str) -> Any: """Executes the provided Gremlin traversal and returns the results @@ -174,7 +177,7 @@ def _execute_gremlin(self, query: str) -> Any: except Exception as e: c.close() _logger.error(e) - raise e + raise exceptions.QueryFailed(e) def read_sparql(self, query: str, headers: Any = None) -> Any: """Executes the given query and returns the results @@ -222,7 +225,10 @@ def _execute_sparql(self, query: str, headers: Any) -> Any: req = self._prepare_request("POST", uri, data=data, headers=headers) res = self._http_session.send(req) _logger.debug(res) - return res.json() + if res.ok: + return res.json() + else: + raise exceptions.QueryFailed(f"Status Code: {res.status_code} Reason: {res.reason} Message: {res.text}") def status(self) -> Any: """Returns the status of the Neptune cluster diff --git a/tests/test_neptune.py b/tests/test_neptune.py index f5e56333b..e99e388e2 100644 --- a/tests/test_neptune.py +++ b/tests/test_neptune.py @@ -69,10 +69,31 @@ def test_opencypher_query(neptune_endpoint, neptune_port) -> Dict[str, Any]: assert row["labels(n)"] +def test_opencypher_malformed_query(neptune_endpoint, neptune_port) -> Dict[str, Any]: + client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) + with pytest.raises(wr.exceptions.QueryFailed): + wr.neptune.execute_opencypher(client, "MATCH (n) LIMIT 2") + with pytest.raises(wr.exceptions.QueryFailed): + wr.neptune.execute_opencypher(client, "") + +def test_gremlin_malformed_query(neptune_endpoint, neptune_port) -> Dict[str, Any]: + client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) + with pytest.raises(wr.exceptions.QueryFailed): + wr.neptune.execute_gremlin(client, "g.V().limit(1") + with pytest.raises(wr.exceptions.QueryFailed): + wr.neptune.execute_gremlin(client, "") + +def test_sparql_malformed_query(neptune_endpoint, neptune_port) -> Dict[str, Any]: + client = 
wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) + with pytest.raises(wr.exceptions.QueryFailed): + wr.neptune.execute_sparql(client, "SELECT ?s ?p ?o {?s ?pLIMIT 1") + with pytest.raises(wr.exceptions.QueryFailed): + wr.neptune.execute_sparql(client, "") + def test_gremlin_query_vertices(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) - wr.neptune.execute_gremlin(client, f"g.addV().property(T.id, '{str(id)}')") + wr.neptune.execute_gremlin(client, f"g.addV().property(T.id, '{uuid.uuid4()}')") df = wr.neptune.execute_gremlin(client, "g.V().limit(1)") assert isinstance(df, pd.DataFrame) assert df.shape == (1, 2) From 78d04ab8304d458f7b906b9a95d97570963d1f2a Mon Sep 17 00:00:00 2001 From: Dave Bechberger Date: Fri, 4 Mar 2022 14:27:08 -0900 Subject: [PATCH 16/32] Fixed validation error --- tests/test_neptune.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/test_neptune.py b/tests/test_neptune.py index e99e388e2..da3877225 100644 --- a/tests/test_neptune.py +++ b/tests/test_neptune.py @@ -76,6 +76,7 @@ def test_opencypher_malformed_query(neptune_endpoint, neptune_port) -> Dict[str, with pytest.raises(wr.exceptions.QueryFailed): wr.neptune.execute_opencypher(client, "") + def test_gremlin_malformed_query(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) with pytest.raises(wr.exceptions.QueryFailed): @@ -83,6 +84,7 @@ def test_gremlin_malformed_query(neptune_endpoint, neptune_port) -> Dict[str, An with pytest.raises(wr.exceptions.QueryFailed): wr.neptune.execute_gremlin(client, "") + def test_sparql_malformed_query(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) with pytest.raises(wr.exceptions.QueryFailed): @@ -90,6 +92,7 @@ def test_sparql_malformed_query(neptune_endpoint, neptune_port) -> Dict[str, Any with pytest.raises(wr.exceptions.QueryFailed): wr.neptune.execute_sparql(client, "") + def test_gremlin_query_vertices(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) From d654903e870cd8f253514e3913e40667dea0cb74 Mon Sep 17 00:00:00 2001 From: Dave Bechberger Date: Tue, 8 Mar 2022 18:12:02 -0900 Subject: [PATCH 17/32] Added method to flatten dataframes to Neptune module --- awswrangler/neptune/__init__.py | 4 ++- awswrangler/neptune/client.py | 8 ++--- awswrangler/neptune/neptune.py | 63 +++++++++++++++++++++++++++++++-- tests/test_neptune.py | 25 ++++++++++++- 4 files changed, 92 insertions(+), 8 deletions(-) diff --git a/awswrangler/neptune/__init__.py b/awswrangler/neptune/__init__.py index dea86997a..4253b20c5 100644 --- a/awswrangler/neptune/__init__.py +++ b/awswrangler/neptune/__init__.py @@ -1,13 +1,14 @@ """Utilities Module for Amazon Neptune.""" +from awswrangler.neptune.gremlin_parser import GremlinParser from awswrangler.neptune.neptune import ( connect, execute_gremlin, execute_opencypher, execute_sparql, + flatten_nested_df, to_property_graph, to_rdf_graph, ) -from neptune.gremlin_parser import GremlinParser __all__ = [ "execute_gremlin", @@ -17,4 +18,5 @@ "to_rdf_graph", "connect", "GremlinParser", + "flatten_nested_df", ] diff --git a/awswrangler/neptune/client.py b/awswrangler/neptune/client.py index 0fc1b8195..6c61644df 100644 --- a/awswrangler/neptune/client.py +++ b/awswrangler/neptune/client.py @@ -138,7 +138,7 @@ def 
read_opencypher(self, query: str, headers: Any = None) -> Any:
         else:
             raise exceptions.QueryFailed(f"Status Code: {res.status_code} Reason: {res.reason} Message: {res.text}")

-    def read_gremlin(self, query: str) -> Any:
+    def read_gremlin(self, query: str, headers: Any = None) -> Any:
         """Executes the provided Gremlin traversal and returns the results

         Args:
@@ -147,7 +147,7 @@ def read_gremlin(self, query: str) -> Any:
         Returns:
             Any: [description]
         """
-        return self._execute_gremlin(query)
+        return self._execute_gremlin(query, headers)

     def write_gremlin(self, query: str) -> bool:
         """Executes a Gremlin write query
@@ -162,11 +162,11 @@ def write_gremlin(self, query: str) -> bool:
         _logger.debug(res)
         return True

-    def _execute_gremlin(self, query: str) -> Any:
+    def _execute_gremlin(self, query: str, headers: Any = None) -> Any:
         try:
             nest_asyncio.apply()
             uri = f"{HTTP_PROTOCOL}://{self.host}:{self.port}/gremlin"
-            request = self._prepare_request("GET", uri)
+            request = self._prepare_request("GET", uri, headers=headers)
             ws_url = f"{WS_PROTOCOL}://{self.host}:{self.port}/gremlin"
             c = client.Client(ws_url, "g", headers=dict(request.headers))
             result = c.submit(query)
diff --git a/awswrangler/neptune/neptune.py b/awswrangler/neptune/neptune.py
index 245ac9c56..e18b4e5dd 100644
--- a/awswrangler/neptune/neptune.py
+++ b/awswrangler/neptune/neptune.py
@@ -98,12 +98,14 @@ def execute_sparql(client: NeptuneClient, query: str) -> pd.DataFrame:
             ?person foaf:name ?name .
     """
     data = client.read_sparql(query)
+    df = None
     if "results" in data and "bindings" in data["results"]:
         df = pd.DataFrame(data["results"]["bindings"])
         df.applymap(lambda x: x["value"])
-        return df
     else:
-        return pd.DataFrame(data)
+        df = pd.DataFrame(data)
+
+    return df


 def to_property_graph(client: NeptuneClient, df: pd.DataFrame, batch_size: int = 50) -> bool:
@@ -315,3 +317,60 @@ def _run_gremlin_insert(client: NeptuneClient, g: GraphTraversalSource) -> bool:
     _logger.debug(s)
     res = client.write_gremlin(s)
     return res
+
+
+def flatten_nested_df(
+    df: pd.DataFrame, include_prefix: bool = True, seperator: str = "_", recursive: bool = True
+) -> pd.DataFrame:
+    """Flatten the lists and dictionaries of the input data frame.
+
+    Args:
+        df (pd.DataFrame): The input data frame
+        include_prefix (bool, optional): If True, then it will prefix the new column name with the original column name.
+            Defaults to True.
+        seperator (str, optional): The separator to use between field names when a dictionary is exploded.
+            Defaults to "_".
+        recursive (bool, optional): If True, then this will recurse the fields in the data frame. Defaults to True.
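+
+    Example (an illustrative sketch; exact column names depend on include_prefix and seperator):
+        >>> nested = pd.DataFrame([{"id": "0", "props": {"type": "v1"}, "tags": [1, 2]}])
+        >>> flat = flatten_nested_df(nested)
+        # the "props" dict column is expanded horizontally into prefixed columns
+        # and the "tags" list column is exploded, one row per value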
+
+    Returns:
+        pd.DataFrame: The flattened data frame
+    """
+    if seperator is None:
+        seperator = "_"
+    df = df.reset_index()
+
+    # search for list and map
+    s = (df.applymap(type) == list).all()
+    list_columns = s[s].index.tolist()
+
+    s = (df.applymap(type) == dict).all()
+    dict_columns = s[s].index.tolist()
+
+    if len(list_columns) > 0 or len(dict_columns) > 0:
+        new_columns = []
+
+        for col in dict_columns:
+            # expand dictionaries horizontally
+            expanded = None
+            if include_prefix:
+                expanded = pd.json_normalize(df[col], sep=seperator).add_prefix(f"{col}")
+            else:
+                expanded = pd.json_normalize(df[col], sep=seperator)
+            expanded.index = df.index
+            df = pd.concat([df, expanded], axis=1).drop(columns=[col])
+            new_columns.extend(expanded.columns)
+
+        for col in list_columns:
+            df = df.drop(columns=[col]).join(df[col].explode().to_frame())
+            new_columns.append(col)
+
+        # check if there are still dict or list fields to flatten
+        s = (df[new_columns].applymap(type) == list).all()
+        list_columns = s[s].index.tolist()
+
+        s = (df[new_columns].applymap(type) == dict).all()
+        dict_columns = s[s].index.tolist()
+        if recursive and (len(list_columns) > 0 or len(dict_columns) > 0):
+            df = flatten_nested_df(df, include_prefix=include_prefix, seperator=seperator, recursive=recursive)
+
+    return df
diff --git a/tests/test_neptune.py b/tests/test_neptune.py
index da3877225..76374f948 100644
--- a/tests/test_neptune.py
+++ b/tests/test_neptune.py
@@ -48,7 +48,7 @@ def test_connection_neptune_https_iam(neptune_endpoint, neptune_port):

 def test_opencypher_query(neptune_endpoint, neptune_port) -> Dict[str, Any]:
     client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False)
-    wr.neptune.execute_opencypher(client, "create (a { name: 'foo' })-[:TEST]->(b {name : 'bar'})")
+    wr.neptune.execute_opencypher(client, "create (a:Foo { name: 'foo' })-[:TEST]->(b {name : 'bar'})")
     df = wr.neptune.execute_opencypher(client, "MATCH (n) RETURN n LIMIT 1")
     assert isinstance(df, pd.DataFrame)
     assert df.shape == (1, 1)
@@ -69,6 +69,29 @@ def test_opencypher_query(neptune_endpoint, neptune_port) -> Dict[str, Any]:
     assert row["labels(n)"]


+def test_flatten_df(neptune_endpoint, neptune_port) -> Dict[str, Any]:
+    client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False)
+    wr.neptune.execute_opencypher(client, "create (a:Foo { name: 'foo' })-[:TEST]->(b {name : 'bar'})")
+    df = wr.neptune.execute_opencypher(client, "MATCH (n:Foo) RETURN n LIMIT 1")
+    df_test = wr.neptune.flatten_nested_df(df)
+    assert isinstance(df_test, pd.DataFrame)
+    assert df_test.shape == (1, 6)
+    row = df_test.iloc[0]
+    assert row["n~properties_name"]
+
+    df_test = wr.neptune.flatten_nested_df(df, include_prefix=False)
+    assert isinstance(df_test, pd.DataFrame)
+    assert df_test.shape == (1, 6)
+    row = df_test.iloc[0]
+    assert row["~properties_name"]
+
+    df_test = wr.neptune.flatten_nested_df(df, seperator="|")
+    assert isinstance(df_test, pd.DataFrame)
+    assert df_test.shape == (1, 6)
+    row = df_test.iloc[0]
+    assert row["n~properties|name"]
+
+
 def test_opencypher_malformed_query(neptune_endpoint, neptune_port) -> Dict[str, Any]:
From 9e486d10013a6f6c0304b3f0cea93c981e04fec1 Mon Sep 17 00:00:00 2001
From: Dave Bechberger
Date: Tue, 8 Mar 2022 18:40:12 -0900
Subject: [PATCH 18/32] Fixed issues related to flattening DF

---
 awswrangler/neptune/neptune.py |  4 ++--
 tests/test_neptune.py          | 12 +++++++---
+++++++++--- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/awswrangler/neptune/neptune.py b/awswrangler/neptune/neptune.py index e18b4e5dd..0f9c617ed 100644 --- a/awswrangler/neptune/neptune.py +++ b/awswrangler/neptune/neptune.py @@ -353,9 +353,9 @@ def flatten_nested_df( # expand dictionaries horizontally expanded = None if include_prefix: - expanded = pd.json_normalize(df[col], sep=seperator).add_prefix(f"{col}") + expanded = pd.json_normalize(df[col], sep=seperator).add_prefix(f"{col}{seperator}") else: - expanded = pd.json_normalize(df[col], sep=seperator) + expanded = pd.json_normalize(df[col], sep=seperator).add_prefix(f"{seperator}") expanded.index = df.index df = pd.concat([df, expanded], axis=1).drop(columns=[col]) new_columns.extend(expanded.columns) diff --git a/tests/test_neptune.py b/tests/test_neptune.py index 76374f948..8f94a07e0 100644 --- a/tests/test_neptune.py +++ b/tests/test_neptune.py @@ -77,19 +77,25 @@ def test_flatten_df(neptune_endpoint, neptune_port) -> Dict[str, Any]: assert isinstance(df_test, pd.DataFrame) assert df_test.shape == (1, 6) row = df_test.iloc[0] - assert row["n~properties_name"] + assert row["n_~properties_name"] df_test = wr.neptune.flatten_nested_df(df, include_prefix=False) assert isinstance(df_test, pd.DataFrame) assert df_test.shape == (1, 6) row = df_test.iloc[0] - assert row["~properties_name"] + assert row["_~properties_name"] df_test = wr.neptune.flatten_nested_df(df, seperator="|") assert isinstance(df_test, pd.DataFrame) assert df_test.shape == (1, 6) row = df_test.iloc[0] - assert row["n~properties|name"] + assert row["n|~properties|name"] + + df_new = pd.DataFrame([{"~id": "0", "~labels": ["version"], "~properties": {"type": "version"}}]) + df_test = wr.neptune.flatten_nested_df(df_new) + assert df_test.shape == (1, 4) + row = df_test.iloc[0] + assert row["~properties_type"] def test_opencypher_malformed_query(neptune_endpoint, neptune_port) -> Dict[str, Any]: From c526960ae69ac3c7950bbb5088d2da64fef388c6 Mon Sep 17 00:00:00 2001 From: Abdel Jaidi Date: Fri, 11 Mar 2022 16:18:34 +0000 Subject: [PATCH 19/32] Fix static checks issues --- awswrangler/neptune/client.py | 17 +- awswrangler/neptune/gremlin_parser.py | 2 +- pyproject.toml | 2 +- test_infra/poetry.lock | 713 +++++++++++++------------- test_infra/pyproject.toml | 28 +- 5 files changed, 380 insertions(+), 382 deletions(-) diff --git a/awswrangler/neptune/client.py b/awswrangler/neptune/client.py index 6c61644df..ae4edca5b 100644 --- a/awswrangler/neptune/client.py +++ b/awswrangler/neptune/client.py @@ -55,7 +55,7 @@ def __ensure_session(session: boto3.Session = None) -> boto3.Session: """Ensure that a valid boto3.Session will be returned.""" if session is not None: return session - elif boto3.DEFAULT_SESSION: + if boto3.DEFAULT_SESSION: return boto3.DEFAULT_SESSION return boto3.Session() @@ -95,12 +95,12 @@ def _get_aws_request( try: frozen_creds = credentials.get_frozen_credentials() except AttributeError: - print("Could not find valid IAM credentials in any the following locations:\n") - print( + _logger.warning("Could not find valid IAM credentials in any the following locations:\n") + _logger.warning( "env, assume-role, assume-role-with-web-identity, sso, shared-credential-file, custom-process, " "config-file, ec2-credentials-file, boto-config, container-role, iam-role\n" ) - print( + _logger.warning( "Go to https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html for more " "details on configuring your IAM credentials." 
) @@ -108,8 +108,7 @@ def _get_aws_request( SigV4Auth(frozen_creds, service, self.region).add_auth(req) prepared_iam_req = req.prepare() return prepared_iam_req - else: - return req + return req def read_opencypher(self, query: str, headers: Any = None) -> Any: """Executes the provided openCypher query @@ -135,8 +134,7 @@ def read_opencypher(self, query: str, headers: Any = None) -> Any: _logger.debug(res) if res.ok: return res.json()["results"] - else: - raise exceptions.QueryFailed(f"Status Code: {res.status_code} Reason: {res.reason} Message: {res.text}") + raise exceptions.QueryFailed(f"Status Code: {res.status_code} Reason: {res.reason} Message: {res.text}") def read_gremlin(self, query: str, headers: Any = None) -> Any: """Executes the provided Gremlin traversal and returns the results @@ -227,8 +225,7 @@ def _execute_sparql(self, query: str, headers: Any) -> Any: _logger.debug(res) if res.ok: return res.json() - else: - raise exceptions.QueryFailed(f"Status Code: {res.status_code} Reason: {res.reason} Message: {res.text}") + raise exceptions.QueryFailed(f"Status Code: {res.status_code} Reason: {res.reason} Message: {res.text}") def status(self) -> Any: """Returns the status of the Neptune cluster diff --git a/awswrangler/neptune/gremlin_parser.py b/awswrangler/neptune/gremlin_parser.py index 60c4eb8e6..86d7bc310 100644 --- a/awswrangler/neptune/gremlin_parser.py +++ b/awswrangler/neptune/gremlin_parser.py @@ -45,7 +45,7 @@ def _parse_dict(data: Any) -> Any: return res # If this is an element then make it a dictionary - elif isinstance(data, (Vertex, Edge, VertexProperty, Property)): + if isinstance(data, (Vertex, Edge, VertexProperty, Property)): data = data.__dict__ # If this is a scalar then create a Map with it diff --git a/pyproject.toml b/pyproject.toml index 86db3ea74..f54f16b56 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,11 +49,11 @@ progressbar2 = "^3.53.3" opensearch-py = "^1.0.0" xlrd = { version = "^2.0.1", python = "~3.6" } xlwt = { version = "^1.3.0", python = "~3.6" } -pyodbc = { version = "~4.0.32", optional = true } gremlinpython = "^3.5.2" backoff = "^1.11.1" nest-asyncio = "^1.5.4" SPARQLWrapper = "^1.8.5" +pyodbc = { version = "~4.0.32", optional = true } [tool.poetry.extras] sqlserver = ["pyodbc"] diff --git a/test_infra/poetry.lock b/test_infra/poetry.lock index 1f9d516c9..b7d577cbf 100644 --- a/test_infra/poetry.lock +++ b/test_infra/poetry.lock @@ -1,687 +1,688 @@ [[package]] name = "attrs" -version = "21.2.0" +version = "21.4.0" description = "Classes Without Boilerplate" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", 
"zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] [[package]] name = "aws-cdk.assets" -version = "1.144.0" +version = "1.148.0" description = "This module is deprecated. All types are now available under the core module" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.core" = "1.144.0" -"aws-cdk.cx-api" = "1.144.0" +"aws-cdk.core" = "1.148.0" +"aws-cdk.cx-api" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-acmpca" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::ACMPCA" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.core" = "1.144.0" +"aws-cdk.core" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-applicationautoscaling" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::ApplicationAutoScaling" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-autoscaling-common" = "1.144.0" -"aws-cdk.aws-cloudwatch" = "1.144.0" -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.core" = "1.144.0" +"aws-cdk.aws-autoscaling-common" = "1.148.0" +"aws-cdk.aws-cloudwatch" = "1.148.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.core" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-autoscaling-common" -version = "1.144.0" +version = "1.148.0" description = "Common implementation package for @aws-cdk/aws-autoscaling and @aws-cdk/aws-applicationautoscaling" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.core" = "1.144.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.core" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-certificatemanager" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::CertificateManager" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-acmpca" = "1.144.0" -"aws-cdk.aws-cloudwatch" = "1.144.0" -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.aws-lambda" = "1.144.0" -"aws-cdk.aws-route53" = "1.144.0" -"aws-cdk.core" = "1.144.0" +"aws-cdk.aws-acmpca" = "1.148.0" +"aws-cdk.aws-cloudwatch" = "1.148.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.aws-lambda" = "1.148.0" +"aws-cdk.aws-route53" = "1.148.0" +"aws-cdk.core" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-cloudformation" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::CloudFormation" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.aws-lambda" = "1.144.0" -"aws-cdk.aws-s3" = "1.144.0" -"aws-cdk.aws-sns" = "1.144.0" -"aws-cdk.core" = "1.144.0" -"aws-cdk.cx-api" = "1.144.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.aws-lambda" = "1.148.0" +"aws-cdk.aws-s3" = "1.148.0" +"aws-cdk.aws-sns" = "1.148.0" +"aws-cdk.core" = "1.148.0" 
+"aws-cdk.cx-api" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-cloudwatch" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::CloudWatch" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.core" = "1.144.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.core" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-codeguruprofiler" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::CodeGuruProfiler" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.core" = "1.144.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.core" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-codestarnotifications" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::CodeStarNotifications" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.core" = "1.144.0" +"aws-cdk.core" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-ec2" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::EC2" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-cloudwatch" = "1.144.0" -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.aws-kms" = "1.144.0" -"aws-cdk.aws-logs" = "1.144.0" -"aws-cdk.aws-s3" = "1.144.0" -"aws-cdk.aws-s3-assets" = "1.144.0" -"aws-cdk.aws-ssm" = "1.144.0" -"aws-cdk.cloud-assembly-schema" = "1.144.0" -"aws-cdk.core" = "1.144.0" -"aws-cdk.cx-api" = "1.144.0" -"aws-cdk.region-info" = "1.144.0" +"aws-cdk.aws-cloudwatch" = "1.148.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.aws-kms" = "1.148.0" +"aws-cdk.aws-logs" = "1.148.0" +"aws-cdk.aws-s3" = "1.148.0" +"aws-cdk.aws-s3-assets" = "1.148.0" +"aws-cdk.aws-ssm" = "1.148.0" +"aws-cdk.cloud-assembly-schema" = "1.148.0" +"aws-cdk.core" = "1.148.0" +"aws-cdk.cx-api" = "1.148.0" +"aws-cdk.region-info" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-ecr" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::ECR" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-events" = "1.144.0" -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.aws-kms" = "1.144.0" -"aws-cdk.core" = "1.144.0" +"aws-cdk.aws-events" = "1.148.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.aws-kms" = "1.148.0" +"aws-cdk.core" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-ecr-assets" -version = "1.144.0" +version = "1.148.0" description = "Docker image assets deployed to ECR" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.assets" = "1.144.0" -"aws-cdk.aws-ecr" = "1.144.0" -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.aws-s3" = "1.144.0" -"aws-cdk.core" = "1.144.0" -"aws-cdk.cx-api" = "1.144.0" +"aws-cdk.assets" = "1.148.0" 
+"aws-cdk.aws-ecr" = "1.148.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.aws-s3" = "1.148.0" +"aws-cdk.core" = "1.148.0" +"aws-cdk.cx-api" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-efs" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::EFS" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-ec2" = "1.144.0" -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.aws-kms" = "1.144.0" -"aws-cdk.cloud-assembly-schema" = "1.144.0" -"aws-cdk.core" = "1.144.0" -"aws-cdk.cx-api" = "1.144.0" +"aws-cdk.aws-ec2" = "1.148.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.aws-kms" = "1.148.0" +"aws-cdk.cloud-assembly-schema" = "1.148.0" +"aws-cdk.core" = "1.148.0" +"aws-cdk.cx-api" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-events" -version = "1.144.0" +version = "1.148.0" description = "Amazon EventBridge Construct Library" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.core" = "1.144.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.core" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-glue" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::Glue" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.assets" = "1.144.0" -"aws-cdk.aws-cloudwatch" = "1.144.0" -"aws-cdk.aws-ec2" = "1.144.0" -"aws-cdk.aws-events" = "1.144.0" -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.aws-kms" = "1.144.0" -"aws-cdk.aws-logs" = "1.144.0" -"aws-cdk.aws-s3" = "1.144.0" -"aws-cdk.aws-s3-assets" = "1.144.0" -"aws-cdk.core" = "1.144.0" -"aws-cdk.custom-resources" = "1.144.0" +"aws-cdk.assets" = "1.148.0" +"aws-cdk.aws-cloudwatch" = "1.148.0" +"aws-cdk.aws-ec2" = "1.148.0" +"aws-cdk.aws-events" = "1.148.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.aws-kms" = "1.148.0" +"aws-cdk.aws-logs" = "1.148.0" +"aws-cdk.aws-s3" = "1.148.0" +"aws-cdk.aws-s3-assets" = "1.148.0" +"aws-cdk.core" = "1.148.0" +"aws-cdk.custom-resources" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-iam" -version = "1.144.0" +version = "1.148.0" description = "CDK routines for easily assigning correct and minimal IAM permissions" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.core" = "1.144.0" -"aws-cdk.region-info" = "1.144.0" +"aws-cdk.core" = "1.148.0" +"aws-cdk.region-info" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-kms" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::KMS" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.cloud-assembly-schema" = "1.144.0" -"aws-cdk.core" = "1.144.0" -"aws-cdk.cx-api" = "1.144.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.cloud-assembly-schema" = "1.148.0" +"aws-cdk.core" = "1.148.0" +"aws-cdk.cx-api" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" 
[[package]] name = "aws-cdk.aws-lakeformation" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::LakeFormation" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.core" = "1.144.0" +"aws-cdk.core" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-lambda" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::Lambda" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-applicationautoscaling" = "1.144.0" -"aws-cdk.aws-cloudwatch" = "1.144.0" -"aws-cdk.aws-codeguruprofiler" = "1.144.0" -"aws-cdk.aws-ec2" = "1.144.0" -"aws-cdk.aws-ecr" = "1.144.0" -"aws-cdk.aws-ecr-assets" = "1.144.0" -"aws-cdk.aws-efs" = "1.144.0" -"aws-cdk.aws-events" = "1.144.0" -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.aws-kms" = "1.144.0" -"aws-cdk.aws-logs" = "1.144.0" -"aws-cdk.aws-s3" = "1.144.0" -"aws-cdk.aws-s3-assets" = "1.144.0" -"aws-cdk.aws-signer" = "1.144.0" -"aws-cdk.aws-sqs" = "1.144.0" -"aws-cdk.core" = "1.144.0" -"aws-cdk.cx-api" = "1.144.0" -"aws-cdk.region-info" = "1.144.0" +"aws-cdk.aws-applicationautoscaling" = "1.148.0" +"aws-cdk.aws-cloudwatch" = "1.148.0" +"aws-cdk.aws-codeguruprofiler" = "1.148.0" +"aws-cdk.aws-ec2" = "1.148.0" +"aws-cdk.aws-ecr" = "1.148.0" +"aws-cdk.aws-ecr-assets" = "1.148.0" +"aws-cdk.aws-efs" = "1.148.0" +"aws-cdk.aws-events" = "1.148.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.aws-kms" = "1.148.0" +"aws-cdk.aws-logs" = "1.148.0" +"aws-cdk.aws-s3" = "1.148.0" +"aws-cdk.aws-s3-assets" = "1.148.0" +"aws-cdk.aws-signer" = "1.148.0" +"aws-cdk.aws-sns" = "1.148.0" +"aws-cdk.aws-sqs" = "1.148.0" +"aws-cdk.core" = "1.148.0" +"aws-cdk.cx-api" = "1.148.0" +"aws-cdk.region-info" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-logs" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::Logs" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-cloudwatch" = "1.144.0" -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.aws-kms" = "1.144.0" -"aws-cdk.aws-s3-assets" = "1.144.0" -"aws-cdk.core" = "1.144.0" -"aws-cdk.cx-api" = "1.144.0" +"aws-cdk.aws-cloudwatch" = "1.148.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.aws-kms" = "1.148.0" +"aws-cdk.aws-s3-assets" = "1.148.0" +"aws-cdk.core" = "1.148.0" +"aws-cdk.cx-api" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-neptune" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::Neptune" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-ec2" = "1.144.0" -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.aws-kms" = "1.144.0" -"aws-cdk.core" = "1.144.0" +"aws-cdk.aws-ec2" = "1.148.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.aws-kms" = "1.148.0" +"aws-cdk.core" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-opensearchservice" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::OpenSearchService" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] 
-"aws-cdk.aws-certificatemanager" = "1.144.0" -"aws-cdk.aws-cloudwatch" = "1.144.0" -"aws-cdk.aws-ec2" = "1.144.0" -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.aws-kms" = "1.144.0" -"aws-cdk.aws-logs" = "1.144.0" -"aws-cdk.aws-route53" = "1.144.0" -"aws-cdk.aws-secretsmanager" = "1.144.0" -"aws-cdk.core" = "1.144.0" -"aws-cdk.custom-resources" = "1.144.0" +"aws-cdk.aws-certificatemanager" = "1.148.0" +"aws-cdk.aws-cloudwatch" = "1.148.0" +"aws-cdk.aws-ec2" = "1.148.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.aws-kms" = "1.148.0" +"aws-cdk.aws-logs" = "1.148.0" +"aws-cdk.aws-route53" = "1.148.0" +"aws-cdk.aws-secretsmanager" = "1.148.0" +"aws-cdk.core" = "1.148.0" +"aws-cdk.custom-resources" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-rds" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::RDS" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-cloudwatch" = "1.144.0" -"aws-cdk.aws-ec2" = "1.144.0" -"aws-cdk.aws-events" = "1.144.0" -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.aws-kms" = "1.144.0" -"aws-cdk.aws-logs" = "1.144.0" -"aws-cdk.aws-s3" = "1.144.0" -"aws-cdk.aws-secretsmanager" = "1.144.0" -"aws-cdk.core" = "1.144.0" -"aws-cdk.cx-api" = "1.144.0" +"aws-cdk.aws-cloudwatch" = "1.148.0" +"aws-cdk.aws-ec2" = "1.148.0" +"aws-cdk.aws-events" = "1.148.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.aws-kms" = "1.148.0" +"aws-cdk.aws-logs" = "1.148.0" +"aws-cdk.aws-s3" = "1.148.0" +"aws-cdk.aws-secretsmanager" = "1.148.0" +"aws-cdk.core" = "1.148.0" +"aws-cdk.cx-api" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-redshift" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::Redshift" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-ec2" = "1.144.0" -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.aws-kms" = "1.144.0" -"aws-cdk.aws-lambda" = "1.144.0" -"aws-cdk.aws-s3" = "1.144.0" -"aws-cdk.aws-secretsmanager" = "1.144.0" -"aws-cdk.core" = "1.144.0" -"aws-cdk.custom-resources" = "1.144.0" +"aws-cdk.aws-ec2" = "1.148.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.aws-kms" = "1.148.0" +"aws-cdk.aws-lambda" = "1.148.0" +"aws-cdk.aws-s3" = "1.148.0" +"aws-cdk.aws-secretsmanager" = "1.148.0" +"aws-cdk.core" = "1.148.0" +"aws-cdk.custom-resources" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-route53" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::Route53" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-ec2" = "1.144.0" -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.aws-logs" = "1.144.0" -"aws-cdk.cloud-assembly-schema" = "1.144.0" -"aws-cdk.core" = "1.144.0" -"aws-cdk.custom-resources" = "1.144.0" +"aws-cdk.aws-ec2" = "1.148.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.aws-logs" = "1.148.0" +"aws-cdk.cloud-assembly-schema" = "1.148.0" +"aws-cdk.core" = "1.148.0" +"aws-cdk.custom-resources" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-s3" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::S3" category = 
"main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-events" = "1.144.0" -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.aws-kms" = "1.144.0" -"aws-cdk.core" = "1.144.0" -"aws-cdk.cx-api" = "1.144.0" +"aws-cdk.aws-events" = "1.148.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.aws-kms" = "1.148.0" +"aws-cdk.core" = "1.148.0" +"aws-cdk.cx-api" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-s3-assets" -version = "1.144.0" +version = "1.148.0" description = "Deploy local files and directories to S3" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.assets" = "1.144.0" -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.aws-kms" = "1.144.0" -"aws-cdk.aws-s3" = "1.144.0" -"aws-cdk.core" = "1.144.0" -"aws-cdk.cx-api" = "1.144.0" +"aws-cdk.assets" = "1.148.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.aws-kms" = "1.148.0" +"aws-cdk.aws-s3" = "1.148.0" +"aws-cdk.core" = "1.148.0" +"aws-cdk.cx-api" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-sam" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for the AWS Serverless Application Model (SAM) resources" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.core" = "1.144.0" +"aws-cdk.core" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-secretsmanager" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::SecretsManager" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-ec2" = "1.144.0" -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.aws-kms" = "1.144.0" -"aws-cdk.aws-lambda" = "1.144.0" -"aws-cdk.aws-sam" = "1.144.0" -"aws-cdk.core" = "1.144.0" -"aws-cdk.cx-api" = "1.144.0" +"aws-cdk.aws-ec2" = "1.148.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.aws-kms" = "1.148.0" +"aws-cdk.aws-lambda" = "1.148.0" +"aws-cdk.aws-sam" = "1.148.0" +"aws-cdk.core" = "1.148.0" +"aws-cdk.cx-api" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-signer" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::Signer" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.core" = "1.144.0" +"aws-cdk.core" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-sns" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::SNS" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-cloudwatch" = "1.144.0" -"aws-cdk.aws-codestarnotifications" = "1.144.0" -"aws-cdk.aws-events" = "1.144.0" -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.aws-kms" = "1.144.0" -"aws-cdk.aws-sqs" = "1.144.0" -"aws-cdk.core" = "1.144.0" +"aws-cdk.aws-cloudwatch" = "1.148.0" +"aws-cdk.aws-codestarnotifications" = "1.148.0" +"aws-cdk.aws-events" = "1.148.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.aws-kms" = "1.148.0" +"aws-cdk.aws-sqs" = "1.148.0" +"aws-cdk.core" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = 
">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-sqs" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::SQS" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-cloudwatch" = "1.144.0" -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.aws-kms" = "1.144.0" -"aws-cdk.core" = "1.144.0" +"aws-cdk.aws-cloudwatch" = "1.148.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.aws-kms" = "1.148.0" +"aws-cdk.core" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-ssm" -version = "1.144.0" +version = "1.148.0" description = "The CDK Construct Library for AWS::SSM" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.aws-kms" = "1.144.0" -"aws-cdk.cloud-assembly-schema" = "1.144.0" -"aws-cdk.core" = "1.144.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.aws-kms" = "1.148.0" +"aws-cdk.cloud-assembly-schema" = "1.148.0" +"aws-cdk.core" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.cloud-assembly-schema" -version = "1.144.0" +version = "1.148.0" description = "Cloud Assembly Schema" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.core" -version = "1.144.0" +version = "1.148.0" description = "AWS Cloud Development Kit Core Library" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.cloud-assembly-schema" = "1.144.0" -"aws-cdk.cx-api" = "1.144.0" -"aws-cdk.region-info" = "1.144.0" +"aws-cdk.cloud-assembly-schema" = "1.148.0" +"aws-cdk.cx-api" = "1.148.0" +"aws-cdk.region-info" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.custom-resources" -version = "1.144.0" +version = "1.148.0" description = "Constructs for implementing CDK custom resources" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-cloudformation" = "1.144.0" -"aws-cdk.aws-ec2" = "1.144.0" -"aws-cdk.aws-iam" = "1.144.0" -"aws-cdk.aws-lambda" = "1.144.0" -"aws-cdk.aws-logs" = "1.144.0" -"aws-cdk.aws-sns" = "1.144.0" -"aws-cdk.core" = "1.144.0" +"aws-cdk.aws-cloudformation" = "1.148.0" +"aws-cdk.aws-ec2" = "1.148.0" +"aws-cdk.aws-iam" = "1.148.0" +"aws-cdk.aws-lambda" = "1.148.0" +"aws-cdk.aws-logs" = "1.148.0" +"aws-cdk.aws-sns" = "1.148.0" +"aws-cdk.core" = "1.148.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.cx-api" -version = "1.144.0" +version = "1.148.0" description = "Cloud executable protocol" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.cloud-assembly-schema" = "1.144.0" -jsii = ">=1.52.1,<2.0.0" +"aws-cdk.cloud-assembly-schema" = "1.148.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.region-info" -version = "1.144.0" +version = "1.148.0" description = "AWS region information, such as service principal names" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -jsii = ">=1.52.1,<2.0.0" +jsii = ">=1.54.0,<2.0.0" publication = ">=0.0.3" [[package]] @@ 
-700,7 +701,7 @@ dev = ["bumpversion", "wheel", "watchdog", "flake8", "tox", "coverage", "sphinx" [[package]] name = "cattrs" -version = "1.8.0" +version = "1.10.0" description = "Composable complex class support for attrs and dataclasses." category = "main" optional = false @@ -708,17 +709,18 @@ python-versions = ">=3.7,<4.0" [package.dependencies] attrs = ">=20" +typing_extensions = {version = "*", markers = "python_version >= \"3.7\" and python_version < \"3.8\""} [[package]] name = "constructs" -version = "3.3.161" +version = "3.3.239" description = "A programming model for composable configuration" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -jsii = ">=1.37.0,<2.0.0" +jsii = ">=1.55.0,<2.0.0" publication = ">=0.0.3" [[package]] @@ -738,7 +740,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [[package]] name = "jsii" -version = "1.52.1" +version = "1.55.0" description = "Python client for jsii runtime" category = "main" optional = false @@ -783,11 +785,11 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "typing-extensions" -version = "3.10.0.2" -description = "Backported and Experimental Type Hints for Python 3.5+" +version = "4.1.1" +description = "Backported and Experimental Type Hints for Python 3.6+" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" [[package]] name = "zipp" @@ -804,182 +806,182 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytes [metadata] lock-version = "1.1" python-versions = ">=3.6.2, <3.11" -content-hash = "1638e85d2b87fdbdb239ed675da0b745fe125e87dee9e84c09385b5dce05513f" +content-hash = "9f90e4f68ba2e716c30094305af07863b127dd0b6a80c5df87d41dd27b2716cb" [metadata.files] attrs = [ - {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, - {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, + {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, + {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, ] "aws-cdk.assets" = [ - {file = "aws-cdk.assets-1.144.0.tar.gz", hash = "sha256:4269651cb98e24d0c866fc00e2db07d2bbedbefa9da5639cff88ac0e1bba2bd4"}, - {file = "aws_cdk.assets-1.144.0-py3-none-any.whl", hash = "sha256:c43c00f62f4c17a53ef694054bbe025773ac6cf439c95ced115434f654418f5a"}, + {file = "aws-cdk.assets-1.148.0.tar.gz", hash = "sha256:c786862b78fdf06224c23bda18f28d5f18cbd780cf32d5211608d788b02fa0c9"}, + {file = "aws_cdk.assets-1.148.0-py3-none-any.whl", hash = "sha256:37f45d702ca70cc7e10e94a8aabdba5471bfbcd231f53096c28b37570776f200"}, ] "aws-cdk.aws-acmpca" = [ - {file = "aws-cdk.aws-acmpca-1.144.0.tar.gz", hash = "sha256:a807164e3f2025357b88d4cdab65b84f8781b4517aa536e5b556e4a5b2a41b08"}, - {file = "aws_cdk.aws_acmpca-1.144.0-py3-none-any.whl", hash = "sha256:4b4a43a6d8bf646118d786071f48f420e4333bc518f0c927b95cab430008039d"}, + {file = "aws-cdk.aws-acmpca-1.148.0.tar.gz", hash = "sha256:72b0b84146d077527c4c630d234a50055ebc52747ea0f68f3f1bc0853ef56fe0"}, + {file = "aws_cdk.aws_acmpca-1.148.0-py3-none-any.whl", hash = "sha256:47aa34a106d21c0ad995057fa0d427850bc69aad889b63000074ec50170c26a1"}, ] "aws-cdk.aws-applicationautoscaling" = [ - {file = "aws-cdk.aws-applicationautoscaling-1.144.0.tar.gz", hash = 
"sha256:d8c05eb9516b705710a415ddc3922353cca0f137489a88524f99618a6980ea3c"}, - {file = "aws_cdk.aws_applicationautoscaling-1.144.0-py3-none-any.whl", hash = "sha256:d2b9ac828db47bdc6e6e4644693f92ef7e528ae10646367239cabbe6ab5b24d0"}, + {file = "aws-cdk.aws-applicationautoscaling-1.148.0.tar.gz", hash = "sha256:3db9a5f1dd4eb16b2155cae95a34051ce3f096993ec0be33de51901230c60ce6"}, + {file = "aws_cdk.aws_applicationautoscaling-1.148.0-py3-none-any.whl", hash = "sha256:7005b070be833c5eb2f621fe93d597cd083352fd5397219ec63d46eca433ef83"}, ] "aws-cdk.aws-autoscaling-common" = [ - {file = "aws-cdk.aws-autoscaling-common-1.144.0.tar.gz", hash = "sha256:58ebdc1200bff4b14688376cbe910ed20eadb04eee59d940434ba47ccfd647f0"}, - {file = "aws_cdk.aws_autoscaling_common-1.144.0-py3-none-any.whl", hash = "sha256:5de9739de50921c71efa3a1d85e6856072225f14c2e2567afa5608180250ddce"}, + {file = "aws-cdk.aws-autoscaling-common-1.148.0.tar.gz", hash = "sha256:7b104e309737acd3146491c4cba9b70b6243bf9d6aefb2f6587e93c35d1bc107"}, + {file = "aws_cdk.aws_autoscaling_common-1.148.0-py3-none-any.whl", hash = "sha256:b1e87e6304b2f5517b56bb7e21a087365c5812f31f14945fc600c53fbe558880"}, ] "aws-cdk.aws-certificatemanager" = [ - {file = "aws-cdk.aws-certificatemanager-1.144.0.tar.gz", hash = "sha256:73dbc3811ab67baf3b89f21bbaf8640f2437609bfa25033349044a8db28d13ed"}, - {file = "aws_cdk.aws_certificatemanager-1.144.0-py3-none-any.whl", hash = "sha256:95a568df2a9779c641c3da10c4f958d4336c07ae87859bc627167308957d844b"}, + {file = "aws-cdk.aws-certificatemanager-1.148.0.tar.gz", hash = "sha256:3ef28550f8724fd60bf1ddfccfd9f2727947fffa27079c5f841d64e7f2c2bddf"}, + {file = "aws_cdk.aws_certificatemanager-1.148.0-py3-none-any.whl", hash = "sha256:dfd018cf0c079b3862b79532c07ab91417f34f23eeb806904f7d195968ddc2ce"}, ] "aws-cdk.aws-cloudformation" = [ - {file = "aws-cdk.aws-cloudformation-1.144.0.tar.gz", hash = "sha256:d6aa003c0ac6b933f2d83784f167179c7a3f6247dbac4218a82ff0c42f2ea728"}, - {file = "aws_cdk.aws_cloudformation-1.144.0-py3-none-any.whl", hash = "sha256:f31770f0eb5bf9531566473a39618e6fb8f319641e3f504bedef02ae6b318d62"}, + {file = "aws-cdk.aws-cloudformation-1.148.0.tar.gz", hash = "sha256:f0d6f17eb6831bafda8bbcf106ec700ecf9c48c5b7def4675e4365f410b07581"}, + {file = "aws_cdk.aws_cloudformation-1.148.0-py3-none-any.whl", hash = "sha256:2e0794b8d35b5d0efca4330810bb7882639d80672dc9a6f1531fae80b2997236"}, ] "aws-cdk.aws-cloudwatch" = [ - {file = "aws-cdk.aws-cloudwatch-1.144.0.tar.gz", hash = "sha256:6368a80900c58f675b6e9180697e328f03f1be5ab0a31e15d77340835bb3ab31"}, - {file = "aws_cdk.aws_cloudwatch-1.144.0-py3-none-any.whl", hash = "sha256:ef12cd5dcd9d11a4ab718263c04ba2e282f09fee293f3dbecc7934b6f2b4c51b"}, + {file = "aws-cdk.aws-cloudwatch-1.148.0.tar.gz", hash = "sha256:c185339b527d82066a9948c5c5e4e18a90dba4cae15d78b5e7f5af76de38d2ae"}, + {file = "aws_cdk.aws_cloudwatch-1.148.0-py3-none-any.whl", hash = "sha256:981003c97f7268e8acbc51a792fa2f4fb96c7b4dcff2e33230f389dcdff0e1e7"}, ] "aws-cdk.aws-codeguruprofiler" = [ - {file = "aws-cdk.aws-codeguruprofiler-1.144.0.tar.gz", hash = "sha256:20e039a32e38770628b9d19bbb164fa3a0de0902d5908adbf090a34006803a05"}, - {file = "aws_cdk.aws_codeguruprofiler-1.144.0-py3-none-any.whl", hash = "sha256:c09d6ecb7f40c85ce55067397a5220800e16cc870b69fa6add72208748a3b758"}, + {file = "aws-cdk.aws-codeguruprofiler-1.148.0.tar.gz", hash = "sha256:5959a3d9800ae15299b1532bfd1da9098a2fec4a64fa12324427f2159ca34106"}, + {file = "aws_cdk.aws_codeguruprofiler-1.148.0-py3-none-any.whl", hash = 
"sha256:22cf0244a5522eba69dcb839a25735630effd128d6cb14a24f841f9cba09f3df"}, ] "aws-cdk.aws-codestarnotifications" = [ - {file = "aws-cdk.aws-codestarnotifications-1.144.0.tar.gz", hash = "sha256:afa8e2ea5c09b330e81fe9d52c5a14d69e2b88435ff5222ac48629f8fb902fc9"}, - {file = "aws_cdk.aws_codestarnotifications-1.144.0-py3-none-any.whl", hash = "sha256:5d0baa762350f0313fb6d308e059e8f4e1c9ceffdcc39f3d949d1111c5b4f5e9"}, + {file = "aws-cdk.aws-codestarnotifications-1.148.0.tar.gz", hash = "sha256:849f1b5278f8b7d60932ae93b7955966590bd96c534fe33c6a02916daff5624d"}, + {file = "aws_cdk.aws_codestarnotifications-1.148.0-py3-none-any.whl", hash = "sha256:7b4c1a307973a0a495e38814fa3fc2cc56b9ae84b8046791d37527d832ff2ac0"}, ] "aws-cdk.aws-ec2" = [ - {file = "aws-cdk.aws-ec2-1.144.0.tar.gz", hash = "sha256:19984399f8c444bee71f843c082ced51676b76df10ef5e7b6924d94c051799ed"}, - {file = "aws_cdk.aws_ec2-1.144.0-py3-none-any.whl", hash = "sha256:a8c77b256e89214d90b4a34e882369731c2271570f96455d77661a483f761284"}, + {file = "aws-cdk.aws-ec2-1.148.0.tar.gz", hash = "sha256:92d9b31b6b8797da5f8765a9691d71e8009421a5a380c4afc0318fdd5ebf365f"}, + {file = "aws_cdk.aws_ec2-1.148.0-py3-none-any.whl", hash = "sha256:eb2d7a39d878d2fe72ebbf570503662863e960c5965bd71e9cdf04b2953428c2"}, ] "aws-cdk.aws-ecr" = [ - {file = "aws-cdk.aws-ecr-1.144.0.tar.gz", hash = "sha256:673cc596b60708525a0f4a7c6489f05ff66aa8b8c41aba12c5403cbe0944e3ca"}, - {file = "aws_cdk.aws_ecr-1.144.0-py3-none-any.whl", hash = "sha256:334f0397e7da4abdbacc341b351f46be8106031a6f91639eef1503d4849c35b8"}, + {file = "aws-cdk.aws-ecr-1.148.0.tar.gz", hash = "sha256:9a9524db3b066a575c7e6a3d48f4487535990ba67cbee18f61c2013accecd84c"}, + {file = "aws_cdk.aws_ecr-1.148.0-py3-none-any.whl", hash = "sha256:d35dbedb1534619060822f91238a534a54c1aba4f37bd74c5b36b094ff97ce44"}, ] "aws-cdk.aws-ecr-assets" = [ - {file = "aws-cdk.aws-ecr-assets-1.144.0.tar.gz", hash = "sha256:5a9a94137da9756982e39e2ccc1499811c42d873b1c617c9cfc47b973b66191a"}, - {file = "aws_cdk.aws_ecr_assets-1.144.0-py3-none-any.whl", hash = "sha256:e226da9046db86cf1d70efc1147b08f129e7e767204eb63a033b5132eeef0d26"}, + {file = "aws-cdk.aws-ecr-assets-1.148.0.tar.gz", hash = "sha256:8f2242dbf91fe7a031bb5c35fd3621eb4770f8797a5b48ec909151ae30d0c691"}, + {file = "aws_cdk.aws_ecr_assets-1.148.0-py3-none-any.whl", hash = "sha256:626cdf326b126ce1bac0e5d11ff41097a029ec0f92212cfac4a118170c2fbec2"}, ] "aws-cdk.aws-efs" = [ - {file = "aws-cdk.aws-efs-1.144.0.tar.gz", hash = "sha256:47ac47fd80a5f9cb6b774c37f8171504ebf2cd67ff7b13326dd2a0cef9662805"}, - {file = "aws_cdk.aws_efs-1.144.0-py3-none-any.whl", hash = "sha256:02f233eb82111eb5c22c118e26801cde52122ff813ea1bfcc8fc0e708acb0e94"}, + {file = "aws-cdk.aws-efs-1.148.0.tar.gz", hash = "sha256:671fe5fbfeb74165e90bbc14723e8ebb3019a19ac2947931b9b96d8fc3ed34e2"}, + {file = "aws_cdk.aws_efs-1.148.0-py3-none-any.whl", hash = "sha256:b178ef83310427abf4ac2eae36fcce5ac8a5dfaea7e7054f6aea423a32b23112"}, ] "aws-cdk.aws-events" = [ - {file = "aws-cdk.aws-events-1.144.0.tar.gz", hash = "sha256:6fd7ab316d3efff508e3dd232dacb73729deaf4a537f4b02e2540447f05c5519"}, - {file = "aws_cdk.aws_events-1.144.0-py3-none-any.whl", hash = "sha256:9f15ac7967ad3b03da819484112851b47b376b071b9fe70fe1e1432fa4564c5f"}, + {file = "aws-cdk.aws-events-1.148.0.tar.gz", hash = "sha256:b5575bccce915c491f3f9f3597b6d2f9bc3c600dd396201212d0ac0bf9567f6b"}, + {file = "aws_cdk.aws_events-1.148.0-py3-none-any.whl", hash = "sha256:60846d46e859b42abc32e0fdacf15854a3e64d203f11551e71e0e690d484f1a8"}, ] "aws-cdk.aws-glue" = [ - 
{file = "aws-cdk.aws-glue-1.144.0.tar.gz", hash = "sha256:f9abf47d096196e8a04caae969ef637d155f8d986b4f19149754bdb582a7ab4e"}, - {file = "aws_cdk.aws_glue-1.144.0-py3-none-any.whl", hash = "sha256:6c30c8b649fa82f217d0850111ab2335125c7087ef1ab6e464fd2bdadac70b4d"}, + {file = "aws-cdk.aws-glue-1.148.0.tar.gz", hash = "sha256:018d84ecc8ecc77947ffbf6ae0615538a0249c0cdf16cbca97245e595c787fd0"}, + {file = "aws_cdk.aws_glue-1.148.0-py3-none-any.whl", hash = "sha256:b885bfe9e65f774c987332e1579d63de704b1dc8e7fd1c1b3f26cff05aae2160"}, ] "aws-cdk.aws-iam" = [ - {file = "aws-cdk.aws-iam-1.144.0.tar.gz", hash = "sha256:f9c46e54c3fc10bb4246b8df41d4470c74850d7a1ca9a8094e552e0adbde2964"}, - {file = "aws_cdk.aws_iam-1.144.0-py3-none-any.whl", hash = "sha256:e32ced31e07af61fd0bd7a50d75b72afc224c8fd106fc3c37a60a288120961ab"}, + {file = "aws-cdk.aws-iam-1.148.0.tar.gz", hash = "sha256:100535871b86d7a639a9406b7903a538c9d99ffb37459532ec366fa29ca81872"}, + {file = "aws_cdk.aws_iam-1.148.0-py3-none-any.whl", hash = "sha256:15fb4515a8d92dcc5db397c9ff7195ec9545b4bbe76d9f39edaddb9e8d59d70c"}, ] "aws-cdk.aws-kms" = [ - {file = "aws-cdk.aws-kms-1.144.0.tar.gz", hash = "sha256:55cd1ee139de85747d6acc1911b80892888535f7a0b5bbf2a32c5cca52feb26c"}, - {file = "aws_cdk.aws_kms-1.144.0-py3-none-any.whl", hash = "sha256:ac27f11fe74ac6d3fcdf12f1c2514ddd7fad330b4cd08ba4baf4aa392fae2f4e"}, + {file = "aws-cdk.aws-kms-1.148.0.tar.gz", hash = "sha256:9eeb5e5308a57d2e231527bc0bdde2d7c9f3e4082a5f03a829f84f5031f7b0c5"}, + {file = "aws_cdk.aws_kms-1.148.0-py3-none-any.whl", hash = "sha256:e430666831993b26cd1b28e0aca8656f0f3e738c6d308059d356b4ec0f7fc162"}, ] "aws-cdk.aws-lakeformation" = [ - {file = "aws-cdk.aws-lakeformation-1.144.0.tar.gz", hash = "sha256:c4da9098e61361c5b334d399282d8f21351548aa3fbbc47b76d1fd017100d11b"}, - {file = "aws_cdk.aws_lakeformation-1.144.0-py3-none-any.whl", hash = "sha256:66fdcb11f4bab3237d16c47d400c73ec01393333d644ba6d58a81ce9f0feb8ca"}, + {file = "aws-cdk.aws-lakeformation-1.148.0.tar.gz", hash = "sha256:35793a07cde6c26b6e43ea929563e25802b00159ed7be72bd3fb90d8fa31170c"}, + {file = "aws_cdk.aws_lakeformation-1.148.0-py3-none-any.whl", hash = "sha256:398f7a2887a80120ca5a92c3b7a42041e729dd8caf4150182616336e4f0d27b2"}, ] "aws-cdk.aws-lambda" = [ - {file = "aws-cdk.aws-lambda-1.144.0.tar.gz", hash = "sha256:d653487b9c8bbd16063a1b61ec533d06fe8ab1b10d1eb367ff2057ffb983609e"}, - {file = "aws_cdk.aws_lambda-1.144.0-py3-none-any.whl", hash = "sha256:357c59f8eaad933b7f9903b46047488d5eab4d2f85fdb6dad4e1d55ea9dcbd8a"}, + {file = "aws-cdk.aws-lambda-1.148.0.tar.gz", hash = "sha256:160edb2000a273c827c4b17b86bec5333db349b4ee24f3d90c98b5dcf4d5d90f"}, + {file = "aws_cdk.aws_lambda-1.148.0-py3-none-any.whl", hash = "sha256:9f1ac7900fadead264c67ba816e68cb2c923c6636bc5018af46d59cc473e1eec"}, ] "aws-cdk.aws-logs" = [ - {file = "aws-cdk.aws-logs-1.144.0.tar.gz", hash = "sha256:5212b0adc155dd5fc244aaec8ffed74912bc4f06c0e0281d83d40e51e7e438a8"}, - {file = "aws_cdk.aws_logs-1.144.0-py3-none-any.whl", hash = "sha256:98253c9c5e1175866ea13886534ec3d091406be9386d01173388304bf5a26b71"}, + {file = "aws-cdk.aws-logs-1.148.0.tar.gz", hash = "sha256:a63966962fbf79bbed974b1385954fa5b1adcf166ce59b07e4bc44dcaed9ca68"}, + {file = "aws_cdk.aws_logs-1.148.0-py3-none-any.whl", hash = "sha256:2c7fe4395c5f875acb7bf93c9120553fe1306f5308084f832170a9c38c26be1d"}, ] "aws-cdk.aws-neptune" = [ - {file = "aws-cdk.aws-neptune-1.144.0.tar.gz", hash = "sha256:254d2f9bade919f915d6ead2ec2a34364f3554086b51bad599cb973c07a39926"}, - {file = 
"aws_cdk.aws_neptune-1.144.0-py3-none-any.whl", hash = "sha256:81822ccb35b45d1e2cf905f9fd5dcf8108d63fa646767cdb0cd7a740eb1364f3"}, + {file = "aws-cdk.aws-neptune-1.148.0.tar.gz", hash = "sha256:98cb27f33d0821d0b9432b2f807f6d42bf3fcc5b68beaf3b14c2f1cf8f1b2602"}, + {file = "aws_cdk.aws_neptune-1.148.0-py3-none-any.whl", hash = "sha256:81016e389af8aa8baeacaf642cb4fd0620676059989f14bbf39dea921a5ca9f6"}, ] "aws-cdk.aws-opensearchservice" = [ - {file = "aws-cdk.aws-opensearchservice-1.144.0.tar.gz", hash = "sha256:70ca76b67e7731009cf14649992b98d7556becd728dd3c3ce946a38ecb776e1e"}, - {file = "aws_cdk.aws_opensearchservice-1.144.0-py3-none-any.whl", hash = "sha256:1624a09871302779ad4a130671c6254064bf59a1ad314ff7f3d07922543c328f"}, + {file = "aws-cdk.aws-opensearchservice-1.148.0.tar.gz", hash = "sha256:d95aada4dd6c6ac487ed56af0625721f94ff02ff2e227a65f4ea5d2843a8c5a3"}, + {file = "aws_cdk.aws_opensearchservice-1.148.0-py3-none-any.whl", hash = "sha256:4878ba34a85411023fc284cbc205165ac2b9b972dba8129235372c496002897e"}, ] "aws-cdk.aws-rds" = [ - {file = "aws-cdk.aws-rds-1.144.0.tar.gz", hash = "sha256:ad26c542c4fe6adb776d75b52f2891f90ace6adaa8b2ffc043eddf50dba86004"}, - {file = "aws_cdk.aws_rds-1.144.0-py3-none-any.whl", hash = "sha256:959ba156ac8ffcaf6421e164eb78d00feb7561518a7944e74f8a368735761e18"}, + {file = "aws-cdk.aws-rds-1.148.0.tar.gz", hash = "sha256:16c65384cacec36cceeae8bff66b5ac0e0f9dcfda0f6d2f6a751589cef1b855a"}, + {file = "aws_cdk.aws_rds-1.148.0-py3-none-any.whl", hash = "sha256:0ad4a4eb06b43274952edeb4c270fe4965b868962a5cee5441a8800031d25050"}, ] "aws-cdk.aws-redshift" = [ - {file = "aws-cdk.aws-redshift-1.144.0.tar.gz", hash = "sha256:9bb00eb16c77f5a0c1fc01708b4b5220853a53d6fe962050a040790c833ba37e"}, - {file = "aws_cdk.aws_redshift-1.144.0-py3-none-any.whl", hash = "sha256:0340befcbabeec3f4d91cd6db4dd53bb410abf88c8c29ca48af897a4e3747531"}, + {file = "aws-cdk.aws-redshift-1.148.0.tar.gz", hash = "sha256:f652fadf4e756e680cb92fd4bbade49c0f3f07d2ea2a5d04d8b29264c72a90ad"}, + {file = "aws_cdk.aws_redshift-1.148.0-py3-none-any.whl", hash = "sha256:7ea7f922bfdeabf162cea76c2f79c9846851605e78c2fd3ad3b2d23e99c041f8"}, ] "aws-cdk.aws-route53" = [ - {file = "aws-cdk.aws-route53-1.144.0.tar.gz", hash = "sha256:fabc01b6976a8f755bd48142c6f166f32d5c87f7309b146cecd9a6dba86d8963"}, - {file = "aws_cdk.aws_route53-1.144.0-py3-none-any.whl", hash = "sha256:524dc3f4b676c75a6beaf3943fb8ac5392de918d55edc8f4938f33db09e1ee7d"}, + {file = "aws-cdk.aws-route53-1.148.0.tar.gz", hash = "sha256:335afd7eaa518122027aff0a3842ecbde9e75ff4c0ae5dee57cf6750576e844a"}, + {file = "aws_cdk.aws_route53-1.148.0-py3-none-any.whl", hash = "sha256:e171b3d9df7ec1c1c9893d2ce3124f0aff91728ac018548070c12bffa7e7feaf"}, ] "aws-cdk.aws-s3" = [ - {file = "aws-cdk.aws-s3-1.144.0.tar.gz", hash = "sha256:88737f283e49ae970786d9c059bd50f43160acaeac510aec473dfda7ef5597f6"}, - {file = "aws_cdk.aws_s3-1.144.0-py3-none-any.whl", hash = "sha256:1d72191f899415de71a3640917be4f5da0a37ceeb4659a773b2a5a2fd8d1105e"}, + {file = "aws-cdk.aws-s3-1.148.0.tar.gz", hash = "sha256:24274f28d2351dbb98319b24cb02e79825202fde0be87d7221ded89048f6add8"}, + {file = "aws_cdk.aws_s3-1.148.0-py3-none-any.whl", hash = "sha256:5d8f1cb8a595af2a352fa5298e5b6644d9db2fd0961c3c5e5b412c7199c2c005"}, ] "aws-cdk.aws-s3-assets" = [ - {file = "aws-cdk.aws-s3-assets-1.144.0.tar.gz", hash = "sha256:f27be123c988fe1f1637a2352952be831ade5f6b490e5530007fd564bbfe8018"}, - {file = "aws_cdk.aws_s3_assets-1.144.0-py3-none-any.whl", hash = 
"sha256:5d329fac1be62240fae51b15dd8e26e78e962b088c44d5ccd112b8678a3aba13"}, + {file = "aws-cdk.aws-s3-assets-1.148.0.tar.gz", hash = "sha256:2b84b33b4181f4606d8a1a249b51857030d69e1b4fc78abd5c45242b46b5d28a"}, + {file = "aws_cdk.aws_s3_assets-1.148.0-py3-none-any.whl", hash = "sha256:b008adcb99fd016e394573bd00f546b91b8182b6ee7f40e354ecf74762220b51"}, ] "aws-cdk.aws-sam" = [ - {file = "aws-cdk.aws-sam-1.144.0.tar.gz", hash = "sha256:2c20bc68e51a024ce3dc0e0cabed79772f37d20a8de1b61e228461f4951d1aad"}, - {file = "aws_cdk.aws_sam-1.144.0-py3-none-any.whl", hash = "sha256:96563a6a64126096ecd60d2dc8dc36471f04a18ae9d7371532767020f18a8cc3"}, + {file = "aws-cdk.aws-sam-1.148.0.tar.gz", hash = "sha256:3c0ff5c4b5e1c7995a4708313814280fd61e0f2114ca0f08ce151f53525ee202"}, + {file = "aws_cdk.aws_sam-1.148.0-py3-none-any.whl", hash = "sha256:7b3df8068e4ff6797ef73ed27c404c6551347359211e30cbfb6d70d26e7f1852"}, ] "aws-cdk.aws-secretsmanager" = [ - {file = "aws-cdk.aws-secretsmanager-1.144.0.tar.gz", hash = "sha256:d4f73db193f52f4f405b48b7f8be454011b11164ba7e81c212c1f89c078e5fde"}, - {file = "aws_cdk.aws_secretsmanager-1.144.0-py3-none-any.whl", hash = "sha256:d60af58aeb26a7f217d80e4e2d161a3692a196598f69840a4dde8f2979eb3895"}, + {file = "aws-cdk.aws-secretsmanager-1.148.0.tar.gz", hash = "sha256:7be5554509806a9a2363acd124c7bf9af795a44625c2764fc07c52e24e7e3fb3"}, + {file = "aws_cdk.aws_secretsmanager-1.148.0-py3-none-any.whl", hash = "sha256:9ffbe7093604f4680779b286c1e86a34bf45b41dcf5e70ea8186eb205cb2ab60"}, ] "aws-cdk.aws-signer" = [ - {file = "aws-cdk.aws-signer-1.144.0.tar.gz", hash = "sha256:bd0199b83178d5852b58670cc62e8a36c8a970bbd217825f77c8ded140725637"}, - {file = "aws_cdk.aws_signer-1.144.0-py3-none-any.whl", hash = "sha256:939f99d364a28758d478b9646b35b4e83d2a514f21bb1aa66742399077cb87ce"}, + {file = "aws-cdk.aws-signer-1.148.0.tar.gz", hash = "sha256:96b3337ff03e8254aeca9e6282217e6be00a82611a974fbe5970e5e5c475d120"}, + {file = "aws_cdk.aws_signer-1.148.0-py3-none-any.whl", hash = "sha256:b12079cf55c7e6a4a7ec4c926bf528ae371701b13544e4d1a123190e3113625a"}, ] "aws-cdk.aws-sns" = [ - {file = "aws-cdk.aws-sns-1.144.0.tar.gz", hash = "sha256:c261aa28669d036d58f9e6999ed979e6a1843258969536076a420bb5a3981828"}, - {file = "aws_cdk.aws_sns-1.144.0-py3-none-any.whl", hash = "sha256:ed4f3e464721aeb803e876037b26ad107f9754858b8cbf09152f5ae16649ebd4"}, + {file = "aws-cdk.aws-sns-1.148.0.tar.gz", hash = "sha256:7ad75ed8d6de304e482d93f567cbbc7341a42cc25e4270880323ca0c57f22085"}, + {file = "aws_cdk.aws_sns-1.148.0-py3-none-any.whl", hash = "sha256:996482bc3ddd0199165a7240f6406e595ea9d3af3c6d34ea9118ae376ef101b8"}, ] "aws-cdk.aws-sqs" = [ - {file = "aws-cdk.aws-sqs-1.144.0.tar.gz", hash = "sha256:4b54adbc1da6a91fd575845fa6e7fbca4de5e858df21caf81636086b256a00b3"}, - {file = "aws_cdk.aws_sqs-1.144.0-py3-none-any.whl", hash = "sha256:9b57c1a402865251df9f41f12341d2a38647b6bd0948c760a686d357b3edcf27"}, + {file = "aws-cdk.aws-sqs-1.148.0.tar.gz", hash = "sha256:d60507533610d24cb0a3f9888b2678e169ab8db9c207def50b5acba5e77a9f68"}, + {file = "aws_cdk.aws_sqs-1.148.0-py3-none-any.whl", hash = "sha256:62d8eb222f3928124c7178b6733212f4417a076fe3c778fc649e2ffcc66483cf"}, ] "aws-cdk.aws-ssm" = [ - {file = "aws-cdk.aws-ssm-1.144.0.tar.gz", hash = "sha256:7ead4b75103261a5356bacd9b70f7eb680e30b5d2f9cd7977b89f012c3919cd2"}, - {file = "aws_cdk.aws_ssm-1.144.0-py3-none-any.whl", hash = "sha256:87ff66213ebe0d5ea7adbc5909887c8bbe4024d88037fd5231523f81053f834f"}, + {file = "aws-cdk.aws-ssm-1.148.0.tar.gz", hash = 
"sha256:e120fcef439e91fb5c15d2f841730acc1e20b4755289a94fae52b12108b5e197"}, + {file = "aws_cdk.aws_ssm-1.148.0-py3-none-any.whl", hash = "sha256:cbddca0c74203b210468ec7b7e9815fd0a9e67767a4f96694ebf2e3a9dc8954a"}, ] "aws-cdk.cloud-assembly-schema" = [ - {file = "aws-cdk.cloud-assembly-schema-1.144.0.tar.gz", hash = "sha256:838610bcbb098ea5f700a032ce3bad8b630295f72b2cbb67fe25ea413394049d"}, - {file = "aws_cdk.cloud_assembly_schema-1.144.0-py3-none-any.whl", hash = "sha256:ca29af10806f5bf33563daebb01ec221862c6981de200605ff911a8d70aec81a"}, + {file = "aws-cdk.cloud-assembly-schema-1.148.0.tar.gz", hash = "sha256:a031a14740209eba20158e3fa19cf154b897005bc3178ae5bffdf54e5ac29c0a"}, + {file = "aws_cdk.cloud_assembly_schema-1.148.0-py3-none-any.whl", hash = "sha256:a13c65c434c4cb9c6da7575f5570122a7c27b665fe0881b9a6c77fcaefb037c8"}, ] "aws-cdk.core" = [ - {file = "aws-cdk.core-1.144.0.tar.gz", hash = "sha256:165a20f2d5cc3d6a41d97b5dfd66f9b6e9021e6657c20d282486803195fe73b0"}, - {file = "aws_cdk.core-1.144.0-py3-none-any.whl", hash = "sha256:d179cb4f7deb5f68436c67efb025372e095c5da0659d113476a3635a5352f8d1"}, + {file = "aws-cdk.core-1.148.0.tar.gz", hash = "sha256:78c80d483ad1319ac532571af697ad184cdcc6352ae9c620f1515e157399d25a"}, + {file = "aws_cdk.core-1.148.0-py3-none-any.whl", hash = "sha256:9e5e73826e6f3d50290b0819d182cdbe095d84fcb54210d9ecd709a9d8c4681d"}, ] "aws-cdk.custom-resources" = [ - {file = "aws-cdk.custom-resources-1.144.0.tar.gz", hash = "sha256:18639c531ff0c871c7394a728bdfc06dcb910ad6670811500be61f6c663f7b2c"}, - {file = "aws_cdk.custom_resources-1.144.0-py3-none-any.whl", hash = "sha256:b30702665d7810118fde9ef86541a9c8a537d6975dcfca24284cc7b2acb37fe2"}, + {file = "aws-cdk.custom-resources-1.148.0.tar.gz", hash = "sha256:fa533e42bda9ac333f78d009f046a12ba69e7ba8c35f3bf65846e0cb26c6e0e7"}, + {file = "aws_cdk.custom_resources-1.148.0-py3-none-any.whl", hash = "sha256:3d3e609aa57a718dcd1c83bd25f661d717e20d5bb6f5095723b4077ff86645a7"}, ] "aws-cdk.cx-api" = [ - {file = "aws-cdk.cx-api-1.144.0.tar.gz", hash = "sha256:ecfe58cee0fbad6ee225b1658f8b717e8df5c0edb8e05b2ed905e3a4c0fe8b99"}, - {file = "aws_cdk.cx_api-1.144.0-py3-none-any.whl", hash = "sha256:eaf6730a615bf275af3cd283708c7bafa731cb32279baf253f35a42d4735c2fa"}, + {file = "aws-cdk.cx-api-1.148.0.tar.gz", hash = "sha256:403d4b11d88e1cb79f00028a331cb792e74d3d1b0a47cbc93840732c6d9eaa50"}, + {file = "aws_cdk.cx_api-1.148.0-py3-none-any.whl", hash = "sha256:83d14f3078430d9a455013ab30d3e374bc5f3617fc821dd4a75b775e774c9da8"}, ] "aws-cdk.region-info" = [ - {file = "aws-cdk.region-info-1.144.0.tar.gz", hash = "sha256:83fd5b133a8ce0d7a39da3d8c66ee90cf139fdfe825ea14af7ef52dbd4c944db"}, - {file = "aws_cdk.region_info-1.144.0-py3-none-any.whl", hash = "sha256:5236ede32213866591fe3f96484913654df350d7f6633b2b1d61d6ca5b35bde5"}, + {file = "aws-cdk.region-info-1.148.0.tar.gz", hash = "sha256:bad2c7a9bbf4e7550040dad2c95618e155652b74c0410e471fd3cd0fc3f9f4e3"}, + {file = "aws_cdk.region_info-1.148.0-py3-none-any.whl", hash = "sha256:411f14a98c0a43e63b7dc37cd274acab977b93a8c424fcf2589686fbaa719def"}, ] cattrs = [ {file = "cattrs-1.0.0-py2.py3-none-any.whl", hash = "sha256:616972ae3dfa6e623a40ad3cb845420e64942989152774ab055e5c2b2f89f997"}, {file = "cattrs-1.0.0.tar.gz", hash = "sha256:b7ab5cf8ad127c42eefd01410c1c6e28569a45a255ea80ed968511873c433c7a"}, - {file = "cattrs-1.8.0-py3-none-any.whl", hash = "sha256:901fb2040529ae8fc9d93f48a2cdf7de3e983312ffb2a164ffa4e9847f253af1"}, - {file = "cattrs-1.8.0.tar.gz", hash = 
"sha256:5c121ab06a7cac494813c228721a7feb5a6423b17316eeaebf13f5a03e5b0d53"}, + {file = "cattrs-1.10.0-py3-none-any.whl", hash = "sha256:35dd9063244263e63bd0bd24ea61e3015b00272cead084b2c40d788b0f857c46"}, + {file = "cattrs-1.10.0.tar.gz", hash = "sha256:211800f725cdecedcbcf4c753bbd22d248312b37d130f06045434acb7d9b34e1"}, ] constructs = [ - {file = "constructs-3.3.161-py3-none-any.whl", hash = "sha256:3215f2a3628584ad8e6a5ebabf4e1cc0b125367f2347e6fa0d9ccfd735ac2bbb"}, - {file = "constructs-3.3.161.tar.gz", hash = "sha256:2b33c412ff0f1d21205d85f778e4594a35c9c98b65cb47fea7533fbe40de1730"}, + {file = "constructs-3.3.239-py3-none-any.whl", hash = "sha256:df09d47f15a7dbd4865c794da5c9e8d4b867f1c3dc7aef5a828f68890525818e"}, + {file = "constructs-3.3.239.tar.gz", hash = "sha256:792ecde6d465c7bca090e41f40b76798358f000c6800b55435d0b51cf8d8e9e4"}, ] importlib-resources = [ {file = "importlib_resources-5.4.0-py3-none-any.whl", hash = "sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"}, {file = "importlib_resources-5.4.0.tar.gz", hash = "sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b"}, ] jsii = [ - {file = "jsii-1.52.1-py3-none-any.whl", hash = "sha256:c1477f17275d62fbda28ea25c1f0a6cd1d95c1b73dda70835f33e34f0b70ac52"}, - {file = "jsii-1.52.1.tar.gz", hash = "sha256:32f886e99c06a23943986e9580f553860aabeb91dc3a3520d5b76fef1e631c04"}, + {file = "jsii-1.55.0-py3-none-any.whl", hash = "sha256:68a25d3b5fcb21a7bf9b58012ce7eb7e0c00c5654efed9294598a457d19bf9d7"}, + {file = "jsii-1.55.0.tar.gz", hash = "sha256:143188278318eaf74c11cb9c9beca91c3151488f001eae85f358d9b492a56642"}, ] publication = [ {file = "publication-0.0.3-py2.py3-none-any.whl", hash = "sha256:0248885351febc11d8a1098d5c8e3ab2dabcf3e8c0c96db1e17ecd12b53afbe6"}, @@ -994,9 +996,8 @@ six = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] typing-extensions = [ - {file = "typing_extensions-3.10.0.2-py2-none-any.whl", hash = "sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7"}, - {file = "typing_extensions-3.10.0.2-py3-none-any.whl", hash = "sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34"}, - {file = "typing_extensions-3.10.0.2.tar.gz", hash = "sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e"}, + {file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"}, + {file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"}, ] zipp = [ {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, diff --git a/test_infra/pyproject.toml b/test_infra/pyproject.toml index a83502983..f2bc420e6 100644 --- a/test_infra/pyproject.toml +++ b/test_infra/pyproject.toml @@ -7,17 +7,17 @@ license = "Apache License 2.0" [tool.poetry.dependencies] python = ">=3.6.2, <3.11" -"aws-cdk.core" = "^1.124.0" -"aws-cdk.aws-ec2" = "^1.124.0" -"aws-cdk.aws-glue" = "^1.124.0" -"aws-cdk.aws-iam" = "^1.124.0" -"aws-cdk.aws-kms" = "^1.124.0" -"aws-cdk.aws-logs" = "^1.124.0" -"aws-cdk.aws-s3" = "^1.124.0" -"aws-cdk.aws-redshift" = "^1.124.0" -"aws-cdk.aws-rds" = "^1.124.0" -"aws-cdk.aws-secretsmanager" = "^1.124.0" -"aws-cdk.aws-ssm" = "^1.124.0" -"aws-cdk.aws-opensearchservice" = "^1.124.0" -"aws-cdk.aws-lakeformation" = "^1.124.0" -"aws-cdk.aws-neptune" = "^1.144.0" +"aws-cdk.core" = "^1.148.0" +"aws-cdk.aws-ec2" = 
"^1.148.0" +"aws-cdk.aws-glue" = "^1.148.0" +"aws-cdk.aws-iam" = "^1.148.0" +"aws-cdk.aws-kms" = "^1.148.0" +"aws-cdk.aws-logs" = "^1.148.0" +"aws-cdk.aws-s3" = "^1.148.0" +"aws-cdk.aws-redshift" = "^1.148.0" +"aws-cdk.aws-rds" = "^1.148.0" +"aws-cdk.aws-secretsmanager" = "^1.148.0" +"aws-cdk.aws-ssm" = "^1.148.0" +"aws-cdk.aws-opensearchservice" = "^1.148.0" +"aws-cdk.aws-lakeformation" = "^1.148.0" +"aws-cdk.aws-neptune" = "^1.148.0" From 0ae781b574c93c1c2af2d31063cbac1e6a6a7d57 Mon Sep 17 00:00:00 2001 From: Abdel Jaidi Date: Fri, 11 Mar 2022 16:53:30 +0000 Subject: [PATCH 20/32] Fix pydocstyle --- awswrangler/neptune/_utils.py | 34 +++++---- awswrangler/neptune/client.py | 100 ++++++++++++++++---------- awswrangler/neptune/gremlin_parser.py | 16 +++-- awswrangler/neptune/neptune.py | 42 +++++++---- 4 files changed, 118 insertions(+), 74 deletions(-) diff --git a/awswrangler/neptune/_utils.py b/awswrangler/neptune/_utils.py index a2e744ab4..e46d5426d 100644 --- a/awswrangler/neptune/_utils.py +++ b/awswrangler/neptune/_utils.py @@ -1,4 +1,4 @@ -"""Amazon Neptune Utils Module (PRIVATE)""" +"""Amazon Neptune Utils Module (PRIVATE).""" import logging from enum import Enum @@ -16,7 +16,7 @@ class WriteDFType(Enum): - """Dataframe type enum""" + """Dataframe type enum.""" VERTEX = 1 EDGE = 2 @@ -24,19 +24,23 @@ class WriteDFType(Enum): def write_gremlin_df(client: NeptuneClient, df: pd.DataFrame, mode: WriteDFType, batch_size: int) -> bool: - """Write the provided dataframe using Gremlin - - Args: - client (NeptuneClient): The Neptune client to write the dataframe - df (pd.DataFrame): The dataframe to write - mode (WriteDFType): The type of dataframe to write - batch_size (int): The size of the batch to write - - Raises: - Exception: The underlying write exception is raised - - Returns: - (bool): True if the write operation succeeded + """Write the provided dataframe using Gremlin. + + Parameters + ---------- + client : NeptuneClient + The Neptune client to write the dataframe + df : pd.DataFrame + The dataframe to write + mode : WriteDFType + The type of dataframe to write + batch_size : int + The size of the batch to write + + Returns + ------- + bool + True if the write operation succeeded """ g = Graph().traversal() # Loop through items in the DF diff --git a/awswrangler/neptune/client.py b/awswrangler/neptune/client.py index ae4edca5b..e5a7b6ddd 100644 --- a/awswrangler/neptune/client.py +++ b/awswrangler/neptune/client.py @@ -1,4 +1,4 @@ -"""Amazon NeptuneClient Module""" +"""Amazon NeptuneClient Module.""" import logging from typing import Any, Optional @@ -23,7 +23,7 @@ class NeptuneClient: - """This object represents a Neptune cluster connection.""" + """Class representing a Neptune cluster connection.""" def __init__( self, @@ -111,14 +111,19 @@ def _get_aws_request( return req def read_opencypher(self, query: str, headers: Any = None) -> Any: - """Executes the provided openCypher query - - Args: - query (str): The query to execute - headers (Any, optional): Any additional headers that should be associated with the query. Defaults to None. - - Returns: - Any: [description] The result of the query + """Execute the provided openCypher query. + + Parameters + ---------- + query : str + The query to execute + headers : Any, optional + Any additional headers that should be associated with the query. Defaults to None. + + Returns + ------- + Any + The result of the query. 
""" if headers is None: headers = {} @@ -137,24 +142,31 @@ def read_opencypher(self, query: str, headers: Any = None) -> Any: raise exceptions.QueryFailed(f"Status Code: {res.status_code} Reason: {res.reason} Message: {res.text}") def read_gremlin(self, query: str, headers: Any = None) -> Any: - """Executes the provided Gremlin traversal and returns the results + """Execute the provided Gremlin traversal and returns the results. - Args: - query (str): The Gremlin query + Parameters + ---------- + query : str + The Gremlin query - Returns: - Any: [description] + Returns + ------- + Any + [description] """ return self._execute_gremlin(query, headers) def write_gremlin(self, query: str) -> bool: - """Executes a Gremlin write query + """Execute a Gremlin write query. - Args: + Parameters + ---------- query (str): The query to execute - Returns: - bool: The success of the Gremlin write query + Returns + ------- + bool + The success of the Gremlin write query """ res = self._execute_gremlin(query) _logger.debug(res) @@ -178,28 +190,38 @@ def _execute_gremlin(self, query: str, headers: Any = None) -> Any: raise exceptions.QueryFailed(e) def read_sparql(self, query: str, headers: Any = None) -> Any: - """Executes the given query and returns the results - - Args: - query ([type]): The SPARQL query to execute - headers (Any, optional): Any additional headers to include with the request. Defaults to None. - - Returns: - Any: [description] + """Execute the given query and returns the results. + + Parameters + ---------- + query : str + The SPARQL query to execute + headers : Any, optional + Any additional headers to include with the request. Defaults to None. + + Returns + ------- + Any + [description] """ res = self._execute_sparql(query, headers) _logger.debug(res) return res def write_sparql(self, query: str, headers: Any = None) -> bool: - """Executes the specified SPARQL write statements - - Args: - query ([type]): The SPARQL query to execute - headers (Any, optional): Any additional headers to include with the request. Defaults to None. - - Returns: - bool: The success of the query + """Execute the specified SPARQL write statements. + + Parameters + ---------- + query : str + The SPARQL query to execute + headers : Any, optional + Any additional headers to include with the request. Defaults to None. + + Returns + ------- + bool + The success of the query """ self._execute_sparql(query, headers) return True @@ -228,10 +250,12 @@ def _execute_sparql(self, query: str, headers: Any) -> Any: raise exceptions.QueryFailed(f"Status Code: {res.status_code} Reason: {res.reason} Message: {res.text}") def status(self) -> Any: - """Returns the status of the Neptune cluster + """Return the status of the Neptune cluster. 
        """
        res = self._execute_gremlin(query)
        _logger.debug(res)
        return True
@@ -178,28 +190,38 @@ def _execute_gremlin(self, query: str, headers: Any = None) -> Any:
         raise exceptions.QueryFailed(e)
 
     def read_sparql(self, query: str, headers: Any = None) -> Any:
-        """Executes the given query and returns the results
-
-        Args:
-            query ([type]): The SPARQL query to execute
-            headers (Any, optional): Any additional headers to include with the request. Defaults to None.
-
-        Returns:
-            Any: [description]
+        """Execute the given query and return the results.
+
+        Parameters
+        ----------
+        query : str
+            The SPARQL query to execute
+        headers : Any, optional
+            Any additional headers to include with the request. Defaults to None.
+
+        Returns
+        -------
+        Any
+            The results of the SPARQL query
         """
         res = self._execute_sparql(query, headers)
         _logger.debug(res)
         return res
 
     def write_sparql(self, query: str, headers: Any = None) -> bool:
-        """Executes the specified SPARQL write statements
-
-        Args:
-            query ([type]): The SPARQL query to execute
-            headers (Any, optional): Any additional headers to include with the request. Defaults to None.
-
-        Returns:
-            bool: The success of the query
+        """Execute the specified SPARQL write statements.
+
+        Parameters
+        ----------
+        query : str
+            The SPARQL query to execute
+        headers : Any, optional
+            Any additional headers to include with the request. Defaults to None.
+
+        Returns
+        -------
+        bool
+            The success of the query
         """
         self._execute_sparql(query, headers)
         return True
@@ -228,10 +250,12 @@ def _execute_sparql(self, query: str, headers: Any) -> Any:
         raise exceptions.QueryFailed(f"Status Code: {res.status_code} Reason: {res.reason} Message: {res.text}")
 
     def status(self) -> Any:
-        """Returns the status of the Neptune cluster
+        """Return the status of the Neptune cluster.
 
-        Returns:
-            str: The result of the call to the status API for the Neptune cluster
+        Returns
+        -------
+        str
+            The result of the call to the status API for the Neptune cluster
         """
         url = f"{HTTP_PROTOCOL}://{self.host}:{self.port}/status"
         req = self._prepare_request("GET", url, data="")
diff --git a/awswrangler/neptune/gremlin_parser.py b/awswrangler/neptune/gremlin_parser.py
index 86d7bc310..c32d77ece 100644
--- a/awswrangler/neptune/gremlin_parser.py
+++ b/awswrangler/neptune/gremlin_parser.py
@@ -5,17 +5,21 @@
 
 
 class GremlinParser:
-    """This represents a parser for returning Gremlin results as a dictionary."""
+    """Class representing a parser for returning Gremlin results as a dictionary."""
 
     @staticmethod
     def gremlin_results_to_dict(result: Any) -> Any:
-        """Takes a Gremlin ResultSet and returns a dictionary
+        """Take a Gremlin ResultSet and return a dictionary.
 
-        Args:
-            result (Any): The Gremlin resultset to convert
+        Parameters
+        ----------
+        result : Any
+            The Gremlin resultset to convert
 
-        Returns:
-            Any: A dictionary of the results
+        Returns
+        -------
+        Any
+            A dictionary of the results
         """
         res = []
diff --git a/awswrangler/neptune/neptune.py b/awswrangler/neptune/neptune.py
index 0f9c617ed..d378b73ee 100644
--- a/awswrangler/neptune/neptune.py
+++ b/awswrangler/neptune/neptune.py
@@ -1,4 +1,4 @@
-"""Amazon Neptune Module"""
+"""Amazon Neptune Module."""
 
 import logging
 from typing import Any
@@ -248,15 +248,21 @@ def to_rdf_graph(
 
 
 def connect(host: str, port: int, iam_enabled: bool = False, **kwargs: Any) -> NeptuneClient:
-    """Creates a connection to a Neptune cluster
+    """Create a connection to a Neptune cluster.
 
-    Args:
-        host (str): The host endpoint to connect to
-        port (int): The port endpoint to connect to
-        iam_enabled (bool, optional): set to True if IAM is enabled on the cluster. Defaults to False.
+    Parameters
+    ----------
+    host : str
+        The host endpoint to connect to
+    port : int
+        The port endpoint to connect to
+    iam_enabled : bool, optional
+        True if IAM is enabled on the cluster. Defaults to False.
 
-    Returns:
-        NeptuneClient: [description]
+    Returns
+    -------
+    NeptuneClient
+        The Neptune client object
     """
     return NeptuneClient(host, port, iam_enabled, **kwargs)
@@ -322,17 +328,23 @@ def _run_gremlin_insert(client: NeptuneClient, g: GraphTraversalSource) -> bool:
 
 
 def flatten_nested_df(
     df: pd.DataFrame, include_prefix: bool = True, seperator: str = "_", recursive: bool = True
 ) -> pd.DataFrame:
-    """This will flatten the lists and dictionaries of the input data frame
+    """Flatten the lists and dictionaries of the input data frame.
 
-    Args:
-        df (pd.DataFrame): The input data frame
-        include_prefix (bool, optional): If True, then it will prefix the new column name with the original column name.
+    Parameters
+    ----------
+    df : pd.DataFrame
+        The input data frame
+    include_prefix : bool, optional
+        If True, then it will prefix the new column name with the original column name.
         Defaults to True.
-        seperator (str, optional): The seperator to use between field names when a dictionary is exploded.
+    seperator : str, optional
+        The separator to use between field names when a dictionary is exploded.
         Defaults to "_".
-        recursive (bool, optional): If True, then this will recurse the fields in the data frame. Defaults to True.
+    recursive : bool, optional
+        If True, then this will recurse the fields in the data frame. Defaults to True.
 
-    Returns:
-        pd.DataFrame: The flattened data frame
+    Returns
+    -------
+    pd.DataFrame
+        The flattened data frame
     """
     if seperator is None:

From 00a68e90bb0ad34ce83d5ba0120b1ac0e56ed82c Mon Sep 17 00:00:00 2001
From: Dave Bechberger
Date: Mon, 14 Mar 2022 15:45:31 -0800
Subject: [PATCH 21/32] Added functionality to properly set the edge id values
 as well as added the ability to specify the cardinality using the column
 header
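
An illustrative sketch of the new option (names such as `client` and `my_id`
are assumptions here, with `client` coming from wr.neptune.connect): a
`(single)` postfix on a column header saves that property with single
cardinality, replacing the stored value instead of appending to the set.

    import pandas as pd
    import awswrangler as wr

    df = pd.DataFrame([{"~id": my_id, "age(single)": 50}])
    wr.neptune.to_property_graph(client, df, use_header_cardinality=True)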
---
 awswrangler/neptune/neptune.py | 84 ++++++++++++++++++++++------------
 tests/test_neptune.py          | 63 ++++++++++++++++++++++---
 2 files changed, 111 insertions(+), 36 deletions(-)

diff --git a/awswrangler/neptune/neptune.py b/awswrangler/neptune/neptune.py
index 0f9c617ed..88697bac4 100644
--- a/awswrangler/neptune/neptune.py
+++ b/awswrangler/neptune/neptune.py
@@ -1,6 +1,7 @@
 """Amazon Neptune Module"""
 
 import logging
+import re
 from typing import Any
 
 import pandas as pd
@@ -108,7 +109,9 @@ def execute_sparql(client: NeptuneClient, query: str) -> pd.DataFrame:
     return df
 
 
-def to_property_graph(client: NeptuneClient, df: pd.DataFrame, batch_size: int = 50) -> bool:
+def to_property_graph(
+    client: NeptuneClient, df: pd.DataFrame, batch_size: int = 50, use_header_cardinality: bool = False
+) -> bool:
     """Write records stored in a DataFrame into Amazon Neptune.
 
     If writing to a property graph then DataFrames for vertices and edges must be written separately.
     DataFrames for vertices must have a ~label column with the label and a ~id column for the vertex id.
     If the ~id column does not exist, the specified id does not exist, or is empty then a new vertex will be added.
     If no ~label column exists an exception will be thrown.
     DataFrames for edges must have a ~id, ~label, ~to, and ~from column. If the ~id column does not exist,
     the specified id does not exist, or is empty then a new edge will be added. If no ~label, ~to, or ~from column
     exists an exception will be thrown.
 
+    If you would like to save data using `single` cardinality then you can append `(single)` to the column header and
+    set use_header_cardinality=True. e.g. a column named `name(single)` will save the `name` property with single
+    cardinality.
+
     Parameters
     ----------
     client : NeptuneClient
         instance of the neptune client to use
     df : pandas.DataFrame
         Pandas DataFrame https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html
+    batch_size: The number of rows to save at a time. Default 50
+    use_header_cardinality: If True, then the header cardinality will be used to save the data.
Default False Returns ------- @@ -159,11 +168,11 @@ def to_property_graph(client: NeptuneClient, df: pd.DataFrame, batch_size: int = for (index, row) in df.iterrows(): # build up a query if is_update_df: - g = _build_gremlin_update(g, row) + g = _build_gremlin_update(g, row, use_header_cardinality) elif is_edge_df: - g = _build_gremlin_insert_edges(g, row.to_dict()) + g = _build_gremlin_insert_edges(g, row.to_dict(), use_header_cardinality) else: - g = _build_gremlin_insert_vertices(g, row.to_dict()) + g = _build_gremlin_insert_vertices(g, row.to_dict(), use_header_cardinality) # run the query if index > 0 and index % batch_size == 0: res = _run_gremlin_insert(client, g) @@ -261,51 +270,68 @@ def connect(host: str, port: int, iam_enabled: bool = False, **kwargs: Any) -> N return NeptuneClient(host, port, iam_enabled, **kwargs) -def _build_gremlin_update(g: GraphTraversalSource, row: Any) -> GraphTraversalSource: - g = g.V(str(row["~id"])) +def _get_column_name(column: str) -> str: + if "(single)" in column.lower(): + return re.compile(r"\(single\)", re.IGNORECASE).sub("", column) + else: + return column + + +def _set_properties(g: GraphTraversalSource, use_header_cardinality: bool, row: Any) -> GraphTraversalSource: for (column, value) in row.items(): - if column not in ["~id", "~label"]: - if isinstance(value, list) and len(value) > 0: - for item in value: - g = g.property(Cardinality.set_, column, item) - elif not pd.isna(value) and not pd.isnull(value): - g = g.property(column, value) + if column not in ["~id", "~label", "~to", "~from"]: + # If the column header is specifying the cardinality then use it + if use_header_cardinality: + if column.lower().find("(single)") > 0: + g = g.property(Cardinality.single, _get_column_name(column), value) + else: + g = _expand_properties(g, _get_column_name(column), value) + else: + # If not using header cardinality then use the default of set + g = _expand_properties(g, _get_column_name(column), value) + return g + +def _expand_properties(g: GraphTraversalSource, column: str, value: Any) -> GraphTraversalSource: + # If this is a list then expand it out into multiple property calls + if isinstance(value, list) and len(value) > 0: + for item in value: + g = g.property(Cardinality.set_, _get_column_name(column), item) + else: + g = g.property(Cardinality.set_, _get_column_name(column), value) return g -def _build_gremlin_insert_vertices(g: GraphTraversalSource, row: Any) -> GraphTraversalSource: - g = g.V(str(row["~id"])).fold().coalesce(__.unfold(), __.addV(row["~label"]).property(T.id, str(row["~id"]))) - for (column, value) in row.items(): - if column not in ["~id", "~label"]: - if isinstance(value, list) and len(value) > 0: - for item in value: - g = g.property(Cardinality.set_, column, item) - elif not pd.isna(value) and not pd.isnull(value): - g = g.property(column, value) +def _build_gremlin_update(g: GraphTraversalSource, row: Any, use_header_cardinality: bool) -> GraphTraversalSource: + g = g.V(str(row["~id"])) + g = _set_properties(g, use_header_cardinality, row) + return g + +def _build_gremlin_insert_vertices( + g: GraphTraversalSource, row: Any, use_header_cardinality: bool = False +) -> GraphTraversalSource: + g = g.V(str(row["~id"])).fold().coalesce(__.unfold(), __.addV(row["~label"]).property(T.id, str(row["~id"]))) + g = _set_properties(g, use_header_cardinality, row) return g -def _build_gremlin_insert_edges(g: GraphTraversalSource, row: pd.Series) -> GraphTraversalSource: +def _build_gremlin_insert_edges( + g: GraphTraversalSource, 
row: pd.Series, use_header_cardinality: bool +) -> GraphTraversalSource: g = ( g.V(str(row["~from"])) .fold() .coalesce(__.unfold(), _build_gremlin_insert_vertices(__, {"~id": row["~from"], "~label": "Vertex"})) .addE(row["~label"]) + .property(T.id, str(row["~id"])) .to( __.V(str(row["~to"])) .fold() .coalesce(__.unfold(), _build_gremlin_insert_vertices(__, {"~id": row["~to"], "~label": "Vertex"})) ) ) - for (column, value) in row.items(): - if column not in ["~id", "~label", "~to", "~from"]: - if isinstance(value, list) and len(value) > 0: - for item in value: - g = g.property(Cardinality.set_, column, item) - elif not pd.isna(value) and not pd.isnull(value): - g = g.property(column, value) + g = _set_properties(g, use_header_cardinality, row) return g diff --git a/tests/test_neptune.py b/tests/test_neptune.py index 8f94a07e0..fc517b64c 100644 --- a/tests/test_neptune.py +++ b/tests/test_neptune.py @@ -6,6 +6,7 @@ import pandas as pd import pytest # type: ignore +from gremlin_python.process.traversal import Direction, T import awswrangler as wr @@ -173,14 +174,25 @@ def test_gremlin_write_updates(neptune_endpoint, neptune_port) -> Dict[str, Any] id = uuid.uuid4() wr.neptune.execute_gremlin(client, f"g.addV().property(T.id, '{str(id)}')") - data = [{"~id": id, "age": 50}] + data = [{"~id": id, "age": 50, "name": "foo"}] df = pd.DataFrame(data) res = wr.neptune.to_property_graph(client, df) - assert res + res = wr.neptune.execute_gremlin(client, f"g.V('{id}').valueMap().with(WithOptions.tokens)") + saved_row = res.iloc[0] + assert saved_row["age"] == 50 final_df = wr.neptune.execute_gremlin(client, f"g.V('{str(id)}').values('age')") assert final_df.iloc[0][0] == 50 + # check write cardinality + data = [{"~id": id, "age(single)": 55, "name": "bar"}] + df = pd.DataFrame(data) + res = wr.neptune.to_property_graph(client, df, use_header_cardinality=True) + res = wr.neptune.execute_gremlin(client, f"g.V('{id}').valueMap().with(WithOptions.tokens)") + saved_row = res.iloc[0] + assert saved_row["age"] == 55 + assert saved_row["name"] == ["foo", "bar"] + def test_gremlin_write_vertices(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) @@ -191,6 +203,14 @@ def test_gremlin_write_vertices(neptune_endpoint, neptune_port) -> Dict[str, Any res = wr.neptune.to_property_graph(client, df) assert res + original_row = df.iloc[0] + res = wr.neptune.execute_gremlin(client, f"g.V('{original_row['~id']}').elementMap()") + saved_row = res.iloc[0] + assert saved_row[T.id] == original_row["~id"] + assert saved_row[T.label] == original_row["~label"] + assert saved_row["int"] == original_row["int"] + assert saved_row["str"] == original_row["str"] + final_cnt_df = wr.neptune.execute_gremlin(client, "g.V().hasLabel('foo').count()") assert final_cnt_df.iloc[0][0] == initial_cnt_df.iloc[0][0] + 3 @@ -206,6 +226,25 @@ def test_gremlin_write_vertices(neptune_endpoint, neptune_port) -> Dict[str, Any batch_cnt_df = wr.neptune.execute_gremlin(client, "g.V().hasLabel('foo').count()") assert batch_cnt_df.iloc[0][0] == final_cnt_df.iloc[0][0] + 50 + # check write cardinality + v = _create_dummy_vertex() + v2 = _create_dummy_vertex() + v2["~id"] = v["~id"] + df = pd.DataFrame([v]) + res = wr.neptune.to_property_graph(client, df) + original_row = df.iloc[0] + + # save it a second time to make sure it updates correctly when re-adding + df = pd.DataFrame([v2]) + df.rename(columns={"int": "int(single)"}, inplace=True) + res = 
wr.neptune.to_property_graph(client, df, use_header_cardinality=True) + res = wr.neptune.execute_gremlin(client, f"g.V('{original_row['~id']}').valueMap().with(WithOptions.tokens)") + saved_row = res.iloc[0] + assert saved_row[T.id] == original_row["~id"] + assert saved_row[T.label] == original_row["~label"] + assert saved_row["int"] == v2["int"] + assert len(saved_row["str"]) == 2 + def test_gremlin_write_edges(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) @@ -217,6 +256,16 @@ def test_gremlin_write_edges(neptune_endpoint, neptune_port) -> Dict[str, Any]: res = wr.neptune.to_property_graph(client, df) assert res + original_row = df.iloc[0] + res = wr.neptune.execute_gremlin(client, f"g.E('{original_row['~id']}').elementMap()") + saved_row = res.iloc[0] + assert saved_row[T.id] == original_row["~id"] + assert saved_row[T.label] == original_row["~label"] + assert saved_row[Direction.IN][T.id] == original_row["~to"] + assert saved_row[Direction.OUT][T.id] == original_row["~from"] + assert saved_row["int"] == original_row["int"] + assert saved_row["str"] == original_row["str"] + final_cnt_df = wr.neptune.execute_gremlin(client, "g.E().hasLabel('bar').count()") assert final_cnt_df.iloc[0][0] == initial_cnt_df.iloc[0][0] + 3 @@ -285,7 +334,7 @@ def test_sparql_write_quads(neptune_endpoint, neptune_port) -> Dict[str, Any]: def _create_dummy_vertex() -> Dict[str, Any]: data = dict() - data["~id"] = uuid.uuid4() + data["~id"] = str(uuid.uuid4()) data["~label"] = "foo" data["int"] = random.randint(0, 1000) data["str"] = "".join(random.choice(string.ascii_lowercase) for i in range(10)) @@ -295,10 +344,10 @@ def _create_dummy_vertex() -> Dict[str, Any]: def _create_dummy_edge() -> Dict[str, Any]: data = dict() - data["~id"] = uuid.uuid4() + data["~id"] = str(uuid.uuid4()) data["~label"] = "bar" - data["~to"] = uuid.uuid4() - data["~from"] = uuid.uuid4() + data["~to"] = str(uuid.uuid4()) + data["~from"] = str(uuid.uuid4()) data["int"] = random.randint(0, 1000) data["str"] = "".join(random.choice(string.ascii_lowercase) for i in range(10)) return data @@ -307,7 +356,7 @@ def _create_dummy_edge() -> Dict[str, Any]: def _create_dummy_triple() -> Dict[str, Any]: data = dict() data["s"] = "foo" - data["p"] = uuid.uuid4() + data["p"] = str(uuid.uuid4()) data["o"] = random.randint(0, 1000) return data From 201de7346524bb975005e46ce39bf2ae2e54d6b7 Mon Sep 17 00:00:00 2001 From: Abdel Jaidi Date: Tue, 15 Mar 2022 09:55:40 +0000 Subject: [PATCH 22/32] Validate fixes and security group in test_infra --- awswrangler/neptune/neptune.py | 3 +-- test_infra/stacks/databases_stack.py | 3 ++- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/awswrangler/neptune/neptune.py b/awswrangler/neptune/neptune.py index f7cea58fd..6ea9a1fb5 100644 --- a/awswrangler/neptune/neptune.py +++ b/awswrangler/neptune/neptune.py @@ -279,8 +279,7 @@ def connect(host: str, port: int, iam_enabled: bool = False, **kwargs: Any) -> N def _get_column_name(column: str) -> str: if "(single)" in column.lower(): return re.compile(r"\(single\)", re.IGNORECASE).sub("", column) - else: - return column + return column def _set_properties(g: GraphTraversalSource, use_header_cardinality: bool, row: Any) -> GraphTraversalSource: diff --git a/test_infra/stacks/databases_stack.py b/test_infra/stacks/databases_stack.py index b06294531..3b9b53b02 100644 --- a/test_infra/stacks/databases_stack.py +++ b/test_infra/stacks/databases_stack.py @@ -565,13 +565,14 @@ def 
_setup_sqlserver(self) -> None:
         cdk.CfnOutput(self, "SqlServerDatabase", value=database)
         cdk.CfnOutput(self, "SqlServerSchema", value=schema)
 
-    def _setup_neptune(self, iam_enabled=False, port=8182) -> None:
+    def _setup_neptune(self, iam_enabled: bool = False, port: int = 8182) -> None:
         cluster = neptune.DatabaseCluster(
             self,
             "DataWrangler",
             vpc=self.vpc,
             instance_type=neptune.InstanceType.R5_LARGE,
             iam_authentication=iam_enabled,
+            security_groups=[self.db_security_group],
         )
 
         cdk.CfnOutput(self, "NeptuneClusterEndpoint", value=cluster.cluster_endpoint.hostname)

From 4aabf846dcb1334c875a7bbad5f199544af6b392 Mon Sep 17 00:00:00 2001
From: Abdel Jaidi
Date: Tue, 15 Mar 2022 10:37:26 +0000
Subject: [PATCH 23/32] Minor - typing and docs

---
 awswrangler/neptune/client.py         | 10 +++++-----
 awswrangler/neptune/gremlin_parser.py |  8 ++++----
 awswrangler/neptune/neptune.py        | 14 ++++++++------
 3 files changed, 17 insertions(+), 15 deletions(-)

diff --git a/awswrangler/neptune/client.py b/awswrangler/neptune/client.py
index e5a7b6ddd..74eb4ab94 100644
--- a/awswrangler/neptune/client.py
+++ b/awswrangler/neptune/client.py
@@ -1,7 +1,7 @@
 """Amazon NeptuneClient Module."""
 
 import logging
-from typing import Any, Optional
+from typing import Any, Dict, List, Optional
 
 import boto3
 import nest_asyncio
@@ -141,7 +141,7 @@ def read_opencypher(self, query: str, headers: Any = None) -> Any:
             return res.json()["results"]
         raise exceptions.QueryFailed(f"Status Code: {res.status_code} Reason: {res.reason} Message: {res.text}")
 
-    def read_gremlin(self, query: str, headers: Any = None) -> Any:
+    def read_gremlin(self, query: str, headers: Any = None) -> List[Dict[str, Any]]:
         """Execute the provided Gremlin traversal and return the results.
 
         Parameters
@@ -151,8 +151,8 @@ def read_gremlin(self, query: str, headers: Any = None) -> Any:
 
         Returns
         -------
-        Any
-            [description]
+        List[Dict[str, Any]]
+            List of dictionaries with the results
         """
         return self._execute_gremlin(query, headers)
 
@@ -172,7 +172,7 @@ def write_gremlin(self, query: str) -> bool:
         _logger.debug(res)
         return True
 
-    def _execute_gremlin(self, query: str, headers: Any = None) -> Any:
+    def _execute_gremlin(self, query: str, headers: Any = None) -> List[Dict[str, Any]]:
         try:
             nest_asyncio.apply()
             uri = f"{HTTP_PROTOCOL}://{self.host}:{self.port}/gremlin"
diff --git a/awswrangler/neptune/gremlin_parser.py b/awswrangler/neptune/gremlin_parser.py
index 86d7bc310..6d095e725 100644
--- a/awswrangler/neptune/gremlin_parser.py
+++ b/awswrangler/neptune/gremlin_parser.py
@@ -1,5 +1,5 @@
 """Amazon Neptune GremlinParser Module (PRIVATE)."""
-from typing import Any, Dict
+from typing import Any, Dict, List
 
 from gremlin_python.structure.graph import Edge, Path, Property, Vertex, VertexProperty
 
@@ -8,7 +8,7 @@ class GremlinParser:
     """Class representing a parser for returning Gremlin results as a dictionary."""
 
     @staticmethod
-    def gremlin_results_to_dict(result: Any) -> Any:
+    def gremlin_results_to_dict(result: Any) -> List[Dict[str, Any]]:
         """Take a Gremlin ResultSet and return a dictionary.
Parameters @@ -18,8 +18,8 @@ def gremlin_results_to_dict(result: Any) -> Any: Returns ------- - Any - A dictionary of the results + List[Dict[str, Any]] + A list of dictionary results """ res = [] diff --git a/awswrangler/neptune/neptune.py b/awswrangler/neptune/neptune.py index 6ea9a1fb5..104c1dd88 100644 --- a/awswrangler/neptune/neptune.py +++ b/awswrangler/neptune/neptune.py @@ -132,13 +132,15 @@ def to_property_graph( instance of the neptune client to use df : pandas.DataFrame Pandas DataFrame https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html - batch_size: The number of rows to save at a time. Default 50 - use_header_cardinality: If True, then the header cardinality will be used to save the data. Default False + batch_size : int + The number of rows to save at a time. Default 50 + use_header_cardinality : bool + If True, then the header cardinality will be used to save the data. Default False Returns ------- - None - None. + bool + True if records were written Examples -------- @@ -213,8 +215,8 @@ def to_rdf_graph( Returns ------- - None - None. + bool + True if records were written Examples -------- From 52ce870094d913109a240d81a3ef6f18968a3dd1 Mon Sep 17 00:00:00 2001 From: Abdel Jaidi Date: Tue, 15 Mar 2022 11:12:49 +0000 Subject: [PATCH 24/32] [skip ci] - Minor - Add Neptune docs entry --- docs/source/api.rst | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/docs/source/api.rst b/docs/source/api.rst index 0caecc332..8222aa20e 100644 --- a/docs/source/api.rst +++ b/docs/source/api.rst @@ -12,6 +12,7 @@ API Reference * `Data API Redshift`_ * `Data API RDS`_ * `OpenSearch`_ +* `Amazon Neptune`_ * `DynamoDB`_ * `Amazon Timestream`_ * `Amazon EMR`_ @@ -240,6 +241,22 @@ OpenSearch search search_by_sql +Amazon Neptune +-------------- + +.. currentmodule:: awswrangler.neptune + +.. 
autosummary::
+    :toctree: stubs
+
+    connect
+    execute_gremlin
+    execute_opencypher
+    execute_sparql
+    flatten_nested_df
+    to_property_graph
+    to_rdf_graph
+
 DynamoDB
 --------

From f06c01ad373aeacd49d7037e51f66dbc983f422b Mon Sep 17 00:00:00 2001
From: Anton Kukushkin <3997468+kukushking@users.noreply.github.com>
Date: Tue, 15 Mar 2022 16:52:58 +0000
Subject: [PATCH 25/32] Use built-in gremlin driver functionality to enable
 event loop nesting

---
 awswrangler/neptune/client.py | 10 +++++++---
 poetry.lock                   | 30 +-----------------------------
 pyproject.toml                |  1 -
 3 files changed, 8 insertions(+), 33 deletions(-)

diff --git a/awswrangler/neptune/client.py b/awswrangler/neptune/client.py
index 74eb4ab94..c5a7b73c2 100644
--- a/awswrangler/neptune/client.py
+++ b/awswrangler/neptune/client.py
@@ -4,11 +4,11 @@
 from typing import Any, Dict, List, Optional
 
 import boto3
-import nest_asyncio
 import requests
 from botocore.auth import SigV4Auth
 from botocore.awsrequest import AWSRequest
 from gremlin_python.driver import client
+from gremlin_python.driver.aiohttp.transport import AiohttpTransport
 from SPARQLWrapper import SPARQLWrapper
 
 from awswrangler import exceptions
@@ -174,11 +174,15 @@ def write_gremlin(self, query: str) -> bool:
 
     def _execute_gremlin(self, query: str, headers: Any = None) -> List[Dict[str, Any]]:
         try:
-            nest_asyncio.apply()
             uri = f"{HTTP_PROTOCOL}://{self.host}:{self.port}/gremlin"
             request = self._prepare_request("GET", uri, headers=headers)
             ws_url = f"{WS_PROTOCOL}://{self.host}:{self.port}/gremlin"
-            c = client.Client(ws_url, "g", headers=dict(request.headers))
+            c = client.Client(
+                ws_url,
+                "g",
+                headers=dict(request.headers),
+                transport_factory=lambda: AiohttpTransport(call_from_event_loop=True),
+            )
             result = c.submit(query)
             future_results = result.all()
             results = future_results.result()
diff --git a/poetry.lock b/poetry.lock
index f14aeb6a6..39f0970c4 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -2307,7 +2307,7 @@ sqlserver = ["pyodbc"]
 [metadata]
 lock-version = "1.1"
 python-versions = ">=3.6.2, <3.11"
-content-hash = "de3d0a6b2863272884719e0bba489b7e0df29c6621176faab55f6b729043f1da"
+content-hash = "80f13438c6033d51c3020003cf52d6df176ec57c5bad69110945a01ec57f2785"
 
 [metadata.files]
 aenum = [
@@ -3013,9 +3013,6 @@ markupsafe = [
     {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"},
     {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"},
     {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"},
-    {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"},
-    {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"},
-    {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"},
     {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"},
     {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, @@ -3027,9 +3024,6 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, @@ -3041,9 +3035,6 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, @@ -3056,9 +3047,6 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, @@ -3071,9 +3059,6 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, @@ -3336,11 +3321,8 @@ pandas = [ {file = "pandas-1.3.3-cp39-cp39-win32.whl", hash = "sha256:f7d84f321674c2f0f31887ee6d5755c54ca1ea5e144d6d54b3bbf566dd9ea0cc"}, {file = "pandas-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:e574c2637c9d27f322e911650b36e858c885702c5996eda8a5a60e35e6648cf2"}, {file = "pandas-1.3.3.tar.gz", hash = "sha256:272c8cb14aa9793eada6b1ebe81994616e647b5892a370c7135efb2924b701df"}, - {file = "pandas-1.3.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9707bdc1ea9639c886b4d3be6e2a45812c1ac0c2080f94c31b71c9fa35556f9b"}, - {file = "pandas-1.3.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c2f44425594ae85e119459bb5abb0748d76ef01d9c08583a667e3339e134218e"}, {file = "pandas-1.3.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:372d72a3d8a5f2dbaf566a5fa5fa7f230842ac80f29a931fb4b071502cf86b9a"}, {file = "pandas-1.3.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d99d2350adb7b6c3f7f8f0e5dfb7d34ff8dd4bc0a53e62c445b7e43e163fce63"}, - {file = "pandas-1.3.4-cp310-cp310-win_amd64.whl", hash = "sha256:4acc28364863127bca1029fb72228e6f473bb50c32e77155e80b410e2068eeac"}, {file = "pandas-1.3.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c2646458e1dce44df9f71a01dc65f7e8fa4307f29e5c0f2f92c97f47a5bf22f5"}, {file = "pandas-1.3.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5298a733e5bfbb761181fd4672c36d0c627320eb999c59c65156c6a90c7e1b4f"}, {file = "pandas-1.3.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22808afb8f96e2269dcc5b846decacb2f526dd0b47baebc63d913bf847317c8f"}, @@ -3624,32 +3606,24 @@ pyzmq = [ {file = "pyzmq-22.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f89468059ebc519a7acde1ee50b779019535db8dcf9b8c162ef669257fef7a93"}, {file = "pyzmq-22.3.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea12133df25e3a6918718fbb9a510c6ee5d3fdd5a346320421aac3882f4feeea"}, {file = "pyzmq-22.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c532fd68b93998aab92356be280deec5de8f8fe59cd28763d2cc8a58747b7f"}, - {file = "pyzmq-22.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f907c7359ce8bf7f7e63c82f75ad0223384105f5126f313400b7e8004d9b33c3"}, - {file = "pyzmq-22.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:902319cfe23366595d3fa769b5b751e6ee6750a0a64c5d9f757d624b2ac3519e"}, {file = "pyzmq-22.3.0-cp310-cp310-win32.whl", hash = "sha256:67db33bea0a29d03e6eeec55a8190e033318cee3cbc732ba8fd939617cbf762d"}, {file = "pyzmq-22.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:7661fc1d5cb73481cf710a1418a4e1e301ed7d5d924f91c67ba84b2a1b89defd"}, {file = "pyzmq-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79244b9e97948eaf38695f4b8e6fc63b14b78cc37f403c6642ba555517ac1268"}, {file = "pyzmq-22.3.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab888624ed68930442a3f3b0b921ad7439c51ba122dbc8c386e6487a658e4a4e"}, {file = "pyzmq-22.3.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18cd854b423fce44951c3a4d3e686bac8f1243d954f579e120a1714096637cc0"}, {file = "pyzmq-22.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:de8df0684398bd74ad160afdc2a118ca28384ac6f5e234eb0508858d8d2d9364"}, - {file = "pyzmq-22.3.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:62bcade20813796c426409a3e7423862d50ff0639f5a2a95be4b85b09a618666"}, - {file = "pyzmq-22.3.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ea5a79e808baef98c48c884effce05c31a0698c1057de8fc1c688891043c1ce1"}, {file = "pyzmq-22.3.0-cp36-cp36m-win32.whl", hash = "sha256:3c1895c95be92600233e476fe283f042e71cf8f0b938aabf21b7aafa62a8dac9"}, {file = "pyzmq-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:851977788b9caa8ed011f5f643d3ee8653af02c5fc723fa350db5125abf2be7b"}, {file = "pyzmq-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b4ebed0977f92320f6686c96e9e8dd29eed199eb8d066936bac991afc37cbb70"}, {file = "pyzmq-22.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42abddebe2c6a35180ca549fadc7228d23c1e1f76167c5ebc8a936b5804ea2df"}, {file = "pyzmq-22.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1e41b32d6f7f9c26bc731a8b529ff592f31fc8b6ef2be9fa74abd05c8a342d7"}, {file = "pyzmq-22.3.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:be4e0f229cf3a71f9ecd633566bd6f80d9fa6afaaff5489492be63fe459ef98c"}, - {file = "pyzmq-22.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:08c4e315a76ef26eb833511ebf3fa87d182152adf43dedee8d79f998a2162a0b"}, - {file = "pyzmq-22.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:badb868fff14cfd0e200eaa845887b1011146a7d26d579aaa7f966c203736b92"}, {file = "pyzmq-22.3.0-cp37-cp37m-win32.whl", hash = "sha256:7c58f598d9fcc52772b89a92d72bf8829c12d09746a6d2c724c5b30076c1f11d"}, {file = "pyzmq-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2b97502c16a5ec611cd52410bdfaab264997c627a46b0f98d3f666227fd1ea2d"}, {file = "pyzmq-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d728b08448e5ac3e4d886b165385a262883c34b84a7fe1166277fe675e1c197a"}, {file = "pyzmq-22.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:480b9931bfb08bf8b094edd4836271d4d6b44150da051547d8c7113bf947a8b0"}, {file = "pyzmq-22.3.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7dc09198e4073e6015d9a8ea093fc348d4e59de49382476940c3dd9ae156fba8"}, {file = "pyzmq-22.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ca6cd58f62a2751728016d40082008d3b3412a7f28ddfb4a2f0d3c130f69e74"}, - {file = "pyzmq-22.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:468bd59a588e276961a918a3060948ae68f6ff5a7fa10bb2f9160c18fe341067"}, - {file = "pyzmq-22.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c88fa7410e9fc471e0858638f403739ee869924dd8e4ae26748496466e27ac59"}, {file = "pyzmq-22.3.0-cp38-cp38-win32.whl", hash = "sha256:c0f84360dcca3481e8674393bdf931f9f10470988f87311b19d23cda869bb6b7"}, {file = "pyzmq-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:f762442bab706fd874064ca218b33a1d8e40d4938e96c24dafd9b12e28017f45"}, {file = "pyzmq-22.3.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:954e73c9cd4d6ae319f1c936ad159072b6d356a92dcbbabfd6e6204b9a79d356"}, @@ -3657,8 +3631,6 @@ pyzmq = [ {file = "pyzmq-22.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:acebba1a23fb9d72b42471c3771b6f2f18dcd46df77482612054bd45c07dfa36"}, {file = "pyzmq-22.3.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cf98fd7a6c8aaa08dbc699ffae33fd71175696d78028281bc7b832b26f00ca57"}, {file = "pyzmq-22.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d072f7dfbdb184f0786d63bda26e8a0882041b1e393fbe98940395f7fab4c5e2"}, - {file = "pyzmq-22.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:53f4fd13976789ffafedd4d46f954c7bb01146121812b72b4ddca286034df966"}, - {file = "pyzmq-22.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d1b5d457acbadcf8b27561deeaa386b0217f47626b29672fa7bd31deb6e91e1b"}, {file = "pyzmq-22.3.0-cp39-cp39-win32.whl", hash = "sha256:e6a02cf7271ee94674a44f4e62aa061d2d049001c844657740e156596298b70b"}, {file = "pyzmq-22.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d3dcb5548ead4f1123851a5ced467791f6986d68c656bc63bfff1bf9e36671e2"}, {file = "pyzmq-22.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3a4c9886d61d386b2b493377d980f502186cd71d501fffdba52bd2a0880cef4f"}, diff --git a/pyproject.toml b/pyproject.toml index f54f16b56..2321c060b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -51,7 +51,6 @@ xlrd = { version = "^2.0.1", python = "~3.6" } xlwt = { version = "^1.3.0", python = "~3.6" } gremlinpython = "^3.5.2" backoff = "^1.11.1" -nest-asyncio = "^1.5.4" SPARQLWrapper = "^1.8.5" pyodbc = { version = "~4.0.32", optional = true } From 
6ba9ccd84be7379d54355efeb467421752164113 Mon Sep 17 00:00:00 2001 From: Anton Kukushkin <3997468+kukushking@users.noreply.github.com> Date: Tue, 15 Mar 2022 17:25:15 +0000 Subject: [PATCH 26/32] Use built-in gremlin driver functionality to enable event loop nesting --- awswrangler/neptune/client.py | 4 +--- poetry.lock | 30 +----------------------------- pyproject.toml | 1 - 3 files changed, 2 insertions(+), 33 deletions(-) diff --git a/awswrangler/neptune/client.py b/awswrangler/neptune/client.py index 74eb4ab94..7819bf4b8 100644 --- a/awswrangler/neptune/client.py +++ b/awswrangler/neptune/client.py @@ -4,7 +4,6 @@ from typing import Any, Dict, List, Optional import boto3 -import nest_asyncio import requests from botocore.auth import SigV4Auth from botocore.awsrequest import AWSRequest @@ -174,11 +173,10 @@ def write_gremlin(self, query: str) -> bool: def _execute_gremlin(self, query: str, headers: Any = None) -> List[Dict[str, Any]]: try: - nest_asyncio.apply() uri = f"{HTTP_PROTOCOL}://{self.host}:{self.port}/gremlin" request = self._prepare_request("GET", uri, headers=headers) ws_url = f"{WS_PROTOCOL}://{self.host}:{self.port}/gremlin" - c = client.Client(ws_url, "g", headers=dict(request.headers)) + c = client.Client(ws_url, "g", headers=dict(request.headers), call_from_event_loop=True) result = c.submit(query) future_results = result.all() results = future_results.result() diff --git a/poetry.lock b/poetry.lock index f14aeb6a6..39f0970c4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2307,7 +2307,7 @@ sqlserver = ["pyodbc"] [metadata] lock-version = "1.1" python-versions = ">=3.6.2, <3.11" -content-hash = "de3d0a6b2863272884719e0bba489b7e0df29c6621176faab55f6b729043f1da" +content-hash = "80f13438c6033d51c3020003cf52d6df176ec57c5bad69110945a01ec57f2785" [metadata.files] aenum = [ @@ -3013,9 +3013,6 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, @@ -3027,9 +3024,6 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, {file = 
"MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, @@ -3041,9 +3035,6 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, @@ -3056,9 +3047,6 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, - {file = 
"MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, @@ -3071,9 +3059,6 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, @@ -3336,11 +3321,8 @@ pandas = [ {file = "pandas-1.3.3-cp39-cp39-win32.whl", hash = "sha256:f7d84f321674c2f0f31887ee6d5755c54ca1ea5e144d6d54b3bbf566dd9ea0cc"}, {file = "pandas-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:e574c2637c9d27f322e911650b36e858c885702c5996eda8a5a60e35e6648cf2"}, {file = "pandas-1.3.3.tar.gz", hash = "sha256:272c8cb14aa9793eada6b1ebe81994616e647b5892a370c7135efb2924b701df"}, - {file = "pandas-1.3.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9707bdc1ea9639c886b4d3be6e2a45812c1ac0c2080f94c31b71c9fa35556f9b"}, - {file = "pandas-1.3.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c2f44425594ae85e119459bb5abb0748d76ef01d9c08583a667e3339e134218e"}, {file = "pandas-1.3.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:372d72a3d8a5f2dbaf566a5fa5fa7f230842ac80f29a931fb4b071502cf86b9a"}, {file = "pandas-1.3.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d99d2350adb7b6c3f7f8f0e5dfb7d34ff8dd4bc0a53e62c445b7e43e163fce63"}, - {file = "pandas-1.3.4-cp310-cp310-win_amd64.whl", hash = "sha256:4acc28364863127bca1029fb72228e6f473bb50c32e77155e80b410e2068eeac"}, {file = "pandas-1.3.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c2646458e1dce44df9f71a01dc65f7e8fa4307f29e5c0f2f92c97f47a5bf22f5"}, {file = "pandas-1.3.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5298a733e5bfbb761181fd4672c36d0c627320eb999c59c65156c6a90c7e1b4f"}, {file = "pandas-1.3.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22808afb8f96e2269dcc5b846decacb2f526dd0b47baebc63d913bf847317c8f"}, @@ -3624,32 +3606,24 @@ pyzmq = [ {file = "pyzmq-22.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f89468059ebc519a7acde1ee50b779019535db8dcf9b8c162ef669257fef7a93"}, {file = "pyzmq-22.3.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea12133df25e3a6918718fbb9a510c6ee5d3fdd5a346320421aac3882f4feeea"}, {file = "pyzmq-22.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c532fd68b93998aab92356be280deec5de8f8fe59cd28763d2cc8a58747b7f"}, - {file = "pyzmq-22.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f907c7359ce8bf7f7e63c82f75ad0223384105f5126f313400b7e8004d9b33c3"}, - {file = "pyzmq-22.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:902319cfe23366595d3fa769b5b751e6ee6750a0a64c5d9f757d624b2ac3519e"}, {file = "pyzmq-22.3.0-cp310-cp310-win32.whl", hash = "sha256:67db33bea0a29d03e6eeec55a8190e033318cee3cbc732ba8fd939617cbf762d"}, {file = "pyzmq-22.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:7661fc1d5cb73481cf710a1418a4e1e301ed7d5d924f91c67ba84b2a1b89defd"}, {file = "pyzmq-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79244b9e97948eaf38695f4b8e6fc63b14b78cc37f403c6642ba555517ac1268"}, {file = "pyzmq-22.3.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab888624ed68930442a3f3b0b921ad7439c51ba122dbc8c386e6487a658e4a4e"}, {file = "pyzmq-22.3.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18cd854b423fce44951c3a4d3e686bac8f1243d954f579e120a1714096637cc0"}, {file = "pyzmq-22.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:de8df0684398bd74ad160afdc2a118ca28384ac6f5e234eb0508858d8d2d9364"}, - {file = "pyzmq-22.3.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:62bcade20813796c426409a3e7423862d50ff0639f5a2a95be4b85b09a618666"}, - {file = "pyzmq-22.3.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ea5a79e808baef98c48c884effce05c31a0698c1057de8fc1c688891043c1ce1"}, {file = "pyzmq-22.3.0-cp36-cp36m-win32.whl", hash = "sha256:3c1895c95be92600233e476fe283f042e71cf8f0b938aabf21b7aafa62a8dac9"}, {file = "pyzmq-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:851977788b9caa8ed011f5f643d3ee8653af02c5fc723fa350db5125abf2be7b"}, {file = "pyzmq-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b4ebed0977f92320f6686c96e9e8dd29eed199eb8d066936bac991afc37cbb70"}, {file = "pyzmq-22.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42abddebe2c6a35180ca549fadc7228d23c1e1f76167c5ebc8a936b5804ea2df"}, {file = "pyzmq-22.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1e41b32d6f7f9c26bc731a8b529ff592f31fc8b6ef2be9fa74abd05c8a342d7"}, {file = "pyzmq-22.3.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:be4e0f229cf3a71f9ecd633566bd6f80d9fa6afaaff5489492be63fe459ef98c"}, - {file = "pyzmq-22.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:08c4e315a76ef26eb833511ebf3fa87d182152adf43dedee8d79f998a2162a0b"}, - {file = "pyzmq-22.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:badb868fff14cfd0e200eaa845887b1011146a7d26d579aaa7f966c203736b92"}, {file = "pyzmq-22.3.0-cp37-cp37m-win32.whl", hash = "sha256:7c58f598d9fcc52772b89a92d72bf8829c12d09746a6d2c724c5b30076c1f11d"}, {file = 
"pyzmq-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2b97502c16a5ec611cd52410bdfaab264997c627a46b0f98d3f666227fd1ea2d"}, {file = "pyzmq-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d728b08448e5ac3e4d886b165385a262883c34b84a7fe1166277fe675e1c197a"}, {file = "pyzmq-22.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:480b9931bfb08bf8b094edd4836271d4d6b44150da051547d8c7113bf947a8b0"}, {file = "pyzmq-22.3.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7dc09198e4073e6015d9a8ea093fc348d4e59de49382476940c3dd9ae156fba8"}, {file = "pyzmq-22.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ca6cd58f62a2751728016d40082008d3b3412a7f28ddfb4a2f0d3c130f69e74"}, - {file = "pyzmq-22.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:468bd59a588e276961a918a3060948ae68f6ff5a7fa10bb2f9160c18fe341067"}, - {file = "pyzmq-22.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c88fa7410e9fc471e0858638f403739ee869924dd8e4ae26748496466e27ac59"}, {file = "pyzmq-22.3.0-cp38-cp38-win32.whl", hash = "sha256:c0f84360dcca3481e8674393bdf931f9f10470988f87311b19d23cda869bb6b7"}, {file = "pyzmq-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:f762442bab706fd874064ca218b33a1d8e40d4938e96c24dafd9b12e28017f45"}, {file = "pyzmq-22.3.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:954e73c9cd4d6ae319f1c936ad159072b6d356a92dcbbabfd6e6204b9a79d356"}, @@ -3657,8 +3631,6 @@ pyzmq = [ {file = "pyzmq-22.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:acebba1a23fb9d72b42471c3771b6f2f18dcd46df77482612054bd45c07dfa36"}, {file = "pyzmq-22.3.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cf98fd7a6c8aaa08dbc699ffae33fd71175696d78028281bc7b832b26f00ca57"}, {file = "pyzmq-22.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d072f7dfbdb184f0786d63bda26e8a0882041b1e393fbe98940395f7fab4c5e2"}, - {file = "pyzmq-22.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:53f4fd13976789ffafedd4d46f954c7bb01146121812b72b4ddca286034df966"}, - {file = "pyzmq-22.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d1b5d457acbadcf8b27561deeaa386b0217f47626b29672fa7bd31deb6e91e1b"}, {file = "pyzmq-22.3.0-cp39-cp39-win32.whl", hash = "sha256:e6a02cf7271ee94674a44f4e62aa061d2d049001c844657740e156596298b70b"}, {file = "pyzmq-22.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d3dcb5548ead4f1123851a5ced467791f6986d68c656bc63bfff1bf9e36671e2"}, {file = "pyzmq-22.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3a4c9886d61d386b2b493377d980f502186cd71d501fffdba52bd2a0880cef4f"}, diff --git a/pyproject.toml b/pyproject.toml index f54f16b56..2321c060b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -51,7 +51,6 @@ xlrd = { version = "^2.0.1", python = "~3.6" } xlwt = { version = "^1.3.0", python = "~3.6" } gremlinpython = "^3.5.2" backoff = "^1.11.1" -nest-asyncio = "^1.5.4" SPARQLWrapper = "^1.8.5" pyodbc = { version = "~4.0.32", optional = true } From 6bf0450fd985e558fb3d11b28963e582f027ce6b Mon Sep 17 00:00:00 2001 From: Dave Bechberger Date: Fri, 18 Mar 2022 10:15:23 -0800 Subject: [PATCH 27/32] Updated the connection handling for Gremlin and added a tutorial notebook for Amazon Neptune --- awswrangler/neptune/client.py | 31 +- awswrangler/neptune/neptune.py | 18 +- tests/test_neptune.py | 27 +- tutorials/033 - Amazon Neptune.ipynb | 504 +++++++++++++++++++++++++++ 4 files changed, 556 insertions(+), 24 deletions(-) create mode 100644 tutorials/033 - 
Amazon Neptune.ipynb

diff --git a/awswrangler/neptune/client.py b/awswrangler/neptune/client.py
index c5a7b73c2..aa8320fdb 100644
--- a/awswrangler/neptune/client.py
+++ b/awswrangler/neptune/client.py
@@ -42,7 +42,12 @@ def __init__(
         else:
             self.region = region
         self._http_session = requests.Session()
+        self.gremlin_connection = None
 
+    def __del__(self):
+        if isinstance(self.gremlin_connection, client.Client):
+            self.gremlin_connection.close()
+
     def __get_region_from_session(self) -> str:
         """Extract region from session."""
         region: Optional[str] = self.boto3_session.region_name
@@ -174,25 +179,29 @@ def write_gremlin(self, query: str) -> bool:
 
     def _execute_gremlin(self, query: str, headers: Any = None) -> List[Dict[str, Any]]:
         try:
-            uri = f"{HTTP_PROTOCOL}://{self.host}:{self.port}/gremlin"
-            request = self._prepare_request("GET", uri, headers=headers)
-            ws_url = f"{WS_PROTOCOL}://{self.host}:{self.port}/gremlin"
-            c = client.Client(
-                ws_url,
-                "g",
-                headers=dict(request.headers),
-                transport_factory=AiohttpTransport(call_from_event_loop=True),
-            )
+            c = self._get_gremlin_connection(headers)
             result = c.submit(query)
             future_results = result.all()
             results = future_results.result()
-            c.close()
             return GremlinParser.gremlin_results_to_dict(results)
         except Exception as e:
-            c.close()
+            if isinstance(self.gremlin_connection, client.Client):
+                self.gremlin_connection.close()
+            self.gremlin_connection = None
             _logger.error(e)
             raise exceptions.QueryFailed(e)
 
+    def _get_gremlin_connection(self, headers: Any = None) -> client.Client:
+        if self.gremlin_connection is None:
+            nest_asyncio.apply()
+            uri = f"{HTTP_PROTOCOL}://{self.host}:{self.port}/gremlin"
+            request = self._prepare_request("GET", uri, headers=headers)
+            ws_url = f"{WS_PROTOCOL}://{self.host}:{self.port}/gremlin"
+            self.gremlin_connection = client.Client(ws_url, "g", headers=dict(request.headers))
+        return self.gremlin_connection
+
+
     def read_sparql(self, query: str, headers: Any = None) -> Any:
         """Execute the given query and returns the results.
 
diff --git a/awswrangler/neptune/neptune.py b/awswrangler/neptune/neptune.py
index 104c1dd88..22495eed2 100644
--- a/awswrangler/neptune/neptune.py
+++ b/awswrangler/neptune/neptune.py
@@ -110,7 +110,7 @@ def execute_sparql(client: NeptuneClient, query: str) -> pd.DataFrame:
 
 
 def to_property_graph(
-    client: NeptuneClient, df: pd.DataFrame, batch_size: int = 50, use_header_cardinality: bool = False
+    client: NeptuneClient, df: pd.DataFrame, batch_size: int = 50, use_header_cardinality: bool = True
 ) -> bool:
     """Write records stored in a DataFrame into Amazon Neptune.
 
@@ -123,8 +123,8 @@ def to_property_graph(
     exists an exception will be thrown.
 
     If you would like to save data using `single` cardinality then you can postfix (single) to the column header and
-    set use_header_cardinality=True. e.g. A column named `name(single)` will save the `name` property as single
-    cardinality.
+    set use_header_cardinality=True (default). e.g. A column named `name(single)` will save the `name` property as single
+    cardinality. You can disable this by setting `use_header_cardinality=False`.
 
     Parameters
     ----------
@@ -132,10 +132,8 @@ def to_property_graph(
         instance of the neptune client to use
     df : pandas.DataFrame
        Pandas DataFrame https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.html
-    batch_size : int
-        The number of rows to save at a time. Default 50
-    use_header_cardinality : bool
-        If True, then the header cardinality will be used to save the data.
Default False + batch_size: The number of rows to save at a time. Default 50 + use_header_cardinality: If True, then the header cardinality will be used to save the data. Default True Returns ------- @@ -295,7 +293,7 @@ def _set_properties(g: GraphTraversalSource, use_header_cardinality: bool, row: g = _expand_properties(g, _get_column_name(column), value) else: # If not using header cardinality then use the default of set - g = _expand_properties(g, _get_column_name(column), value) + g = _expand_properties(g, column, value) return g @@ -303,9 +301,9 @@ def _expand_properties(g: GraphTraversalSource, column: str, value: Any) -> Grap # If this is a list then expand it out into multiple property calls if isinstance(value, list) and len(value) > 0: for item in value: - g = g.property(Cardinality.set_, _get_column_name(column), item) + g = g.property(Cardinality.set_, column, item) else: - g = g.property(Cardinality.set_, _get_column_name(column), value) + g = g.property(Cardinality.set_, column, value) return g diff --git a/tests/test_neptune.py b/tests/test_neptune.py index fc517b64c..3599aa816 100644 --- a/tests/test_neptune.py +++ b/tests/test_neptune.py @@ -17,7 +17,12 @@ @pytest.fixture(scope="session") def cloudformation_outputs(): - return extract_cloudformation_outputs() + #return extract_cloudformation_outputs() + outputs = {} + outputs['NeptuneClusterEndpoint'] = 'air-routes-oc.cluster-cei5pmtr7fqq.us-west-2.neptune.amazonaws.com' + outputs['NeptunePort'] = 8182 + outputs['NeptuneIAMEnabled'] = False + return outputs @pytest.fixture(scope="session") @@ -72,8 +77,8 @@ def test_opencypher_query(neptune_endpoint, neptune_port) -> Dict[str, Any]: def test_flatten_df(neptune_endpoint, neptune_port) -> Dict[str, Any]: client = wr.neptune.connect(neptune_endpoint, neptune_port, iam_enabled=False) - wr.neptune.execute_opencypher(client, "create (a:Foo { name: 'foo' })-[:TEST]->(b {name : 'bar'})") - df = wr.neptune.execute_opencypher(client, "MATCH (n:Foo) RETURN n LIMIT 1") + wr.neptune.execute_opencypher(client, "create (a:Foo1 { name: 'foo' })-[:TEST]->(b {name : 'bar'})") + df = wr.neptune.execute_opencypher(client, "MATCH (n:Foo1) RETURN n LIMIT 1") df_test = wr.neptune.flatten_nested_df(df) assert isinstance(df_test, pd.DataFrame) assert df_test.shape == (1, 6) @@ -192,6 +197,11 @@ def test_gremlin_write_updates(neptune_endpoint, neptune_port) -> Dict[str, Any] saved_row = res.iloc[0] assert saved_row["age"] == 55 assert saved_row["name"] == ["foo", "bar"] + res = wr.neptune.to_property_graph(client, df, use_header_cardinality=False) + res = wr.neptune.execute_gremlin(client, f"g.V('{id}').valueMap().with(WithOptions.tokens)") + saved_row = res.iloc[0] + assert saved_row["age(single)"] == 55 + assert saved_row["name"] == ["foo", "bar"] def test_gremlin_write_vertices(neptune_endpoint, neptune_port) -> Dict[str, Any]: @@ -244,6 +254,17 @@ def test_gremlin_write_vertices(neptune_endpoint, neptune_port) -> Dict[str, Any assert saved_row[T.label] == original_row["~label"] assert saved_row["int"] == v2["int"] assert len(saved_row["str"]) == 2 + + # Check that it is respecting the header cardinality + df = pd.DataFrame([v2]) + df.rename(columns={"int": "int(single)"}, inplace=True) + res = wr.neptune.to_property_graph(client, df, use_header_cardinality=True) + res = wr.neptune.execute_gremlin(client, f"g.V('{original_row['~id']}').valueMap().with(WithOptions.tokens)") + saved_row = res.iloc[0] + assert saved_row[T.id] == original_row["~id"] + assert saved_row[T.label] == 
original_row["~label"] + assert saved_row["int"] == v2["int"] + assert len(saved_row["str"]) == 2 def test_gremlin_write_edges(neptune_endpoint, neptune_port) -> Dict[str, Any]: diff --git a/tutorials/033 - Amazon Neptune.ipynb b/tutorials/033 - Amazon Neptune.ipynb new file mode 100644 index 000000000..b79c860f6 --- /dev/null +++ b/tutorials/033 - Amazon Neptune.ipynb @@ -0,0 +1,504 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "b0ee9a28", + "metadata": {}, + "source": [ + "# Initialize\n", + "\n", + "The first step to using AWS Data Wrangler with Amazon Neptune is to import the library and create a client connection.\n", + "\n", + "
Note: Connecting to Amazon Neptune requires that the application you are running has access to the Private VPC where Neptune is located. Without this access you will not be able to connect using AWS Data Wrangler.
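\n",
+    "\n",
+    "If your cluster has IAM database authentication enabled, the same `connect` call can be used with `iam_enabled=True`; requests are then signed with SigV4 using your boto3 credentials. A minimal sketch, with a hypothetical endpoint:\n",
+    "\n",
+    "```python\n",
+    "client = wr.neptune.connect(\n",
+    "    'my-cluster.cluster-xxxxxxxx.us-east-1.neptune.amazonaws.com',  # hypothetical endpoint\n",
+    "    8182,\n",
+    "    iam_enabled=True\n",
+    ")\n",
+    "```"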
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fd098b2c", + "metadata": {}, + "outputs": [], + "source": [ + "import awswrangler as wr\n", + "import pandas as pd\n", + "\n", + "url='air-routes-oc.cluster-cei5pmtr7fqq.us-west-2.neptune.amazonaws.com' # The Neptune Cluster endpoint\n", + "iam_enabled = False # Set to True/False based on the configuration of your cluster\n", + "neptune_port = 8182 # Set to the Neptune Cluster Port, Default is 8182\n", + "client = wr.neptune.connect(url, neptune_port, iam_enabled=iam_enabled)" + ] + }, + { + "cell_type": "markdown", + "id": "1e9499ea", + "metadata": {}, + "source": [ + "# Return the status of the cluster" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "57903cf4", + "metadata": {}, + "outputs": [], + "source": [ + "print(client.status())" + ] + }, + { + "cell_type": "markdown", + "id": "6f13f0cb", + "metadata": {}, + "source": [ + "# Retrieve Data from Neptune using AWS Data Wrangler\n", + "\n", + "AWS Data Wrangler supports querying Amazon Neptune using TinkerPop Gremlin and openCypher for property graph data or SPARQL for RDF data.\n", + "\n", + "## Gremlin" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2801f447", + "metadata": {}, + "outputs": [], + "source": [ + "query = \"g.E().project('source', 'target').by(outV().id()).by(inV().id()).limit(5)\"\n", + "df = wr.neptune.execute_gremlin(client, query)\n", + "display(df.head(5))" + ] + }, + { + "cell_type": "markdown", + "id": "a7666d80", + "metadata": {}, + "source": [ + "## SPARQL" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "91b52363", + "metadata": {}, + "outputs": [], + "source": [ + "query = \"SELECT ?s ?o WHERE { ?s ?p ?o .} LIMIT 5\"\n", + "df = wr.neptune.execute_sparql(client, query)\n", + "display(df.head(5))" + ] + }, + { + "cell_type": "markdown", + "id": "367791b9", + "metadata": {}, + "source": [ + "## openCypher" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ce5df2ee", + "metadata": {}, + "outputs": [], + "source": [ + "query = \"MATCH (n)-[r]->(d) RETURN id(n) as source, id(d) as target LIMIT 5\"\n", + "df = wr.neptune.execute_opencypher(client, query)\n", + "display(df.head(5))" + ] + }, + { + "cell_type": "markdown", + "id": "f91b967c", + "metadata": {}, + "source": [ + "# Saving Data using AWS Data Wrangler\n", + "\n", + "AWS Data Wrangler supports saving Pandas DataFrames into Amazon Neptune using either a property graph or RDF data model. \n", + "\n", + "## Property Graph\n", + "\n", + "If writing to a property graph then DataFrames for vertices and edges must be written separately. DataFrames for vertices must have a `~label` column with the label and a `~id` column for the vertex id.\n", + "\n", + "If the `~id` column does not exist, the specified id does not exists, or is empty then a new vertex will be added.\n", + "\n", + "If no `~label` column exists then writing to the graph will be treated as an update of the element with the specified `~id` value.\n", + "\n", + "DataFrames for edges must have a `~id`, `~label`, `~to`, and `~from` column. If the `~id` column does not exist the specified id does not exists, or is empty then a new edge will be added. 
+  {
+   "cell_type": "markdown",
+   "id": "f91b967c",
+   "metadata": {},
+   "source": [
+    "# Saving Data using AWS Data Wrangler\n",
+    "\n",
+    "AWS Data Wrangler supports saving Pandas DataFrames into Amazon Neptune using either a property graph or RDF data model.\n",
+    "\n",
+    "## Property Graph\n",
+    "\n",
+    "If writing to a property graph then DataFrames for vertices and edges must be written separately. DataFrames for vertices must have a `~label` column with the label and a `~id` column for the vertex id.\n",
+    "\n",
+    "If the `~id` column does not exist, the specified id does not exist, or is empty then a new vertex will be added.\n",
+    "\n",
+    "If no `~label` column exists then writing to the graph will be treated as an update of the element with the specified `~id` value.\n",
+    "\n",
+    "DataFrames for edges must have a `~id`, `~label`, `~to`, and `~from` column. If the `~id` column does not exist, the specified id does not exist, or is empty then a new edge will be added. If no `~label`, `~to`, or `~from` column exists an exception will be thrown.\n",
+    "\n",
+    "### Add Vertices/Nodes"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "579fd9c0",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import uuid\n",
+    "import random\n",
+    "import string\n",
+    "def _create_dummy_vertex():\n",
+    "    data = dict()\n",
+    "    data[\"~id\"] = uuid.uuid4()\n",
+    "    data[\"~label\"] = \"foo\"\n",
+    "    data[\"int\"] = random.randint(0, 1000)\n",
+    "    data[\"str\"] = \"\".join(random.choice(string.ascii_lowercase) for i in range(10))\n",
+    "    data[\"list\"] = [random.randint(0, 1000), random.randint(0, 1000)]\n",
+    "    return data\n",
+    "\n",
+    "data = [_create_dummy_vertex(), _create_dummy_vertex(), _create_dummy_vertex()]\n",
+    "df = pd.DataFrame(data)\n",
+    "res = wr.neptune.to_property_graph(client, df)\n",
+    "query = f\"MATCH (s) WHERE id(s)='{data[0]['~id']}' RETURN s\"\n",
+    "df = wr.neptune.execute_opencypher(client, query)\n",
+    "display(df)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "fd5fc8a2",
+   "metadata": {},
+   "source": [
+    "### Add Edges"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "515f7a0f",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import uuid\n",
+    "import random\n",
+    "import string\n",
+    "def _create_dummy_edge():\n",
+    "    data = dict()\n",
+    "    data[\"~id\"] = uuid.uuid4()\n",
+    "    data[\"~label\"] = \"bar\"\n",
+    "    data[\"~to\"] = uuid.uuid4()\n",
+    "    data[\"~from\"] = uuid.uuid4()\n",
+    "    data[\"int\"] = random.randint(0, 1000)\n",
+    "    data[\"str\"] = \"\".join(random.choice(string.ascii_lowercase) for i in range(10))\n",
+    "    return data\n",
+    "\n",
+    "data = [_create_dummy_edge(), _create_dummy_edge(), _create_dummy_edge()]\n",
+    "df = pd.DataFrame(data)\n",
+    "res = wr.neptune.to_property_graph(client, df)\n",
+    "query = f\"MATCH (s)-[r]->(d) WHERE id(r)='{data[0]['~id']}' RETURN r\"\n",
+    "df = wr.neptune.execute_opencypher(client, query)\n",
+    "display(df)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "efe6eaaf",
+   "metadata": {},
+   "source": [
+    "### Update Existing Nodes"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "d831c7a3",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "idval=uuid.uuid4()\n",
+    "wr.neptune.execute_gremlin(client, f\"g.addV().property(T.id, '{str(idval)}')\")\n",
+    "query = f\"MATCH (s) WHERE id(s)='{idval}' RETURN s\"\n",
+    "df = wr.neptune.execute_opencypher(client, query)\n",
+    "print(\"Before\")\n",
+    "display(df)\n",
+    "data = [{\"~id\": idval, \"age\": 50}]\n",
+    "df = pd.DataFrame(data)\n",
+    "res = wr.neptune.to_property_graph(client, df)\n",
+    "df = wr.neptune.execute_opencypher(client, query)\n",
+    "print(\"After\")\n",
+    "display(df)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "bff6a1fc",
+   "metadata": {},
+   "source": [
+    "### Setting cardinality based on the header\n",
+    "\n",
+    "If you would like to save data using `single` cardinality then you can postfix (single) to the column header and set `use_header_cardinality=True` (default). e.g. A column named `name(single)` will save the `name` property as single cardinality. You can disable this by setting `use_header_cardinality=False`."
+   ]
+  },
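+  {
+   "cell_type": "markdown",
+   "id": "c0ffee11",
+   "metadata": {},
+   "source": [
+    "As a minimal sketch of the difference, assuming a vertex with id `v1` already exists: with the default `set` cardinality repeated writes accumulate values, while a `(single)` column overwrites the stored value."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "c0ffee12",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# `set` cardinality (use_header_cardinality=False): both ages remain on the vertex\n",
+    "wr.neptune.to_property_graph(client, pd.DataFrame([{'~id': 'v1', 'age': 50}]), use_header_cardinality=False)\n",
+    "wr.neptune.to_property_graph(client, pd.DataFrame([{'~id': 'v1', 'age': 55}]), use_header_cardinality=False)\n",
+    "\n",
+    "# `single` cardinality via the (single) header postfix: the write overwrites any existing value\n",
+    "wr.neptune.to_property_graph(client, pd.DataFrame([{'~id': 'v1', 'age(single)': 60}]))"
+   ]
+  },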
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "1010c2f5",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Adding (single) to the column name in the DataFrame will cause it to write that property as `single` cardinality\n",
+    "df.rename(columns={\"int\": \"int(single)\"}, inplace=True)\n",
+    "res = wr.neptune.to_property_graph(client, df, use_header_cardinality=True)\n",
+    "\n",
+    "\n",
+    "# This can be disabled by setting `use_header_cardinality = False`\n",
+    "df.rename(columns={\"int\": \"int(single)\"}, inplace=True)\n",
+    "res = wr.neptune.to_property_graph(client, df, use_header_cardinality=False)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "beca9dab",
+   "metadata": {},
+   "source": [
+    "## RDF\n",
+    "\n",
+    "The DataFrame must consist of triples with column names for the subject, predicate, and object specified. If none are provided then `s`, `p`, and `o` are the default.\n",
+    "\n",
+    "If you want to add data into a named graph then you will also need the graph column, default is `g`.\n",
+    "\n",
+    "### Write Triples"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "1f8427b9",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def _create_dummy_triple():\n",
+    "    data = dict()\n",
+    "    data[\"s\"] = \"foo\"\n",
+    "    data[\"p\"] = uuid.uuid4()\n",
+    "    data[\"o\"] = random.randint(0, 1000)\n",
+    "    return data\n",
+    "\n",
+    "data = [_create_dummy_triple(), _create_dummy_triple(), _create_dummy_triple()]\n",
+    "df = pd.DataFrame(data)\n",
+    "res = wr.neptune.to_rdf_graph(client, df)\n",
+    "query = \"SELECT ?o WHERE { <\" + str(data[0]['p']) + \"> ?o .}\"\n",
+    "df = wr.neptune.execute_sparql(client, query)\n",
+    "display(df)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "b7a45c6a",
+   "metadata": {},
+   "source": [
+    "### Write Quads"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "819f6a04",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def _create_dummy_quad():\n",
+    "    data = _create_dummy_triple()\n",
+    "    data[\"g\"] = \"bar\"\n",
+    "    return data\n",
+    "    \n",
+    "data = [_create_dummy_quad(), _create_dummy_quad(), _create_dummy_quad()]\n",
+    "df = pd.DataFrame(data)\n",
+    "res = wr.neptune.to_rdf_graph(client, df)\n",
+    "query = \"SELECT ?o WHERE { <\" + str(data[0]['p']) + \"> ?o .}\"\n",
+    "df = wr.neptune.execute_sparql(client, query)\n",
+    "display(df)"
+   ]
+  },
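+  {
+   "cell_type": "markdown",
+   "id": "0dd5ee01",
+   "metadata": {},
+   "source": [
+    "If your DataFrame uses different column names, the mapping can be passed when writing. A sketch, assuming `to_rdf_graph` accepts `subject_column`, `predicate_column`, and `object_column` parameters for this (check the API reference for the exact names):"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "0dd5ee02",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "df = pd.DataFrame([{'subject': 'foo', 'predicate': str(uuid.uuid4()), 'object': random.randint(0, 1000)}])\n",
+    "# The column-name parameters below are assumptions; see the awswrangler API docs\n",
+    "res = wr.neptune.to_rdf_graph(\n",
+    "    client,\n",
+    "    df,\n",
+    "    subject_column='subject',\n",
+    "    predicate_column='predicate',\n",
+    "    object_column='object'\n",
+    ")"
+   ]
+  },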
+  {
+   "cell_type": "markdown",
+   "id": "8370b377",
+   "metadata": {},
+   "source": [
+    "# Flatten DataFrames\n",
+    "\n",
+    "One of the complexities of working with a row/column paradigm such as Pandas is that graph queries very commonly return complex and nested objects. To help simplify using the results returned from a graph in a more tabular format, we have added a method to flatten the returned Pandas DataFrame.\n",
+    "\n",
+    "## Flattening the DataFrame"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "4488e185",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "client = wr.neptune.connect(url, 8182, iam_enabled=False)\n",
+    "query = \"MATCH (n) RETURN n LIMIT 1\"\n",
+    "df = wr.neptune.execute_opencypher(client, query)\n",
+    "print(\"Original\")\n",
+    "display(df)\n",
+    "df_new=wr.neptune.flatten_nested_df(df)\n",
+    "print(\"Flattened\")\n",
+    "display(df_new)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "9324bff7",
+   "metadata": {},
+   "source": [
+    "## Removing the prefixing of the parent column name"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "7e95099c",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "df_new=wr.neptune.flatten_nested_df(df, include_prefix=False)\n",
+    "display(df_new)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "21738d39",
+   "metadata": {},
+   "source": [
+    "## Specifying the column header separator"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "8f4bcbe3",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "df_new=wr.neptune.flatten_nested_df(df, seperator='|')\n",
+    "display(df_new)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "1bded05b",
+   "metadata": {},
+   "source": [
+    "# Putting it into a workflow"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "9129f173",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "pip install igraph networkx"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "cd49d635",
+   "metadata": {},
+   "source": [
+    "## Running PageRank using NetworkX"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "ecd88fe2",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import networkx as nx\n",
+    "\n",
+    "# Retrieve Data from neptune\n",
+    "client = wr.neptune.connect(url, 8182, iam_enabled=False)\n",
+    "query = \"MATCH (n)-[r]->(d) RETURN id(n) as source, id(d) as target LIMIT 100\"\n",
+    "df = wr.neptune.execute_opencypher(client, query)\n",
+    "\n",
+    "# Run PageRank\n",
+    "G=nx.from_pandas_edgelist(df, edge_attr=True)\n",
+    "pg = nx.pagerank(G)\n",
+    "\n",
+
"# Save values back into Neptune\n", + "rows=[]\n", + "for idx, v in enumerate(g.vs):\n", + " rows.append({'~id': v['name'], 'pageRank_ig(single)': pg[idx]}) \n", + "pg_df=pd.DataFrame(rows, columns=['~id','pageRank_ig(single)'])\n", + "res = wr.neptune.to_property_graph(client, pg_df, use_header_cardinality=True)\n", + "\n", + "# Retrieve newly saved data\n", + "query = \"MATCH (n:airport) WHERE n.pageRank_ig IS NOT NULL RETURN n.code, n.pageRank_ig ORDER BY n.pageRank_ig DESC LIMIT 5\"\n", + "df = wr.neptune.execute_opencypher(client, query)\n", + "display(df)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.12" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} From 462bc0bf94bf4144536fe4165b891879e9003cc6 Mon Sep 17 00:00:00 2001 From: Dave Bechberger Date: Fri, 18 Mar 2022 12:30:44 -0800 Subject: [PATCH 28/32] Fixed validation issues --- awswrangler/neptune/client.py | 16 ++++------------ awswrangler/neptune/neptune.py | 3 ++- poetry.lock | 30 +++++++++++++++++++++++++++++- pyproject.toml | 1 + tests/test_neptune.py | 9 ++------- 5 files changed, 38 insertions(+), 21 deletions(-) diff --git a/awswrangler/neptune/client.py b/awswrangler/neptune/client.py index 0ab6685ed..4326c634e 100644 --- a/awswrangler/neptune/client.py +++ b/awswrangler/neptune/client.py @@ -4,11 +4,11 @@ from typing import Any, Dict, List, Optional import boto3 +import nest_asyncio import requests from botocore.auth import SigV4Auth from botocore.awsrequest import AWSRequest from gremlin_python.driver import client -from gremlin_python.driver.aiohttp.transport import AiohttpTransport from SPARQLWrapper import SPARQLWrapper from awswrangler import exceptions @@ -44,10 +44,11 @@ def __init__( self._http_session = requests.Session() self.gremlin_connection = None - def __del__(self): + def __del__(self) -> None: + """Close the Gremlin connection.""" if isinstance(self.gremlin_connection, client.Client): self.gremlin_connection.close() - + def __get_region_from_session(self) -> str: """Extract region from session.""" region: Optional[str] = self.boto3_session.region_name @@ -179,14 +180,7 @@ def write_gremlin(self, query: str) -> bool: def _execute_gremlin(self, query: str, headers: Any = None) -> List[Dict[str, Any]]: try: -<<<<<<< HEAD c = self._get_gremlin_connection(headers) -======= - uri = f"{HTTP_PROTOCOL}://{self.host}:{self.port}/gremlin" - request = self._prepare_request("GET", uri, headers=headers) - ws_url = f"{WS_PROTOCOL}://{self.host}:{self.port}/gremlin" - c = client.Client(ws_url, "g", headers=dict(request.headers), call_from_event_loop=True) ->>>>>>> 6ba9ccd84be7379d54355efeb467421752164113 result = c.submit(query) future_results = result.all() results = future_results.result() @@ -198,7 +192,6 @@ def _execute_gremlin(self, query: str, headers: Any = None) -> List[Dict[str, An _logger.error(e) raise exceptions.QueryFailed(e) - def _get_gremlin_connection(self, headers: Any = None) -> client.Client: if self.gremlin_connection is None: nest_asyncio.apply() @@ -208,7 +201,6 @@ def _get_gremlin_connection(self, headers: Any = None) -> client.Client: self.gremlin_connection = client.Client(ws_url, "g", headers=dict(request.headers)) return self.gremlin_connection - def read_sparql(self, 
query: str, headers: Any = None) -> Any: """Execute the given query and returns the results. diff --git a/awswrangler/neptune/neptune.py b/awswrangler/neptune/neptune.py index 22495eed2..5f8865e44 100644 --- a/awswrangler/neptune/neptune.py +++ b/awswrangler/neptune/neptune.py @@ -123,7 +123,8 @@ def to_property_graph( exists an exception will be thrown. If you would like to save data using `single` cardinality then you can postfix (single) to the column header and - set use_header_cardinality=True (default). e.g. A column named `name(single)` will save the `name` property as single + set use_header_cardinality=True (default). e.g. A column named `name(single)` will save the `name` property + as single cardinality. You can disable this by setting by setting `use_header_cardinality=False`. Parameters diff --git a/poetry.lock b/poetry.lock index 39f0970c4..8e6e3e91e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2307,7 +2307,7 @@ sqlserver = ["pyodbc"] [metadata] lock-version = "1.1" python-versions = ">=3.6.2, <3.11" -content-hash = "80f13438c6033d51c3020003cf52d6df176ec57c5bad69110945a01ec57f2785" +content-hash = "7cde280e5887ede566e2153c4c64860a11ad5943b892b8ba6cc7f3b07867578a" [metadata.files] aenum = [ @@ -3013,6 +3013,9 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, @@ -3024,6 +3027,9 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = 
"sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, @@ -3035,6 +3041,9 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, @@ -3047,6 +3056,9 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, @@ -3059,6 +3071,9 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, @@ -3321,8 +3336,11 @@ pandas = [ {file = "pandas-1.3.3-cp39-cp39-win32.whl", hash = "sha256:f7d84f321674c2f0f31887ee6d5755c54ca1ea5e144d6d54b3bbf566dd9ea0cc"}, {file = "pandas-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:e574c2637c9d27f322e911650b36e858c885702c5996eda8a5a60e35e6648cf2"}, {file = "pandas-1.3.3.tar.gz", hash = "sha256:272c8cb14aa9793eada6b1ebe81994616e647b5892a370c7135efb2924b701df"}, + {file = "pandas-1.3.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9707bdc1ea9639c886b4d3be6e2a45812c1ac0c2080f94c31b71c9fa35556f9b"}, + {file = "pandas-1.3.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c2f44425594ae85e119459bb5abb0748d76ef01d9c08583a667e3339e134218e"}, {file = "pandas-1.3.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:372d72a3d8a5f2dbaf566a5fa5fa7f230842ac80f29a931fb4b071502cf86b9a"}, {file = "pandas-1.3.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d99d2350adb7b6c3f7f8f0e5dfb7d34ff8dd4bc0a53e62c445b7e43e163fce63"}, + {file = "pandas-1.3.4-cp310-cp310-win_amd64.whl", hash = "sha256:4acc28364863127bca1029fb72228e6f473bb50c32e77155e80b410e2068eeac"}, {file = "pandas-1.3.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c2646458e1dce44df9f71a01dc65f7e8fa4307f29e5c0f2f92c97f47a5bf22f5"}, {file = "pandas-1.3.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5298a733e5bfbb761181fd4672c36d0c627320eb999c59c65156c6a90c7e1b4f"}, {file = "pandas-1.3.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22808afb8f96e2269dcc5b846decacb2f526dd0b47baebc63d913bf847317c8f"}, @@ -3606,24 +3624,32 @@ pyzmq = [ {file = "pyzmq-22.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f89468059ebc519a7acde1ee50b779019535db8dcf9b8c162ef669257fef7a93"}, {file = "pyzmq-22.3.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea12133df25e3a6918718fbb9a510c6ee5d3fdd5a346320421aac3882f4feeea"}, {file = 
"pyzmq-22.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c532fd68b93998aab92356be280deec5de8f8fe59cd28763d2cc8a58747b7f"}, + {file = "pyzmq-22.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f907c7359ce8bf7f7e63c82f75ad0223384105f5126f313400b7e8004d9b33c3"}, + {file = "pyzmq-22.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:902319cfe23366595d3fa769b5b751e6ee6750a0a64c5d9f757d624b2ac3519e"}, {file = "pyzmq-22.3.0-cp310-cp310-win32.whl", hash = "sha256:67db33bea0a29d03e6eeec55a8190e033318cee3cbc732ba8fd939617cbf762d"}, {file = "pyzmq-22.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:7661fc1d5cb73481cf710a1418a4e1e301ed7d5d924f91c67ba84b2a1b89defd"}, {file = "pyzmq-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79244b9e97948eaf38695f4b8e6fc63b14b78cc37f403c6642ba555517ac1268"}, {file = "pyzmq-22.3.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab888624ed68930442a3f3b0b921ad7439c51ba122dbc8c386e6487a658e4a4e"}, {file = "pyzmq-22.3.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18cd854b423fce44951c3a4d3e686bac8f1243d954f579e120a1714096637cc0"}, {file = "pyzmq-22.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:de8df0684398bd74ad160afdc2a118ca28384ac6f5e234eb0508858d8d2d9364"}, + {file = "pyzmq-22.3.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:62bcade20813796c426409a3e7423862d50ff0639f5a2a95be4b85b09a618666"}, + {file = "pyzmq-22.3.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ea5a79e808baef98c48c884effce05c31a0698c1057de8fc1c688891043c1ce1"}, {file = "pyzmq-22.3.0-cp36-cp36m-win32.whl", hash = "sha256:3c1895c95be92600233e476fe283f042e71cf8f0b938aabf21b7aafa62a8dac9"}, {file = "pyzmq-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:851977788b9caa8ed011f5f643d3ee8653af02c5fc723fa350db5125abf2be7b"}, {file = "pyzmq-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b4ebed0977f92320f6686c96e9e8dd29eed199eb8d066936bac991afc37cbb70"}, {file = "pyzmq-22.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42abddebe2c6a35180ca549fadc7228d23c1e1f76167c5ebc8a936b5804ea2df"}, {file = "pyzmq-22.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1e41b32d6f7f9c26bc731a8b529ff592f31fc8b6ef2be9fa74abd05c8a342d7"}, {file = "pyzmq-22.3.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:be4e0f229cf3a71f9ecd633566bd6f80d9fa6afaaff5489492be63fe459ef98c"}, + {file = "pyzmq-22.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:08c4e315a76ef26eb833511ebf3fa87d182152adf43dedee8d79f998a2162a0b"}, + {file = "pyzmq-22.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:badb868fff14cfd0e200eaa845887b1011146a7d26d579aaa7f966c203736b92"}, {file = "pyzmq-22.3.0-cp37-cp37m-win32.whl", hash = "sha256:7c58f598d9fcc52772b89a92d72bf8829c12d09746a6d2c724c5b30076c1f11d"}, {file = "pyzmq-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2b97502c16a5ec611cd52410bdfaab264997c627a46b0f98d3f666227fd1ea2d"}, {file = "pyzmq-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d728b08448e5ac3e4d886b165385a262883c34b84a7fe1166277fe675e1c197a"}, {file = "pyzmq-22.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:480b9931bfb08bf8b094edd4836271d4d6b44150da051547d8c7113bf947a8b0"}, {file = "pyzmq-22.3.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7dc09198e4073e6015d9a8ea093fc348d4e59de49382476940c3dd9ae156fba8"}, {file = 
"pyzmq-22.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ca6cd58f62a2751728016d40082008d3b3412a7f28ddfb4a2f0d3c130f69e74"}, + {file = "pyzmq-22.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:468bd59a588e276961a918a3060948ae68f6ff5a7fa10bb2f9160c18fe341067"}, + {file = "pyzmq-22.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c88fa7410e9fc471e0858638f403739ee869924dd8e4ae26748496466e27ac59"}, {file = "pyzmq-22.3.0-cp38-cp38-win32.whl", hash = "sha256:c0f84360dcca3481e8674393bdf931f9f10470988f87311b19d23cda869bb6b7"}, {file = "pyzmq-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:f762442bab706fd874064ca218b33a1d8e40d4938e96c24dafd9b12e28017f45"}, {file = "pyzmq-22.3.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:954e73c9cd4d6ae319f1c936ad159072b6d356a92dcbbabfd6e6204b9a79d356"}, @@ -3631,6 +3657,8 @@ pyzmq = [ {file = "pyzmq-22.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:acebba1a23fb9d72b42471c3771b6f2f18dcd46df77482612054bd45c07dfa36"}, {file = "pyzmq-22.3.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cf98fd7a6c8aaa08dbc699ffae33fd71175696d78028281bc7b832b26f00ca57"}, {file = "pyzmq-22.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d072f7dfbdb184f0786d63bda26e8a0882041b1e393fbe98940395f7fab4c5e2"}, + {file = "pyzmq-22.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:53f4fd13976789ffafedd4d46f954c7bb01146121812b72b4ddca286034df966"}, + {file = "pyzmq-22.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d1b5d457acbadcf8b27561deeaa386b0217f47626b29672fa7bd31deb6e91e1b"}, {file = "pyzmq-22.3.0-cp39-cp39-win32.whl", hash = "sha256:e6a02cf7271ee94674a44f4e62aa061d2d049001c844657740e156596298b70b"}, {file = "pyzmq-22.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d3dcb5548ead4f1123851a5ced467791f6986d68c656bc63bfff1bf9e36671e2"}, {file = "pyzmq-22.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3a4c9886d61d386b2b493377d980f502186cd71d501fffdba52bd2a0880cef4f"}, diff --git a/pyproject.toml b/pyproject.toml index 2321c060b..192e24963 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,6 +53,7 @@ gremlinpython = "^3.5.2" backoff = "^1.11.1" SPARQLWrapper = "^1.8.5" pyodbc = { version = "~4.0.32", optional = true } +nest-asyncio = "^1.5.4" [tool.poetry.extras] sqlserver = ["pyodbc"] diff --git a/tests/test_neptune.py b/tests/test_neptune.py index 3599aa816..29af98bb4 100644 --- a/tests/test_neptune.py +++ b/tests/test_neptune.py @@ -17,12 +17,7 @@ @pytest.fixture(scope="session") def cloudformation_outputs(): - #return extract_cloudformation_outputs() - outputs = {} - outputs['NeptuneClusterEndpoint'] = 'air-routes-oc.cluster-cei5pmtr7fqq.us-west-2.neptune.amazonaws.com' - outputs['NeptunePort'] = 8182 - outputs['NeptuneIAMEnabled'] = False - return outputs + return extract_cloudformation_outputs() @pytest.fixture(scope="session") @@ -254,7 +249,7 @@ def test_gremlin_write_vertices(neptune_endpoint, neptune_port) -> Dict[str, Any assert saved_row[T.label] == original_row["~label"] assert saved_row["int"] == v2["int"] assert len(saved_row["str"]) == 2 - + # Check that it is respecting the header cardinality df = pd.DataFrame([v2]) df.rename(columns={"int": "int(single)"}, inplace=True) From 86804162797a3dbb51649ecebeb5c3adbf93188d Mon Sep 17 00:00:00 2001 From: Abdel Jaidi Date: Mon, 21 Mar 2022 14:25:12 +0000 Subject: [PATCH 29/32] Minor - Remove nest_asyncio and fix tutorial --- awswrangler/neptune/client.py | 6 +- 
poetry.lock | 278 ++++++++++++++------------- pyproject.toml | 1 - tutorials/033 - Amazon Neptune.ipynb | 60 +++--- 4 files changed, 181 insertions(+), 164 deletions(-) diff --git a/awswrangler/neptune/client.py b/awswrangler/neptune/client.py index 4326c634e..f77029109 100644 --- a/awswrangler/neptune/client.py +++ b/awswrangler/neptune/client.py @@ -4,7 +4,6 @@ from typing import Any, Dict, List, Optional import boto3 -import nest_asyncio import requests from botocore.auth import SigV4Auth from botocore.awsrequest import AWSRequest @@ -194,11 +193,12 @@ def _execute_gremlin(self, query: str, headers: Any = None) -> List[Dict[str, An def _get_gremlin_connection(self, headers: Any = None) -> client.Client: if self.gremlin_connection is None: - nest_asyncio.apply() uri = f"{HTTP_PROTOCOL}://{self.host}:{self.port}/gremlin" request = self._prepare_request("GET", uri, headers=headers) ws_url = f"{WS_PROTOCOL}://{self.host}:{self.port}/gremlin" - self.gremlin_connection = client.Client(ws_url, "g", headers=dict(request.headers)) + self.gremlin_connection = client.Client( + ws_url, "g", headers=dict(request.headers), call_from_event_loop=True + ) return self.gremlin_connection def read_sparql(self, query: str, headers: Any = None) -> Any: diff --git a/poetry.lock b/poetry.lock index 8e6e3e91e..7c8007ac5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -111,7 +111,7 @@ tests = ["pytest"] [[package]] name = "asn1crypto" -version = "1.4.0" +version = "1.5.1" description = "Fast ASN.1 parser and serializer with definitions for private keys, public keys, certificates, CRL, OCSP, CMS, PKCS#3, PKCS#7, PKCS#8, PKCS#12, PKCS#5, X.509 and TSP" category = "main" optional = false @@ -261,15 +261,15 @@ webencodings = "*" [[package]] name = "boto3" -version = "1.20.54" +version = "1.21.22" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 3.6" [package.dependencies] -botocore = ">=1.23.54,<1.24.0" -jmespath = ">=0.7.1,<1.0.0" +botocore = ">=1.24.22,<1.25.0" +jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.5.0,<0.6.0" [package.extras] @@ -277,19 +277,19 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.23.54" +version = "1.24.22" description = "Low-level, data-driven core of boto 3." category = "main" optional = false python-versions = ">= 3.6" [package.dependencies] -jmespath = ">=0.7.1,<1.0.0" +jmespath = ">=0.7.1,<2.0.0" python-dateutil = ">=2.1,<3.0.0" urllib3 = ">=1.25.4,<1.27" [package.extras] -crt = ["awscrt (==0.12.5)"] +crt = ["awscrt (==0.13.5)"] [[package]] name = "bump2version" @@ -331,7 +331,7 @@ unicode_backport = ["unicodedata2"] [[package]] name = "click" -version = "8.0.3" +version = "8.0.4" description = "Composable command line interface toolkit" category = "dev" optional = false @@ -376,7 +376,7 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "36.0.1" +version = "36.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." category = "dev" optional = false @@ -817,7 +817,7 @@ test = ["codecov", "coverage", "ipykernel", "ipython", "mock", "mypy", "pre-comm [[package]] name = "jupyter-core" -version = "4.9.1" +version = "4.9.2" description = "Jupyter core package. A base package on which Jupyter projects rely." 
category = "dev" optional = false @@ -919,7 +919,7 @@ python-versions = ">=3.6" [[package]] name = "lxml" -version = "4.7.1" +version = "4.8.0" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." category = "main" optional = false @@ -957,11 +957,11 @@ python-versions = "*" [[package]] name = "moto" -version = "3.0.3" +version = "3.1.1" description = "A library that allows your python tests to easily mock out the boto library" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.6" [package.dependencies] boto3 = ">=1.9.201" @@ -979,7 +979,7 @@ xmltodict = "*" [package.extras] all = ["PyYAML (>=5.1)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "ecdsa (!=0.15)", "docker (>=2.5.1)", "graphql-core", "jsondiff (>=1.1.2)", "aws-xray-sdk (>=0.93,!=0.96)", "idna (>=2.5,<4)", "cfn-lint (>=0.4.0)", "sshpubkeys (>=3.1.0)", "setuptools"] -apigateway = ["python-jose[cryptography] (>=3.1.0,<4.0.0)", "ecdsa (!=0.15)"] +apigateway = ["PyYAML (>=5.1)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "ecdsa (!=0.15)"] apigatewayv2 = ["PyYAML (>=5.1)"] appsync = ["graphql-core"] awslambda = ["docker (>=2.5.1)"] @@ -987,6 +987,7 @@ batch = ["docker (>=2.5.1)"] cloudformation = ["docker (>=2.5.1)", "PyYAML (>=5.1)", "cfn-lint (>=0.4.0)"] cognitoidp = ["python-jose[cryptography] (>=3.1.0,<4.0.0)", "ecdsa (!=0.15)"] ds = ["sshpubkeys (>=3.1.0)"] +dynamodb = ["docker (>=2.5.1)"] dynamodb2 = ["docker (>=2.5.1)"] dynamodbstreams = ["docker (>=2.5.1)"] ec2 = ["sshpubkeys (>=3.1.0)"] @@ -1153,7 +1154,7 @@ python-versions = ">=3.5" [[package]] name = "notebook" -version = "6.4.8" +version = "6.4.10" description = "A web-based notebook environment for interactive computing" category = "dev" optional = false @@ -1166,7 +1167,7 @@ ipython-genutils = "*" jinja2 = "*" jupyter-client = ">=5.3.4" jupyter-core = ">=4.6.1" -nbconvert = "*" +nbconvert = ">=5" nbformat = "*" nest-asyncio = ">=1.5" prometheus-client = "*" @@ -1721,7 +1722,7 @@ python-versions = "*" [[package]] name = "pywinpty" -version = "2.0.2" +version = "2.0.3" description = "Pseudo terminal support for Windows from Python." category = "dev" optional = false @@ -1760,7 +1761,7 @@ tests = ["html5lib", "networkx", "nose", "doctest-ignore-unicode"] [[package]] name = "redshift-connector" -version = "2.0.904" +version = "2.0.905" description = "Redshift interface library" category = "main" optional = false @@ -1827,7 +1828,7 @@ tests = ["coverage (>=3.7.1,<6.0.0)", "pytest-cov", "pytest-localserver", "flake [[package]] name = "restructuredtext-lint" -version = "1.3.2" +version = "1.4.0" description = "reStructuredText linter" category = "dev" optional = false @@ -1850,7 +1851,7 @@ fsspec = ">=0.6.0" [[package]] name = "s3transfer" -version = "0.5.1" +version = "0.5.2" description = "An Amazon S3 Transfer Manager" category = "main" optional = false @@ -2075,14 +2076,14 @@ test = ["pytest"] [[package]] name = "testpath" -version = "0.5.0" +version = "0.6.0" description = "Test utilities for code working with files and commands" category = "dev" optional = false python-versions = ">= 3.5" [package.extras] -test = ["pytest", "pathlib2"] +test = ["pytest"] [[package]] name = "toml" @@ -2165,20 +2166,20 @@ python-versions = ">=3.6" [[package]] name = "urllib3" -version = "1.26.8" +version = "1.26.9" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" [package.extras] -brotli = ["brotlipy (>=0.6.0)"] +brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "virtualenv" -version = "20.13.1" +version = "20.13.4" description = "Virtual Python Environment builder" category = "dev" optional = false @@ -2214,7 +2215,7 @@ python-versions = "*" [[package]] name = "websocket-client" -version = "1.2.3" +version = "1.3.1" description = "WebSocket client for Python with low level API options" category = "dev" optional = false @@ -2307,7 +2308,7 @@ sqlserver = ["pyodbc"] [metadata] lock-version = "1.1" python-versions = ">=3.6.2, <3.11" -content-hash = "7cde280e5887ede566e2153c4c64860a11ad5943b892b8ba6cc7f3b07867578a" +content-hash = "80f13438c6033d51c3020003cf52d6df176ec57c5bad69110945a01ec57f2785" [metadata.files] aenum = [ @@ -2433,8 +2434,8 @@ argon2-cffi-bindings = [ {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, ] asn1crypto = [ - {file = "asn1crypto-1.4.0-py2.py3-none-any.whl", hash = "sha256:4bcdf33c861c7d40bdcd74d8e4dd7661aac320fcdf40b9a3f95b4ee12fde2fa8"}, - {file = "asn1crypto-1.4.0.tar.gz", hash = "sha256:f4f6e119474e58e04a2b1af817eb585b4fd72bdd89b998624712b5c99be7641c"}, + {file = "asn1crypto-1.5.1-py2.py3-none-any.whl", hash = "sha256:db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67"}, + {file = "asn1crypto-1.5.1.tar.gz", hash = "sha256:13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c805306ccb9c"}, ] astroid = [ {file = "astroid-2.9.3-py3-none-any.whl", hash = "sha256:506daabe5edffb7e696ad82483ad0228245a9742ed7d2d8c9cdb31537decf9f6"}, @@ -2506,12 +2507,12 @@ bleach = [ {file = "bleach-4.1.0.tar.gz", hash = "sha256:0900d8b37eba61a802ee40ac0061f8c2b5dee29c1927dd1d233e075ebf5a71da"}, ] boto3 = [ - {file = "boto3-1.20.54-py3-none-any.whl", hash = "sha256:1a272a1dd36414b1626a47bb580425203be0b5a34caa117f38a5e18adf21f918"}, - {file = "boto3-1.20.54.tar.gz", hash = "sha256:8129ad42cc0120d1c63daa18512d6f0b1439e385b2b6e0fe987f116bdf795546"}, + {file = "boto3-1.21.22-py3-none-any.whl", hash = "sha256:a56e34d8dc3390006a6d7ae5373f357917932045e874c82de2736f3b42c02b10"}, + {file = "boto3-1.21.22.tar.gz", hash = "sha256:334f14ffbd89ddd15090e90b32e4fcea73d83b60b19ca2737a9264fd44096f35"}, ] botocore = [ - {file = "botocore-1.23.54-py3-none-any.whl", hash = "sha256:06ae8076c4dcf3d72bec4d37e5f2dce4a92a18a8cdaa3bfaa6e3b7b5e30a8d7e"}, - {file = "botocore-1.23.54.tar.gz", hash = "sha256:4bb9ba16cccee5f5a2602049bc3e2db6865346b2550667f3013bdf33b0a01ceb"}, + {file = "botocore-1.24.22-py3-none-any.whl", hash = "sha256:e812604653c635c78431b3dd168d3fc04e8c3514839226814c999336d5e59ea0"}, + {file = "botocore-1.24.22.tar.gz", hash = "sha256:92ba8afeda48f5d2467811d87df401d703a25191f82882994d8d09a7d8b5b965"}, ] bump2version = [ {file = "bump2version-1.0.1-py2.py3-none-any.whl", hash = "sha256:37f927ea17cde7ae2d7baf832f8e80ce3777624554a653006c9144f8017fe410"}, @@ -2578,8 +2579,8 @@ charset-normalizer = [ {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, ] click = [ - {file = "click-8.0.3-py3-none-any.whl", hash = 
"sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"}, - {file = "click-8.0.3.tar.gz", hash = "sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"}, + {file = "click-8.0.4-py3-none-any.whl", hash = "sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1"}, + {file = "click-8.0.4.tar.gz", hash = "sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb"}, ] colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, @@ -2638,26 +2639,26 @@ coverage = [ {file = "coverage-6.2.tar.gz", hash = "sha256:e2cad8093172b7d1595b4ad66f24270808658e11acf43a8f95b41276162eb5b8"}, ] cryptography = [ - {file = "cryptography-36.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:73bc2d3f2444bcfeac67dd130ff2ea598ea5f20b40e36d19821b4df8c9c5037b"}, - {file = "cryptography-36.0.1-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:2d87cdcb378d3cfed944dac30596da1968f88fb96d7fc34fdae30a99054b2e31"}, - {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74d6c7e80609c0f4c2434b97b80c7f8fdfaa072ca4baab7e239a15d6d70ed73a"}, - {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:6c0c021f35b421ebf5976abf2daacc47e235f8b6082d3396a2fe3ccd537ab173"}, - {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d59a9d55027a8b88fd9fd2826c4392bd487d74bf628bb9d39beecc62a644c12"}, - {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a817b961b46894c5ca8a66b599c745b9a3d9f822725221f0e0fe49dc043a3a3"}, - {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:94ae132f0e40fe48f310bba63f477f14a43116f05ddb69d6fa31e93f05848ae2"}, - {file = "cryptography-36.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7be0eec337359c155df191d6ae00a5e8bbb63933883f4f5dffc439dac5348c3f"}, - {file = "cryptography-36.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e0344c14c9cb89e76eb6a060e67980c9e35b3f36691e15e1b7a9e58a0a6c6dc3"}, - {file = "cryptography-36.0.1-cp36-abi3-win32.whl", hash = "sha256:4caa4b893d8fad33cf1964d3e51842cd78ba87401ab1d2e44556826df849a8ca"}, - {file = "cryptography-36.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:391432971a66cfaf94b21c24ab465a4cc3e8bf4a939c1ca5c3e3a6e0abebdbcf"}, - {file = "cryptography-36.0.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bb5829d027ff82aa872d76158919045a7c1e91fbf241aec32cb07956e9ebd3c9"}, - {file = "cryptography-36.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc15b1c22e55c4d5566e3ca4db8689470a0ca2babef8e3a9ee057a8b82ce4b1"}, - {file = "cryptography-36.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:596f3cd67e1b950bc372c33f1a28a0692080625592ea6392987dba7f09f17a94"}, - {file = "cryptography-36.0.1-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:30ee1eb3ebe1644d1c3f183d115a8c04e4e603ed6ce8e394ed39eea4a98469ac"}, - {file = "cryptography-36.0.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec63da4e7e4a5f924b90af42eddf20b698a70e58d86a72d943857c4c6045b3ee"}, - {file = "cryptography-36.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca238ceb7ba0bdf6ce88c1b74a87bffcee5afbfa1e41e173b1ceb095b39add46"}, - {file = 
"cryptography-36.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:ca28641954f767f9822c24e927ad894d45d5a1e501767599647259cbf030b903"}, - {file = "cryptography-36.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:39bdf8e70eee6b1c7b289ec6e5d84d49a6bfa11f8b8646b5b3dfe41219153316"}, - {file = "cryptography-36.0.1.tar.gz", hash = "sha256:53e5c1dc3d7a953de055d77bef2ff607ceef7a2aac0353b5d630ab67f7423638"}, + {file = "cryptography-36.0.2-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:4e2dddd38a5ba733be6a025a1475a9f45e4e41139d1321f412c6b360b19070b6"}, + {file = "cryptography-36.0.2-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:4881d09298cd0b669bb15b9cfe6166f16fc1277b4ed0d04a22f3d6430cb30f1d"}, + {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea634401ca02367c1567f012317502ef3437522e2fc44a3ea1844de028fa4b84"}, + {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:7be666cc4599b415f320839e36367b273db8501127b38316f3b9f22f17a0b815"}, + {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8241cac0aae90b82d6b5c443b853723bcc66963970c67e56e71a2609dc4b5eaf"}, + {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b2d54e787a884ffc6e187262823b6feb06c338084bbe80d45166a1cb1c6c5bf"}, + {file = "cryptography-36.0.2-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:c2c5250ff0d36fd58550252f54915776940e4e866f38f3a7866d92b32a654b86"}, + {file = "cryptography-36.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ec6597aa85ce03f3e507566b8bcdf9da2227ec86c4266bd5e6ab4d9e0cc8dab2"}, + {file = "cryptography-36.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ca9f686517ec2c4a4ce930207f75c00bf03d94e5063cbc00a1dc42531511b7eb"}, + {file = "cryptography-36.0.2-cp36-abi3-win32.whl", hash = "sha256:f64b232348ee82f13aac22856515ce0195837f6968aeaa94a3d0353ea2ec06a6"}, + {file = "cryptography-36.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:53e0285b49fd0ab6e604f4c5d9c5ddd98de77018542e88366923f152dbeb3c29"}, + {file = "cryptography-36.0.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:32db5cc49c73f39aac27574522cecd0a4bb7384e71198bc65a0d23f901e89bb7"}, + {file = "cryptography-36.0.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b3d199647468d410994dbeb8cec5816fb74feb9368aedf300af709ef507e3e"}, + {file = "cryptography-36.0.2-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:da73d095f8590ad437cd5e9faf6628a218aa7c387e1fdf67b888b47ba56a17f0"}, + {file = "cryptography-36.0.2-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:0a3bf09bb0b7a2c93ce7b98cb107e9170a90c51a0162a20af1c61c765b90e60b"}, + {file = "cryptography-36.0.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8897b7b7ec077c819187a123174b645eb680c13df68354ed99f9b40a50898f77"}, + {file = "cryptography-36.0.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82740818f2f240a5da8dfb8943b360e4f24022b093207160c77cadade47d7c85"}, + {file = "cryptography-36.0.2-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:1f64a62b3b75e4005df19d3b5235abd43fa6358d5516cfc43d87aeba8d08dd51"}, + {file = "cryptography-36.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e167b6b710c7f7bc54e67ef593f8731e1f45aa35f8a8a7b72d6e42ec76afd4b3"}, + {file = "cryptography-36.0.2.tar.gz", 
hash = "sha256:70f8f4f7bb2ac9f340655cbac89d68c527af5bb4387522a8413e841e3e6628c9"}, ] dataclasses = [ {file = "dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"}, @@ -2887,8 +2888,8 @@ jupyter-client = [ {file = "jupyter_client-7.1.2.tar.gz", hash = "sha256:4ea61033726c8e579edb55626d8ee2e6bf0a83158ddf3751b8dd46b2c5cd1e96"}, ] jupyter-core = [ - {file = "jupyter_core-4.9.1-py3-none-any.whl", hash = "sha256:1c091f3bbefd6f2a8782f2c1db662ca8478ac240e962ae2c66f0b87c818154ea"}, - {file = "jupyter_core-4.9.1.tar.gz", hash = "sha256:dce8a7499da5a53ae3afd5a9f4b02e5df1d57250cf48f3ad79da23b4778cd6fa"}, + {file = "jupyter_core-4.9.2-py3-none-any.whl", hash = "sha256:f875e4d27e202590311d468fa55f90c575f201490bd0c18acabe4e318db4a46d"}, + {file = "jupyter_core-4.9.2.tar.gz", hash = "sha256:d69baeb9ffb128b8cd2657fcf2703f89c769d1673c851812119e3a2a0e93ad9a"}, ] jupyter-server = [ {file = "jupyter_server-1.13.1-py3-none-any.whl", hash = "sha256:abfe55b6cd7bac0d7d7b8042765b7e451f11b5f2276a2ad708745cd8904d4e5b"}, @@ -2946,66 +2947,67 @@ lazy-object-proxy = [ {file = "lazy_object_proxy-1.7.1-pp37.pp38-none-any.whl", hash = "sha256:d66906d5785da8e0be7360912e99c9188b70f52c422f9fc18223347235691a84"}, ] lxml = [ - {file = "lxml-4.7.1-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:d546431636edb1d6a608b348dd58cc9841b81f4116745857b6cb9f8dadb2725f"}, - {file = "lxml-4.7.1-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6308062534323f0d3edb4e702a0e26a76ca9e0e23ff99be5d82750772df32a9e"}, - {file = "lxml-4.7.1-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f76dbe44e31abf516114f6347a46fa4e7c2e8bceaa4b6f7ee3a0a03c8eba3c17"}, - {file = "lxml-4.7.1-cp27-cp27m-win32.whl", hash = "sha256:d5618d49de6ba63fe4510bdada62d06a8acfca0b4b5c904956c777d28382b419"}, - {file = "lxml-4.7.1-cp27-cp27m-win_amd64.whl", hash = "sha256:9393a05b126a7e187f3e38758255e0edf948a65b22c377414002d488221fdaa2"}, - {file = "lxml-4.7.1-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:50d3dba341f1e583265c1a808e897b4159208d814ab07530202b6036a4d86da5"}, - {file = "lxml-4.7.1-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:44f552e0da3c8ee3c28e2eb82b0b784200631687fc6a71277ea8ab0828780e7d"}, - {file = "lxml-4.7.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:e662c6266e3a275bdcb6bb049edc7cd77d0b0f7e119a53101d367c841afc66dc"}, - {file = "lxml-4.7.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4c093c571bc3da9ebcd484e001ba18b8452903cd428c0bc926d9b0141bcb710e"}, - {file = "lxml-4.7.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:3e26ad9bc48d610bf6cc76c506b9e5ad9360ed7a945d9be3b5b2c8535a0145e3"}, - {file = "lxml-4.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a5f623aeaa24f71fce3177d7fee875371345eb9102b355b882243e33e04b7175"}, - {file = "lxml-4.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7b5e2acefd33c259c4a2e157119c4373c8773cf6793e225006a1649672ab47a6"}, - {file = "lxml-4.7.1-cp310-cp310-win32.whl", hash = "sha256:67fa5f028e8a01e1d7944a9fb616d1d0510d5d38b0c41708310bd1bc45ae89f6"}, - {file = "lxml-4.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:b1d381f58fcc3e63fcc0ea4f0a38335163883267f77e4c6e22d7a30877218a0e"}, - {file = "lxml-4.7.1-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:38d9759733aa04fb1697d717bfabbedb21398046bd07734be7cccc3d19ea8675"}, - {file = "lxml-4.7.1-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:dfd0d464f3d86a1460683cd742306d1138b4e99b79094f4e07e1ca85ee267fe7"}, - {file = "lxml-4.7.1-cp35-cp35m-win32.whl", hash = "sha256:534e946bce61fd162af02bad7bfd2daec1521b71d27238869c23a672146c34a5"}, - {file = "lxml-4.7.1-cp35-cp35m-win_amd64.whl", hash = "sha256:6ec829058785d028f467be70cd195cd0aaf1a763e4d09822584ede8c9eaa4b03"}, - {file = "lxml-4.7.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:ade74f5e3a0fd17df5782896ddca7ddb998845a5f7cd4b0be771e1ffc3b9aa5b"}, - {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:41358bfd24425c1673f184d7c26c6ae91943fe51dfecc3603b5e08187b4bcc55"}, - {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6e56521538f19c4a6690f439fefed551f0b296bd785adc67c1777c348beb943d"}, - {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5b0f782f0e03555c55e37d93d7a57454efe7495dab33ba0ccd2dbe25fc50f05d"}, - {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:490712b91c65988012e866c411a40cc65b595929ececf75eeb4c79fcc3bc80a6"}, - {file = "lxml-4.7.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:34c22eb8c819d59cec4444d9eebe2e38b95d3dcdafe08965853f8799fd71161d"}, - {file = "lxml-4.7.1-cp36-cp36m-win32.whl", hash = "sha256:2a906c3890da6a63224d551c2967413b8790a6357a80bf6b257c9a7978c2c42d"}, - {file = "lxml-4.7.1-cp36-cp36m-win_amd64.whl", hash = "sha256:36b16fecb10246e599f178dd74f313cbdc9f41c56e77d52100d1361eed24f51a"}, - {file = "lxml-4.7.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:a5edc58d631170de90e50adc2cc0248083541affef82f8cd93bea458e4d96db8"}, - {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:87c1b0496e8c87ec9db5383e30042357b4839b46c2d556abd49ec770ce2ad868"}, - {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:0a5f0e4747f31cff87d1eb32a6000bde1e603107f632ef4666be0dc065889c7a"}, - {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:bf6005708fc2e2c89a083f258b97709559a95f9a7a03e59f805dd23c93bc3986"}, - {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fc15874816b9320581133ddc2096b644582ab870cf6a6ed63684433e7af4b0d3"}, - {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0b5e96e25e70917b28a5391c2ed3ffc6156513d3db0e1476c5253fcd50f7a944"}, - {file = "lxml-4.7.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ec9027d0beb785a35aa9951d14e06d48cfbf876d8ff67519403a2522b181943b"}, - {file = "lxml-4.7.1-cp37-cp37m-win32.whl", hash = "sha256:9fbc0dee7ff5f15c4428775e6fa3ed20003140560ffa22b88326669d53b3c0f4"}, - {file = "lxml-4.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:1104a8d47967a414a436007c52f533e933e5d52574cab407b1e49a4e9b5ddbd1"}, - {file = "lxml-4.7.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:fc9fb11b65e7bc49f7f75aaba1b700f7181d95d4e151cf2f24d51bfd14410b77"}, - {file = "lxml-4.7.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:317bd63870b4d875af3c1be1b19202de34c32623609ec803b81c99193a788c1e"}, - {file = 
"lxml-4.7.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:610807cea990fd545b1559466971649e69302c8a9472cefe1d6d48a1dee97440"}, - {file = "lxml-4.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:09b738360af8cb2da275998a8bf79517a71225b0de41ab47339c2beebfff025f"}, - {file = "lxml-4.7.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6a2ab9d089324d77bb81745b01f4aeffe4094306d939e92ba5e71e9a6b99b71e"}, - {file = "lxml-4.7.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eed394099a7792834f0cb4a8f615319152b9d801444c1c9e1b1a2c36d2239f9e"}, - {file = "lxml-4.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:735e3b4ce9c0616e85f302f109bdc6e425ba1670a73f962c9f6b98a6d51b77c9"}, - {file = "lxml-4.7.1-cp38-cp38-win32.whl", hash = "sha256:772057fba283c095db8c8ecde4634717a35c47061d24f889468dc67190327bcd"}, - {file = "lxml-4.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:13dbb5c7e8f3b6a2cf6e10b0948cacb2f4c9eb05029fe31c60592d08ac63180d"}, - {file = "lxml-4.7.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:718d7208b9c2d86aaf0294d9381a6acb0158b5ff0f3515902751404e318e02c9"}, - {file = "lxml-4.7.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:5bee1b0cbfdb87686a7fb0e46f1d8bd34d52d6932c0723a86de1cc532b1aa489"}, - {file = "lxml-4.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:e410cf3a2272d0a85526d700782a2fa92c1e304fdcc519ba74ac80b8297adf36"}, - {file = "lxml-4.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:585ea241ee4961dc18a95e2f5581dbc26285fcf330e007459688096f76be8c42"}, - {file = "lxml-4.7.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a555e06566c6dc167fbcd0ad507ff05fd9328502aefc963cb0a0547cfe7f00db"}, - {file = "lxml-4.7.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:adaab25be351fff0d8a691c4f09153647804d09a87a4e4ea2c3f9fe9e8651851"}, - {file = "lxml-4.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:82d16a64236970cb93c8d63ad18c5b9f138a704331e4b916b2737ddfad14e0c4"}, - {file = "lxml-4.7.1-cp39-cp39-win32.whl", hash = "sha256:59e7da839a1238807226f7143c68a479dee09244d1b3cf8c134f2fce777d12d0"}, - {file = "lxml-4.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:a1bbc4efa99ed1310b5009ce7f3a1784698082ed2c1ef3895332f5df9b3b92c2"}, - {file = "lxml-4.7.1-pp37-pypy37_pp73-macosx_10_14_x86_64.whl", hash = "sha256:0607ff0988ad7e173e5ddf7bf55ee65534bd18a5461183c33e8e41a59e89edf4"}, - {file = "lxml-4.7.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:6c198bfc169419c09b85ab10cb0f572744e686f40d1e7f4ed09061284fc1303f"}, - {file = "lxml-4.7.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a58d78653ae422df6837dd4ca0036610b8cb4962b5cfdbd337b7b24de9e5f98a"}, - {file = "lxml-4.7.1-pp38-pypy38_pp73-macosx_10_14_x86_64.whl", hash = "sha256:e18281a7d80d76b66a9f9e68a98cf7e1d153182772400d9a9ce855264d7d0ce7"}, - {file = "lxml-4.7.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8e54945dd2eeb50925500957c7c579df3cd07c29db7810b83cf30495d79af267"}, - {file = "lxml-4.7.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:447d5009d6b5447b2f237395d0018901dcc673f7d9f82ba26c1b9f9c3b444b60"}, - 
{file = "lxml-4.7.1.tar.gz", hash = "sha256:a1613838aa6b89af4ba10a0f3a972836128801ed008078f8c1244e65958f1b24"}, + {file = "lxml-4.8.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:e1ab2fac607842ac36864e358c42feb0960ae62c34aa4caaf12ada0a1fb5d99b"}, + {file = "lxml-4.8.0-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28d1af847786f68bec57961f31221125c29d6f52d9187c01cd34dc14e2b29430"}, + {file = "lxml-4.8.0-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b92d40121dcbd74831b690a75533da703750f7041b4bf951befc657c37e5695a"}, + {file = "lxml-4.8.0-cp27-cp27m-win32.whl", hash = "sha256:e01f9531ba5420838c801c21c1b0f45dbc9607cb22ea2cf132844453bec863a5"}, + {file = "lxml-4.8.0-cp27-cp27m-win_amd64.whl", hash = "sha256:6259b511b0f2527e6d55ad87acc1c07b3cbffc3d5e050d7e7bcfa151b8202df9"}, + {file = "lxml-4.8.0-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1010042bfcac2b2dc6098260a2ed022968dbdfaf285fc65a3acf8e4eb1ffd1bc"}, + {file = "lxml-4.8.0-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fa56bb08b3dd8eac3a8c5b7d075c94e74f755fd9d8a04543ae8d37b1612dd170"}, + {file = "lxml-4.8.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:31ba2cbc64516dcdd6c24418daa7abff989ddf3ba6d3ea6f6ce6f2ed6e754ec9"}, + {file = "lxml-4.8.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:31499847fc5f73ee17dbe1b8e24c6dafc4e8d5b48803d17d22988976b0171f03"}, + {file = "lxml-4.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:5f7d7d9afc7b293147e2d506a4596641d60181a35279ef3aa5778d0d9d9123fe"}, + {file = "lxml-4.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a3c5f1a719aa11866ffc530d54ad965063a8cbbecae6515acbd5f0fae8f48eaa"}, + {file = "lxml-4.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6268e27873a3d191849204d00d03f65c0e343b3bcb518a6eaae05677c95621d1"}, + {file = "lxml-4.8.0-cp310-cp310-win32.whl", hash = "sha256:330bff92c26d4aee79c5bc4d9967858bdbe73fdbdbacb5daf623a03a914fe05b"}, + {file = "lxml-4.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:b2582b238e1658c4061ebe1b4df53c435190d22457642377fd0cb30685cdfb76"}, + {file = "lxml-4.8.0-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a2bfc7e2a0601b475477c954bf167dee6d0f55cb167e3f3e7cefad906e7759f6"}, + {file = "lxml-4.8.0-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a1547ff4b8a833511eeaceacbcd17b043214fcdb385148f9c1bc5556ca9623e2"}, + {file = "lxml-4.8.0-cp35-cp35m-win32.whl", hash = "sha256:a9f1c3489736ff8e1c7652e9dc39f80cff820f23624f23d9eab6e122ac99b150"}, + {file = "lxml-4.8.0-cp35-cp35m-win_amd64.whl", hash = "sha256:530f278849031b0eb12f46cca0e5db01cfe5177ab13bd6878c6e739319bae654"}, + {file = "lxml-4.8.0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:078306d19a33920004addeb5f4630781aaeabb6a8d01398045fcde085091a169"}, + {file = "lxml-4.8.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:86545e351e879d0b72b620db6a3b96346921fa87b3d366d6c074e5a9a0b8dadb"}, + {file = "lxml-4.8.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24f5c5ae618395ed871b3d8ebfcbb36e3f1091fd847bf54c4de623f9107942f3"}, + {file = "lxml-4.8.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:bbab6faf6568484707acc052f4dfc3802bdb0cafe079383fbaa23f1cdae9ecd4"}, + {file = 
"lxml-4.8.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7993232bd4044392c47779a3c7e8889fea6883be46281d45a81451acfd704d7e"}, + {file = "lxml-4.8.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6d6483b1229470e1d8835e52e0ff3c6973b9b97b24cd1c116dca90b57a2cc613"}, + {file = "lxml-4.8.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ad4332a532e2d5acb231a2e5d33f943750091ee435daffca3fec0a53224e7e33"}, + {file = "lxml-4.8.0-cp36-cp36m-win32.whl", hash = "sha256:db3535733f59e5605a88a706824dfcb9bd06725e709ecb017e165fc1d6e7d429"}, + {file = "lxml-4.8.0-cp36-cp36m-win_amd64.whl", hash = "sha256:5f148b0c6133fb928503cfcdfdba395010f997aa44bcf6474fcdd0c5398d9b63"}, + {file = "lxml-4.8.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:8a31f24e2a0b6317f33aafbb2f0895c0bce772980ae60c2c640d82caac49628a"}, + {file = "lxml-4.8.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:719544565c2937c21a6f76d520e6e52b726d132815adb3447ccffbe9f44203c4"}, + {file = "lxml-4.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:c0b88ed1ae66777a798dc54f627e32d3b81c8009967c63993c450ee4cbcbec15"}, + {file = "lxml-4.8.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fa9b7c450be85bfc6cd39f6df8c5b8cbd76b5d6fc1f69efec80203f9894b885f"}, + {file = "lxml-4.8.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e9f84ed9f4d50b74fbc77298ee5c870f67cb7e91dcdc1a6915cb1ff6a317476c"}, + {file = "lxml-4.8.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1d650812b52d98679ed6c6b3b55cbb8fe5a5460a0aef29aeb08dc0b44577df85"}, + {file = "lxml-4.8.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:80bbaddf2baab7e6de4bc47405e34948e694a9efe0861c61cdc23aa774fcb141"}, + {file = "lxml-4.8.0-cp37-cp37m-win32.whl", hash = "sha256:6f7b82934c08e28a2d537d870293236b1000d94d0b4583825ab9649aef7ddf63"}, + {file = "lxml-4.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e1fd7d2fe11f1cb63d3336d147c852f6d07de0d0020d704c6031b46a30b02ca8"}, + {file = "lxml-4.8.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:5045ee1ccd45a89c4daec1160217d363fcd23811e26734688007c26f28c9e9e7"}, + {file = "lxml-4.8.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0c1978ff1fd81ed9dcbba4f91cf09faf1f8082c9d72eb122e92294716c605428"}, + {file = "lxml-4.8.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cbf2ff155b19dc4d4100f7442f6a697938bf4493f8d3b0c51d45568d5666b5"}, + {file = "lxml-4.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ce13d6291a5f47c1c8dbd375baa78551053bc6b5e5c0e9bb8e39c0a8359fd52f"}, + {file = "lxml-4.8.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e11527dc23d5ef44d76fef11213215c34f36af1608074561fcc561d983aeb870"}, + {file = "lxml-4.8.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:60d2f60bd5a2a979df28ab309352cdcf8181bda0cca4529769a945f09aba06f9"}, + {file = "lxml-4.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:62f93eac69ec0f4be98d1b96f4d6b964855b8255c345c17ff12c20b93f247b68"}, + {file = "lxml-4.8.0-cp38-cp38-win32.whl", hash = "sha256:20b8a746a026017acf07da39fdb10aa80ad9877046c9182442bf80c84a1c4696"}, + {file = "lxml-4.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:891dc8f522d7059ff0024cd3ae79fd224752676447f9c678f2a5c14b84d9a939"}, + {file = 
"lxml-4.8.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:b6fc2e2fb6f532cf48b5fed57567ef286addcef38c28874458a41b7837a57807"}, + {file = "lxml-4.8.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:74eb65ec61e3c7c019d7169387d1b6ffcfea1b9ec5894d116a9a903636e4a0b1"}, + {file = "lxml-4.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:627e79894770783c129cc5e89b947e52aa26e8e0557c7e205368a809da4b7939"}, + {file = "lxml-4.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:545bd39c9481f2e3f2727c78c169425efbfb3fbba6e7db4f46a80ebb249819ca"}, + {file = "lxml-4.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5a58d0b12f5053e270510bf12f753a76aaf3d74c453c00942ed7d2c804ca845c"}, + {file = "lxml-4.8.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ec4b4e75fc68da9dc0ed73dcdb431c25c57775383fec325d23a770a64e7ebc87"}, + {file = "lxml-4.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5804e04feb4e61babf3911c2a974a5b86f66ee227cc5006230b00ac6d285b3a9"}, + {file = "lxml-4.8.0-cp39-cp39-win32.whl", hash = "sha256:aa0cf4922da7a3c905d000b35065df6184c0dc1d866dd3b86fd961905bbad2ea"}, + {file = "lxml-4.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:dd10383f1d6b7edf247d0960a3db274c07e96cf3a3fc7c41c8448f93eac3fb1c"}, + {file = "lxml-4.8.0-pp37-pypy37_pp73-macosx_10_14_x86_64.whl", hash = "sha256:2403a6d6fb61c285969b71f4a3527873fe93fd0abe0832d858a17fe68c8fa507"}, + {file = "lxml-4.8.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:986b7a96228c9b4942ec420eff37556c5777bfba6758edcb95421e4a614b57f9"}, + {file = "lxml-4.8.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6fe4ef4402df0250b75ba876c3795510d782def5c1e63890bde02d622570d39e"}, + {file = "lxml-4.8.0-pp38-pypy38_pp73-macosx_10_14_x86_64.whl", hash = "sha256:f10ce66fcdeb3543df51d423ede7e238be98412232fca5daec3e54bcd16b8da0"}, + {file = "lxml-4.8.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:730766072fd5dcb219dd2b95c4c49752a54f00157f322bc6d71f7d2a31fecd79"}, + {file = "lxml-4.8.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8b99ec73073b37f9ebe8caf399001848fced9c08064effdbfc4da2b5a8d07b93"}, + {file = "lxml-4.8.0.tar.gz", hash = "sha256:f63f62fc60e6228a4ca9abae28228f35e1bd3ce675013d1dfb828688d50c6e23"}, ] markupsafe = [ {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, @@ -3087,8 +3089,8 @@ mistune = [ {file = "mistune-0.8.4.tar.gz", hash = "sha256:59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e"}, ] moto = [ - {file = "moto-3.0.3-py2.py3-none-any.whl", hash = "sha256:445a574395b8a43a249ae0f932bf10c5cc677054198bfa1ff92e6fbd60e72c38"}, - {file = "moto-3.0.3.tar.gz", hash = "sha256:fa3fbdc22c55d7e70b407e2f2639c48ac82b074f472b167609405c0c1e3a2ccb"}, + {file = "moto-3.1.1-py2.py3-none-any.whl", hash = "sha256:462495563847134ea8ef4135a229731a598a8e7b6b10a74f8d745815aa20a25b"}, + {file = "moto-3.1.1.tar.gz", hash = "sha256:9b5446b3d1f7505d32616209ae09f02123ebc583387f7c182f11e4175754034f"}, ] multidict = [ {file = "multidict-5.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3822c5894c72e3b35aae9909bef66ec83e44522faf767c0ad39e0e2de11d3b55"}, @@ 
-3222,8 +3224,8 @@ nest-asyncio = [ {file = "nest_asyncio-1.5.4.tar.gz", hash = "sha256:f969f6013a16fadb4adcf09d11a68a4f617c6049d7af7ac2c676110169a63abd"}, ] notebook = [ - {file = "notebook-6.4.8-py3-none-any.whl", hash = "sha256:3e702fcc54b8ae597533c3864793b7a1e971dec9e112f67235828d8a798fd654"}, - {file = "notebook-6.4.8.tar.gz", hash = "sha256:1e985c9dc6f678bdfffb9dc657306b5469bfa62d73e03f74e8defbf76d284312"}, + {file = "notebook-6.4.10-py3-none-any.whl", hash = "sha256:49cead814bff0945fcb2ee07579259418672ac175d3dc3d8102a4b0a656ed4df"}, + {file = "notebook-6.4.10.tar.gz", hash = "sha256:2408a76bc6289283a8eecfca67e298ec83c67db51a4c2e1b713dd180bb39e90e"}, ] numpy = [ {file = "numpy-1.18.5-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:e91d31b34fc7c2c8f756b4e902f901f856ae53a93399368d9a0dc7be17ed2ca0"}, @@ -3612,11 +3614,11 @@ pywin32 = [ {file = "pywin32-303-cp39-cp39-win_amd64.whl", hash = "sha256:79cbb862c11b9af19bcb682891c1b91942ec2ff7de8151e2aea2e175899cda34"}, ] pywinpty = [ - {file = "pywinpty-2.0.2-cp310-none-win_amd64.whl", hash = "sha256:4b421379b407bf2f52a64a4c58f61deffe623b5add02d871acb290b771bb6227"}, - {file = "pywinpty-2.0.2-cp37-none-win_amd64.whl", hash = "sha256:238b75fc456a6bc558761a89c9e6b3c8f2f54d79db03ae28997a68313c24b2ca"}, - {file = "pywinpty-2.0.2-cp38-none-win_amd64.whl", hash = "sha256:344858a0b956fdc64a547d5e1980b0257b47f5433ed7cb89bf7b6268cb280c6c"}, - {file = "pywinpty-2.0.2-cp39-none-win_amd64.whl", hash = "sha256:a4a066eaf2e30944d3028d946883ceb7883a499b53c4b89ca2d54bd7a4210550"}, - {file = "pywinpty-2.0.2.tar.gz", hash = "sha256:20ec117183f79642eff555ce0dd1823f942618d65813fb6122d14b6e34b5d05a"}, + {file = "pywinpty-2.0.3-cp310-none-win_amd64.whl", hash = "sha256:7a330ef7a2ce284370b1a1fdd2a80c523585464fa5e5ab934c9f27220fa7feab"}, + {file = "pywinpty-2.0.3-cp37-none-win_amd64.whl", hash = "sha256:6455f1075f978942d318f95616661c605d5e0f991c5b176c0c852d237aafefc0"}, + {file = "pywinpty-2.0.3-cp38-none-win_amd64.whl", hash = "sha256:2e7a288a8121393c526d4e6ec7d65edef75d68c7787ab9560e438df867b75a5d"}, + {file = "pywinpty-2.0.3-cp39-none-win_amd64.whl", hash = "sha256:def51627e6aa659f33ea7a0ea4c6b68365c83af4aad7940600f844746817a0ed"}, + {file = "pywinpty-2.0.3.tar.gz", hash = "sha256:6b29a826e896105370c38d53904c3aaac6c36146a50448fc0ed5082cf9d092bc"}, ] pyzmq = [ {file = "pyzmq-22.3.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:6b217b8f9dfb6628f74b94bdaf9f7408708cb02167d644edca33f38746ca12dd"}, @@ -3672,7 +3674,7 @@ rdflib = [ {file = "rdflib-5.0.0.tar.gz", hash = "sha256:78149dd49d385efec3b3adfbd61c87afaf1281c30d3fcaf1b323b34f603fb155"}, ] redshift-connector = [ - {file = "redshift_connector-2.0.904-py3-none-any.whl", hash = "sha256:e8dec071e76bb98aa4f8eeae3ac34fc68618c47a735903093477f3946eb181e1"}, + {file = "redshift_connector-2.0.905-py3-none-any.whl", hash = "sha256:e97074c6a5fb87a3f9697462ebd97072d30bf0ab418dc48574c8ae29e4a7d060"}, ] requests = [ {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, @@ -3687,15 +3689,15 @@ responses = [ {file = "responses-0.17.0.tar.gz", hash = "sha256:ec675e080d06bf8d1fb5e5a68a1e5cd0df46b09c78230315f650af5e4036bec7"}, ] restructuredtext-lint = [ - {file = "restructuredtext_lint-1.3.2.tar.gz", hash = "sha256:d3b10a1fe2ecac537e51ae6d151b223b78de9fafdd50e5eb6b08c243df173c80"}, + {file = "restructuredtext_lint-1.4.0.tar.gz", hash = "sha256:1b235c0c922341ab6c530390892eb9e92f90b9b75046063e047cacfb0f050c45"}, ] s3fs = [ {file = 
"s3fs-0.4.2-py3-none-any.whl", hash = "sha256:91c1dfb45e5217bd441a7a560946fe865ced6225ff7eb0fb459fe6e601a95ed3"}, {file = "s3fs-0.4.2.tar.gz", hash = "sha256:2ca5de8dc18ad7ad350c0bd01aef0406aa5d0fff78a561f0f710f9d9858abdd0"}, ] s3transfer = [ - {file = "s3transfer-0.5.1-py3-none-any.whl", hash = "sha256:25c140f5c66aa79e1ac60be50dcd45ddc59e83895f062a3aab263b870102911f"}, - {file = "s3transfer-0.5.1.tar.gz", hash = "sha256:69d264d3e760e569b78aaa0f22c97e955891cd22e32b10c51f784eeda4d9d10a"}, + {file = "s3transfer-0.5.2-py3-none-any.whl", hash = "sha256:7a6f4c4d1fdb9a2b640244008e142cbc2cd3ae34b386584ef044dd0f27101971"}, + {file = "s3transfer-0.5.2.tar.gz", hash = "sha256:95c58c194ce657a5f4fb0b9e60a84968c808888aed628cd98ab8771fe1db98ed"}, ] scramp = [ {file = "scramp-1.4.1-py3-none-any.whl", hash = "sha256:93c9cc2ffe54a451e02981c07a5a23cbd830701102789939cfb4ff91efd6ca8c"}, @@ -3769,8 +3771,8 @@ terminado = [ {file = "terminado-0.13.0.tar.gz", hash = "sha256:713531ccb5db7d4f544651f14050da79809030f00d1afa21462088cf32fb143a"}, ] testpath = [ - {file = "testpath-0.5.0-py3-none-any.whl", hash = "sha256:8044f9a0bab6567fc644a3593164e872543bb44225b0e24846e2c89237937589"}, - {file = "testpath-0.5.0.tar.gz", hash = "sha256:1acf7a0bcd3004ae8357409fc33751e16d37ccc650921da1094a86581ad1e417"}, + {file = "testpath-0.6.0-py3-none-any.whl", hash = "sha256:8ada9f80a2ac6fb0391aa7cdb1a7d11cfa8429f693eda83f74dde570fe6fa639"}, + {file = "testpath-0.6.0.tar.gz", hash = "sha256:2f1b97e6442c02681ebe01bd84f531028a7caea1af3825000f52345c30285e0f"}, ] toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, @@ -3868,12 +3870,12 @@ typing-extensions = [ {file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"}, ] urllib3 = [ - {file = "urllib3-1.26.8-py2.py3-none-any.whl", hash = "sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed"}, - {file = "urllib3-1.26.8.tar.gz", hash = "sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"}, + {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, + {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"}, ] virtualenv = [ - {file = "virtualenv-20.13.1-py2.py3-none-any.whl", hash = "sha256:45e1d053cad4cd453181ae877c4ffc053546ae99e7dd049b9ff1d9be7491abf7"}, - {file = "virtualenv-20.13.1.tar.gz", hash = "sha256:e0621bcbf4160e4e1030f05065c8834b4e93f4fcc223255db2a823440aca9c14"}, + {file = "virtualenv-20.13.4-py2.py3-none-any.whl", hash = "sha256:c3e01300fb8495bc00ed70741f5271fc95fed067eb7106297be73d30879af60c"}, + {file = "virtualenv-20.13.4.tar.gz", hash = "sha256:ce8901d3bbf3b90393498187f2d56797a8a452fb2d0d7efc6fd837554d6f679c"}, ] wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, @@ -3884,8 +3886,8 @@ webencodings = [ {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, ] websocket-client = [ - {file = "websocket-client-1.2.3.tar.gz", hash = "sha256:1315816c0acc508997eb3ae03b9d3ff619c9d12d544c9a9b553704b1cc4f6af5"}, - {file = "websocket_client-1.2.3-py3-none-any.whl", hash = "sha256:2eed4cc58e4d65613ed6114af2f380f7910ff416fc8c46947f6e76b6815f56c0"}, + {file = "websocket-client-1.3.1.tar.gz", hash = 
"sha256:6278a75065395418283f887de7c3beafb3aa68dada5cacbe4b214e8d26da499b"}, + {file = "websocket_client-1.3.1-py3-none-any.whl", hash = "sha256:074e2ed575e7c822fc0940d31c3ac9bb2b1142c303eafcf3e304e6ce035522e8"}, ] werkzeug = [ {file = "Werkzeug-2.0.3-py3-none-any.whl", hash = "sha256:1421ebfc7648a39a5c58c601b154165d05cf47a3cd0ccb70857cbdacf6c8f2b8"}, diff --git a/pyproject.toml b/pyproject.toml index 192e24963..2321c060b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,7 +53,6 @@ gremlinpython = "^3.5.2" backoff = "^1.11.1" SPARQLWrapper = "^1.8.5" pyodbc = { version = "~4.0.32", optional = true } -nest-asyncio = "^1.5.4" [tool.poetry.extras] sqlserver = ["pyodbc"] diff --git a/tutorials/033 - Amazon Neptune.ipynb b/tutorials/033 - Amazon Neptune.ipynb index b79c860f6..132dcf615 100644 --- a/tutorials/033 - Amazon Neptune.ipynb +++ b/tutorials/033 - Amazon Neptune.ipynb @@ -5,7 +5,23 @@ "id": "b0ee9a28", "metadata": {}, "source": [ - "# Initialize\n", + "[![AWS Data Wrangler](_static/logo.png \"AWS Data Wrangler\")](https://github.com/awslabs/aws-data-wrangler)" + ] + }, + { + "cell_type": "markdown", + "id": "3a2a7b51", + "metadata": {}, + "source": [ + "# 33 - Amazon Neptune" + ] + }, + { + "cell_type": "markdown", + "id": "42724a76", + "metadata": {}, + "source": [ + "## Initialize\n", "\n", "The first step to using AWS Data Wrangler with Amazon Neptune is to import the library and create a client connection.\n", "\n", @@ -33,7 +49,7 @@ "id": "1e9499ea", "metadata": {}, "source": [ - "# Return the status of the cluster" + "## Return the status of the cluster" ] }, { @@ -51,11 +67,11 @@ "id": "6f13f0cb", "metadata": {}, "source": [ - "# Retrieve Data from Neptune using AWS Data Wrangler\n", + "## Retrieve Data from Neptune using AWS Data Wrangler\n", "\n", "AWS Data Wrangler supports querying Amazon Neptune using TinkerPop Gremlin and openCypher for property graph data or SPARQL for RDF data.\n", "\n", - "## Gremlin" + "### Gremlin" ] }, { @@ -75,7 +91,7 @@ "id": "a7666d80", "metadata": {}, "source": [ - "## SPARQL" + "### SPARQL" ] }, { @@ -95,7 +111,7 @@ "id": "367791b9", "metadata": {}, "source": [ - "## openCypher" + "### openCypher" ] }, { @@ -115,11 +131,11 @@ "id": "f91b967c", "metadata": {}, "source": [ - "# Saving Data using AWS Data Wrangler\n", + "## Saving Data using AWS Data Wrangler\n", "\n", "AWS Data Wrangler supports saving Pandas DataFrames into Amazon Neptune using either a property graph or RDF data model. \n", "\n", - "## Property Graph\n", + "### Property Graph\n", "\n", "If writing to a property graph then DataFrames for vertices and edges must be written separately. DataFrames for vertices must have a `~label` column with the label and a `~id` column for the vertex id.\n", "\n", @@ -129,7 +145,7 @@ "\n", "DataFrames for edges must have a `~id`, `~label`, `~to`, and `~from` column. If the `~id` column does not exist the specified id does not exists, or is empty then a new edge will be added. 
"\n", -    "### Add Vertices/Nodes" +    "#### Add Vertices/Nodes" ] }, { @@ -164,7 +180,7 @@ "id": "fd5fc8a2", "metadata": {}, "source": [ -    "### Add Edges" +    "#### Add Edges" ] }, { @@ -200,7 +216,7 @@ "id": "efe6eaaf", "metadata": {}, "source": [ -    "### Update Existing Nodes" +    "#### Update Existing Nodes" ] }, { @@ -229,7 +245,7 @@ "id": "bff6a1fc", "metadata": {}, "source": [ -    "### Setting cardinality based on the header\n", +    "#### Setting cardinality based on the header\n", "\n", " If you would like to save data using `single` cardinality then you can append `(single)` to the column header and\n", " set `use_header_cardinality=True` (default), e.g. a column named `name(single)` will save the `name` property as single cardinality. You can disable this by setting `use_header_cardinality=False`." ] }, @@ -257,13 +273,13 @@ "id": "beca9dab", "metadata": {}, "source": [ -    "## RDF\n", +    "### RDF\n", "\n", "The DataFrame must consist of triples with column names for the subject, predicate, and object specified. If none are provided, then `s`, `p`, and `o` are the defaults.\n", "\n", "If you want to add data into a named graph, then you will also need the graph column; the default is `g`.\n", "\n", -    "### Write Triples" +    "#### Write Triples" ] }, { @@ -293,7 +309,7 @@ "id": "b7a45c6a", "metadata": {}, "source": [ -    "### Write Quads" +    "#### Write Quads" ] }, { @@ -321,11 +337,11 @@ "id": "8370b377", "metadata": {}, "source": [ -    "# Flatten DataFrames\n", +    "## Flatten DataFrames\n", "\n", "One of the complexities of working with graph result sets in a row/column paradigm such as Pandas is that graph queries very often return complex, nested objects. To simplify working with those results in a tabular format, we have added a method to flatten the returned Pandas DataFrame.\n", "\n", -    "## Flattening the DataFrame" +    "### Flattening the DataFrame" ] }, { @@ -350,7 +366,7 @@ "id": "9324bff7", "metadata": {}, "source": [ -    "## Removing the prefixing of the parent column name" +    "### Removing the prefixing of the parent column name" ] }, { @@ -369,7 +385,7 @@ "id": "21738d39", "metadata": {}, "source": [ -    "## Specifying the column header separator" +    "### Specifying the column header separator" ] }, { @@ -388,7 +404,7 @@ "id": "1bded05b", "metadata": {}, "source": [ -    "# Putting it into a workflow" +    "## Putting it into a workflow" ] }, { @@ -406,7 +422,7 @@ "id": "cd49d635", "metadata": {}, "source": [ -    "## Running PageRank using NetworkX" +    "### Running PageRank using NetworkX" ] }, { @@ -445,7 +461,7 @@ "id": "783a599e", "metadata": {}, "source": [ -    "## Running PageRank using iGraph" +    "### Running PageRank using iGraph" ] }, {
From de1acf6d5cf98b5810980c2215872ccbc3ec4b67 Mon Sep 17 00:00:00 2001 From: Abdel Jaidi Date: Mon, 21 Mar 2022 14:59:42 +0000 Subject: [PATCH 30/32] Upgrading dependencies --- building/lambda/build-lambda-layer.sh | 2 +- poetry.lock | 2 +- pyproject.toml | 4 ++-- tests/test_moto.py | 1 + 4 files changed, 5 insertions(+), 4 deletions(-)
diff --git a/building/lambda/build-lambda-layer.sh b/building/lambda/build-lambda-layer.sh index 3d1aa71cb..298643e69 100644 --- a/building/lambda/build-lambda-layer.sh +++ b/building/lambda/build-lambda-layer.sh @@ -15,7 +15,7 @@ export LD_LIBRARY_PATH=$(pwd)/dist/lib:$LD_LIBRARY_PATH git clone \ --depth 1 \ -    --branch apache-arrow-6.0.1 \ +    --branch apache-arrow-7.0.1 \ --single-branch \
https://github.com/apache/arrow.git
diff --git a/poetry.lock b/poetry.lock index 7c8007ac5..b69da05b9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2308,7 +2308,7 @@ sqlserver = ["pyodbc"] [metadata] lock-version = "1.1" python-versions = ">=3.6.2, <3.11" -content-hash = "80f13438c6033d51c3020003cf52d6df176ec57c5bad69110945a01ec57f2785" +content-hash = "d2e1cd7f64223daf7c5582ade19cdb96db4a8665c05dbaafd2bb79ec4a19804a" [metadata.files] aenum = [
diff --git a/pyproject.toml b/pyproject.toml index 2321c060b..4f45412f5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,13 +32,13 @@ boto3 = "^1.20.17" botocore = "^1.23.17" pandas = [ { version = "~1.1.0", markers = "python_full_version ~= '3.6.2'" }, -    { version = "^1.2.0, < 1.4.0", markers = "python_full_version >= '3.7.1' and python_full_version < '4.0.0'" }, +    { version = "^1.2.0", markers = "python_full_version >= '3.7.1' and python_full_version < '4.0.0'" }, ] numpy = [ { version = "~1.18.0", markers = "python_full_version ~= '3.6.2'" }, { version = "^1.21.0", markers = "python_full_version >= '3.7.1' and python_full_version < '4.0.0'" }, ] -pyarrow = ">=2.0.0, <6.1.0" +pyarrow = ">=2.0.0, <7.1.0" redshift-connector = "~2.0.889" pymysql = ">=0.9.0, <1.1.0" pg8000 = ">=1.16.0, <1.23.0"
diff --git a/tests/test_moto.py b/tests/test_moto.py index 600d89f8a..b486347c2 100644 --- a/tests/test_moto.py +++ b/tests/test_moto.py @@ -66,6 +66,7 @@ def moto_dynamodb(): TableName="table", KeySchema=[{"AttributeName": "key", "KeyType": "HASH"}], AttributeDefinitions=[{"AttributeName": "key", "AttributeType": "N"}], +        BillingMode="PAY_PER_REQUEST", ) yield dynamodb
From f48830dfd828c443399e37419298d19d2fe3d2fa Mon Sep 17 00:00:00 2001 From: Dave Bechberger Date: Mon, 21 Mar 2022 07:45:29 -0800 Subject: [PATCH 31/32] Fixed an incomplete error message, removed the hard-coded cluster name from the tutorial, and added code to create the DataFrame for the cardinality tests.
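The cardinality fix below relies on a `_create_dummy_vertex()` helper that this patch does not define. A minimal sketch, assuming the helper only needs to fabricate one vertex-shaped record with the required `~id` and `~label` columns plus an `int` property (the column the tutorial renames to `int(single)`):

```python
import random
import uuid

def _create_dummy_vertex() -> dict:
    """Fabricate one vertex-shaped record (hypothetical reconstruction).

    ~id and ~label are the columns to_property_graph requires; the int and
    str properties are assumed from the rename performed in the tutorial.
    """
    return {
        "~id": str(uuid.uuid4()),
        "~label": "foo",  # placeholder label
        "int": random.randint(0, 1000),
        "str": uuid.uuid4().hex,
    }
```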
--- awswrangler/neptune/_utils.py | 7 ++++++- tutorials/033 - Amazon Neptune.ipynb | 4 +++- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/awswrangler/neptune/_utils.py b/awswrangler/neptune/_utils.py index e46d5426d..ab4072305 100644 --- a/awswrangler/neptune/_utils.py +++ b/awswrangler/neptune/_utils.py @@ -10,6 +10,7 @@ from gremlin_python.process.traversal import Cardinality, T from gremlin_python.structure.graph import Graph +from awswrangler import exceptions from awswrangler.neptune.client import NeptuneClient _logger: logging.Logger = logging.getLogger(__name__) @@ -58,7 +59,11 @@ def write_gremlin_df(client: NeptuneClient, df: pd.DataFrame, mode: WriteDFType, if res: g = Graph().traversal() else: - raise Exception("Need to fix why this errors") + _logger.debug(res) + raise exceptions.QueryFailed( + """Failed to insert part or all of the data in the DataFrame, + please check the log output for details.""" + ) return _run_gremlin_insert(client, g) diff --git a/tutorials/033 - Amazon Neptune.ipynb b/tutorials/033 - Amazon Neptune.ipynb index 132dcf615..e5cf3e4c3 100644 --- a/tutorials/033 - Amazon Neptune.ipynb +++ b/tutorials/033 - Amazon Neptune.ipynb @@ -38,7 +38,7 @@ "import awswrangler as wr\n", "import pandas as pd\n", "\n", - "url='air-routes-oc.cluster-cei5pmtr7fqq.us-west-2.neptune.amazonaws.com' # The Neptune Cluster endpoint\n", + "url='' # The Neptune Cluster endpoint\n", "iam_enabled = False # Set to True/False based on the configuration of your cluster\n", "neptune_port = 8182 # Set to the Neptune Cluster Port, Default is 8182\n", "client = wr.neptune.connect(url, neptune_port, iam_enabled=iam_enabled)" @@ -258,6 +258,8 @@ "metadata": {}, "outputs": [], "source": [ + "data = [_create_dummy_vertex()]\n", + "df = pd.DataFrame(data)\n", "# Adding (single) to the column name in the DataFrame will cause it to write that property as `single` cardinality\n", "df.rename(columns={\"int\": \"int(single)\"}, inplace=True)\n", "res = wr.neptune.to_property_graph(client, df, use_header_cardinality=True)\n", From 6ac2c6a0e0aa95ccc600d57aa906848c5086e4aa Mon Sep 17 00:00:00 2001 From: Abdel Jaidi Date: Mon, 21 Mar 2022 16:52:16 +0000 Subject: [PATCH 32/32] Minor - validate --- awswrangler/neptune/_utils.py | 3 +-- building/lambda/build-lambda-layer.sh | 2 +- poetry.lock | 2 +- pyproject.toml | 4 ++-- 4 files changed, 5 insertions(+), 6 deletions(-) diff --git a/awswrangler/neptune/_utils.py b/awswrangler/neptune/_utils.py index ab4072305..ef3f6eb5d 100644 --- a/awswrangler/neptune/_utils.py +++ b/awswrangler/neptune/_utils.py @@ -61,8 +61,7 @@ def write_gremlin_df(client: NeptuneClient, df: pd.DataFrame, mode: WriteDFType, else: _logger.debug(res) raise exceptions.QueryFailed( - """Failed to insert part or all of the data in the DataFrame, - please check the log output for details.""" + """Failed to insert part or all of the data in the DataFrame, please check the log output.""" ) return _run_gremlin_insert(client, g) diff --git a/building/lambda/build-lambda-layer.sh b/building/lambda/build-lambda-layer.sh index 298643e69..3d1aa71cb 100644 --- a/building/lambda/build-lambda-layer.sh +++ b/building/lambda/build-lambda-layer.sh @@ -15,7 +15,7 @@ export LD_LIBRARY_PATH=$(pwd)/dist/lib:$LD_LIBRARY_PATH git clone \ --depth 1 \ - --branch apache-arrow-7.0.1 \ + --branch apache-arrow-6.0.1 \ --single-branch \ https://github.com/apache/arrow.git diff --git a/poetry.lock b/poetry.lock index b69da05b9..7c8007ac5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2308,7 +2308,7 @@ 
sqlserver = ["pyodbc"] [metadata] lock-version = "1.1" python-versions = ">=3.6.2, <3.11" -content-hash = "d2e1cd7f64223daf7c5582ade19cdb96db4a8665c05dbaafd2bb79ec4a19804a" +content-hash = "80f13438c6033d51c3020003cf52d6df176ec57c5bad69110945a01ec57f2785" [metadata.files] aenum = [ diff --git a/pyproject.toml b/pyproject.toml index 4f45412f5..2321c060b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,13 +32,13 @@ boto3 = "^1.20.17" botocore = "^1.23.17" pandas = [ { version = "~1.1.0", markers = "python_full_version ~= '3.6.2'" }, - { version = "^1.2.0", markers = "python_full_version >= '3.7.1' and python_full_version < '4.0.0'" }, + { version = "^1.2.0, < 1.4.0", markers = "python_full_version >= '3.7.1' and python_full_version < '4.0.0'" }, ] numpy = [ { version = "~1.18.0", markers = "python_full_version ~= '3.6.2'" }, { version = "^1.21.0", markers = "python_full_version >= '3.7.1' and python_full_version < '4.0.0'" }, ] -pyarrow = ">=2.0.0, <7.1.0" +pyarrow = ">=2.0.0, <6.1.0" redshift-connector = "~2.0.889" pymysql = ">=0.9.0, <1.1.0" pg8000 = ">=1.16.0, <1.23.0"