diff --git a/.github/workflows/minimal-tests.yml b/.github/workflows/minimal-tests.yml index 0f467c62f..60a31194b 100644 --- a/.github/workflows/minimal-tests.yml +++ b/.github/workflows/minimal-tests.yml @@ -4,7 +4,6 @@ on: push: branches: - main - - main-governed-tables pull_request: branches: - main diff --git a/.github/workflows/static-checking.yml b/.github/workflows/static-checking.yml index 88eded943..b21320c4f 100644 --- a/.github/workflows/static-checking.yml +++ b/.github/workflows/static-checking.yml @@ -4,7 +4,6 @@ on: push: branches: - main - - main-governed-tables pull_request: branches: - main diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index e898ec21e..cd7a6af50 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -153,9 +153,9 @@ or ``cd scripts`` -* Deploy the Cloudformation template `base.yaml` +* Deploy the `base` CDK stack -``./deploy-base.sh`` +``./deploy-stack.sh base`` * Return to the project root directory @@ -175,7 +175,7 @@ or * [OPTIONAL] To remove the base test environment cloud formation stack post testing: -``./test_infra/scripts/delete-base.sh`` +``./test_infra/scripts/delete-stack.sh base`` ### Full test environment @@ -210,14 +210,18 @@ or ``cd scripts`` -* Deploy the Cloudformation templates `base.yaml` and `databases.yaml`. This step could take about 15 minutes to deploy. +* Deploy the `base` and `databases` CDK stacks. This step could take about 15 minutes to deploy. -``./deploy-base.sh`` -``./deploy-databases.sh`` +``./deploy-stack.sh base`` +``./deploy-stack.sh databases`` -* [OPTIONAL] Deploy the Cloudformation template `opensearch.yaml` (if you need to test Amazon OpenSearch Service). This step could take about 15 minutes to deploy. +* [OPTIONAL] Deploy the `lakeformation` CDK stack (if you need to test against the AWS Lake Formation Service). You must ensure Lake Formation is enabled in the account. 
-``./deploy-opensearch.sh`` +``./deploy-stack.sh lakeformation`` + +* [OPTIONAL] Deploy the `opensearch` CDK stack (if you need to test against the Amazon OpenSearch Service). This step could take about 15 minutes to deploy. + +``./deploy-stack.sh opensearch`` * Go to the `EC2 -> SecurityGroups` console, open the `aws-data-wrangler-*` security group and configure to accept your IP from any TCP port. - Alternatively run: @@ -254,9 +258,9 @@ or * [OPTIONAL] To remove the base test environment cloud formation stack post testing: -``./test_infra/scripts/delete-base.sh`` +``./test_infra/scripts/delete-stack.sh base`` -``./test_infra/scripts/delete-databases.sh`` +``./test_infra/scripts/delete-stack.sh databases`` ## Recommended Visual Studio Code Recommended setting diff --git a/README.md b/README.md index d1b918b38..d3799ff16 100644 --- a/README.md +++ b/README.md @@ -139,6 +139,7 @@ FROM "sampleDB"."sampleTable" ORDER BY time DESC LIMIT 3 - [029 - S3 Select](https://github.com/awslabs/aws-data-wrangler/blob/main/tutorials/029%20-%20S3%20Select.ipynb) - [030 - Data Api](https://github.com/awslabs/aws-data-wrangler/blob/main/tutorials/030%20-%20Data%20Api.ipynb) - [031 - OpenSearch](https://github.com/awslabs/aws-data-wrangler/blob/main/tutorials/031%20-%20OpenSearch.ipynb) + - [032 - Lake Formation Governed Tables](https://github.com/awslabs/aws-data-wrangler/blob/main/tutorials/032%20-%20Lake%20Formation%20Governed%20Tables.ipynb) - [**API Reference**](https://aws-data-wrangler.readthedocs.io/en/2.12.1/api.html) - [Amazon S3](https://aws-data-wrangler.readthedocs.io/en/2.12.1/api.html#amazon-s3) - [AWS Glue Catalog](https://aws-data-wrangler.readthedocs.io/en/2.12.1/api.html#aws-glue-catalog) diff --git a/awswrangler/__init__.py b/awswrangler/__init__.py index c87d36823..c3d5afe1f 100644 --- a/awswrangler/__init__.py +++ b/awswrangler/__init__.py @@ -16,6 +16,7 @@ dynamodb, emr, exceptions, + lakeformation, mysql, opensearch, postgresql, @@ -44,6 +45,7 @@ "s3", "sts",
"redshift", + "lakeformation", "mysql", "postgresql", "secretsmanager", diff --git a/awswrangler/_config.py b/awswrangler/_config.py index 54b77e653..babf92f93 100644 --- a/awswrangler/_config.py +++ b/awswrangler/_config.py @@ -43,6 +43,7 @@ class _ConfigArg(NamedTuple): "redshift_endpoint_url": _ConfigArg(dtype=str, nullable=True, enforced=True), "kms_endpoint_url": _ConfigArg(dtype=str, nullable=True, enforced=True), "emr_endpoint_url": _ConfigArg(dtype=str, nullable=True, enforced=True), + "lakeformation_endpoint_url": _ConfigArg(dtype=str, nullable=True, enforced=True), "dynamodb_endpoint_url": _ConfigArg(dtype=str, nullable=True, enforced=True), "secretsmanager_endpoint_url": _ConfigArg(dtype=str, nullable=True, enforced=True), # Botocore config @@ -50,7 +51,7 @@ class _ConfigArg(NamedTuple): } -class _Config: # pylint: disable=too-many-instance-attributes, too-many-public-methods +class _Config: # pylint: disable=too-many-instance-attributes,too-many-public-methods """Wrangler's Configuration class.""" def __init__(self) -> None: @@ -63,6 +64,7 @@ def __init__(self) -> None: self.redshift_endpoint_url = None self.kms_endpoint_url = None self.emr_endpoint_url = None + self.lakeformation_endpoint_url = None self.dynamodb_endpoint_url = None self.secretsmanager_endpoint_url = None self.botocore_config = None @@ -356,6 +358,15 @@ def emr_endpoint_url(self) -> Optional[str]: def emr_endpoint_url(self, value: Optional[str]) -> None: self._set_config_value(key="emr_endpoint_url", value=value) + @property + def lakeformation_endpoint_url(self) -> Optional[str]: + """Property lakeformation_endpoint_url.""" + return cast(Optional[str], self["lakeformation_endpoint_url"]) + + @lakeformation_endpoint_url.setter + def lakeformation_endpoint_url(self, value: Optional[str]) -> None: + self._set_config_value(key="lakeformation_endpoint_url", value=value) + @property def dynamodb_endpoint_url(self) -> Optional[str]: """Property dynamodb_endpoint_url.""" diff --git 
a/awswrangler/_utils.py b/awswrangler/_utils.py index bf04f642c..47dec6ab8 100644 --- a/awswrangler/_utils.py +++ b/awswrangler/_utils.py @@ -93,6 +93,8 @@ def _get_endpoint_url(service_name: str) -> Optional[str]: endpoint_url = _config.config.kms_endpoint_url elif service_name == "emr" and _config.config.emr_endpoint_url is not None: endpoint_url = _config.config.emr_endpoint_url + elif service_name == "lakeformation" and _config.config.lakeformation_endpoint_url is not None: + endpoint_url = _config.config.lakeformation_endpoint_url elif service_name == "dynamodb" and _config.config.dynamodb_endpoint_url is not None: endpoint_url = _config.config.dynamodb_endpoint_url elif service_name == "secretsmanager" and _config.config.secretsmanager_endpoint_url is not None: diff --git a/awswrangler/catalog/_add.py b/awswrangler/catalog/_add.py index 1fe5c23b4..0e6c6cf7c 100644 --- a/awswrangler/catalog/_add.py +++ b/awswrangler/catalog/_add.py @@ -14,7 +14,7 @@ _parquet_partition_definition, _update_table_definition, ) -from awswrangler.catalog._utils import _catalog_id, sanitize_table_name +from awswrangler.catalog._utils import _catalog_id, _transaction_id, sanitize_table_name _logger: logging.Logger = logging.getLogger(__name__) @@ -300,7 +300,8 @@ def add_column( table: str, column_name: str, column_type: str = "string", - column_comment: Optional[str] = "", + column_comment: Optional[str] = None, + transaction_id: Optional[str] = None, boto3_session: Optional[boto3.Session] = None, catalog_id: Optional[str] = None, ) -> None: @@ -318,6 +319,8 @@ def add_column( Column type. column_comment : str Column Comment + transaction_id: str, optional + The ID of the transaction (i.e. used with GOVERNED tables). boto3_session : boto3.Session(), optional Boto3 Session. The default boto3 session will be used if boto3_session receive None. 
catalog_id : str, optional @@ -341,13 +344,21 @@ def add_column( """ if _check_column_type(column_type): client_glue: boto3.client = _utils.client(service_name="glue", session=boto3_session) - table_res: Dict[str, Any] = client_glue.get_table(DatabaseName=database, Name=table) + table_res: Dict[str, Any] = client_glue.get_table( + **_catalog_id( + catalog_id=catalog_id, + **_transaction_id(transaction_id=transaction_id, DatabaseName=database, Name=table), + ) + ) table_input: Dict[str, Any] = _update_table_definition(table_res) table_input["StorageDescriptor"]["Columns"].append( {"Name": column_name, "Type": column_type, "Comment": column_comment} ) res: Dict[str, Any] = client_glue.update_table( - **_catalog_id(catalog_id=catalog_id, DatabaseName=database, TableInput=table_input) + **_catalog_id( + catalog_id=catalog_id, + **_transaction_id(transaction_id=transaction_id, DatabaseName=database, TableInput=table_input), + ) ) if ("Errors" in res) and res["Errors"]: for error in res["Errors"]: diff --git a/awswrangler/catalog/_create.py b/awswrangler/catalog/_create.py index adefbff1b..ac61867a3 100644 --- a/awswrangler/catalog/_create.py +++ b/awswrangler/catalog/_create.py @@ -10,7 +10,7 @@ from awswrangler.catalog._definitions import _csv_table_definition, _json_table_definition, _parquet_table_definition from awswrangler.catalog._delete import delete_all_partitions, delete_table_if_exists from awswrangler.catalog._get import _get_table_input -from awswrangler.catalog._utils import _catalog_id, sanitize_column_name, sanitize_table_name +from awswrangler.catalog._utils import _catalog_id, _transaction_id, sanitize_column_name, sanitize_table_name _logger: logging.Logger = logging.getLogger(__name__) @@ -24,7 +24,7 @@ def _update_if_necessary(dic: Dict[str, str], key: str, value: Optional[str], mo return mode -def _create_table( # pylint: disable=too-many-branches,too-many-statements +def _create_table( # pylint: 
disable=too-many-branches,too-many-statements,too-many-locals database: str, table: str, description: Optional[str], @@ -33,10 +33,12 @@ def _create_table( # pylint: disable=too-many-branches,too-many-statements catalog_versioning: bool, boto3_session: Optional[boto3.Session], table_input: Dict[str, Any], + table_type: Optional[str], table_exist: bool, projection_enabled: bool, partitions_types: Optional[Dict[str, str]], columns_comments: Optional[Dict[str, str]], + transaction_id: Optional[str], projection_types: Optional[Dict[str, str]], projection_ranges: Optional[Dict[str, str]], projection_values: Optional[Dict[str, str]], @@ -127,28 +129,27 @@ def _create_table( # pylint: disable=too-many-branches,too-many-statements raise exceptions.InvalidArgument( f"{mode} is not a valid mode. It must be 'overwrite', 'append' or 'overwrite_partitions'." ) - if table_exist is True and mode == "overwrite": - delete_all_partitions(table=table, database=database, catalog_id=catalog_id, boto3_session=session) + args: Dict[str, Any] = _catalog_id( + catalog_id=catalog_id, + **_transaction_id( + transaction_id=transaction_id, + DatabaseName=database, + TableInput=table_input, + ), + ) + if table_exist: _logger.debug("Updating table (%s)...", mode) - client_glue.update_table( - **_catalog_id( - catalog_id=catalog_id, DatabaseName=database, TableInput=table_input, SkipArchive=skip_archive - ) - ) - elif (table_exist is True) and (mode in ("append", "overwrite_partitions", "update")): - if mode == "update": - _logger.debug("Updating table (%s)...", mode) - client_glue.update_table( - **_catalog_id( - catalog_id=catalog_id, DatabaseName=database, TableInput=table_input, SkipArchive=skip_archive - ) - ) - elif table_exist is False: + args["SkipArchive"] = skip_archive + if mode == "overwrite": + if table_type != "GOVERNED": + delete_all_partitions(table=table, database=database, catalog_id=catalog_id, boto3_session=session) + client_glue.update_table(**args) + elif mode == "update": + 
client_glue.update_table(**args) + else: try: _logger.debug("Creating table (%s)...", mode) - client_glue.create_table( - **_catalog_id(catalog_id=catalog_id, DatabaseName=database, TableInput=table_input) - ) + client_glue.create_table(**args) except client_glue.exceptions.AlreadyExistsException: if mode == "overwrite": _utils.try_it( @@ -159,6 +160,7 @@ def _create_table( # pylint: disable=too-many-branches,too-many-statements database=database, table=table, table_input=table_input, + transaction_id=transaction_id, boto3_session=boto3_session, ) _logger.debug("Leaving table as is (%s)...", mode) @@ -170,15 +172,31 @@ def _overwrite_table( database: str, table: str, table_input: Dict[str, Any], + transaction_id: Optional[str], boto3_session: boto3.Session, ) -> None: - delete_table_if_exists(database=database, table=table, boto3_session=boto3_session, catalog_id=catalog_id) - client_glue.create_table(**_catalog_id(catalog_id=catalog_id, DatabaseName=database, TableInput=table_input)) + delete_table_if_exists( + database=database, + table=table, + transaction_id=transaction_id, + boto3_session=boto3_session, + catalog_id=catalog_id, + ) + args: Dict[str, Any] = _catalog_id( + catalog_id=catalog_id, + **_transaction_id( + transaction_id=transaction_id, + DatabaseName=database, + TableInput=table_input, + ), + ) + client_glue.create_table(**args) def _upsert_table_parameters( parameters: Dict[str, str], database: str, + transaction_id: Optional[str], catalog_versioning: bool, catalog_id: Optional[str], table_input: Dict[str, Any], @@ -194,6 +212,7 @@ def _upsert_table_parameters( _overwrite_table_parameters( parameters=pars, database=database, + transaction_id=transaction_id, catalog_id=catalog_id, boto3_session=boto3_session, table_input=table_input, @@ -205,6 +224,7 @@ def _upsert_table_parameters( def _overwrite_table_parameters( parameters: Dict[str, str], database: str, + transaction_id: Optional[str], catalog_versioning: bool, catalog_id: Optional[str], 
table_input: Dict[str, Any], @@ -214,7 +234,12 @@ def _overwrite_table_parameters( client_glue: boto3.client = _utils.client(service_name="glue", session=boto3_session) skip_archive: bool = not catalog_versioning client_glue.update_table( - **_catalog_id(catalog_id=catalog_id, DatabaseName=database, TableInput=table_input, SkipArchive=skip_archive) + **_catalog_id( + catalog_id=catalog_id, + **_transaction_id( + transaction_id=transaction_id, DatabaseName=database, TableInput=table_input, SkipArchive=skip_archive + ), + ) ) return parameters @@ -224,6 +249,7 @@ def _create_parquet_table( table: str, path: str, columns_types: Dict[str, str], + table_type: Optional[str], partitions_types: Optional[Dict[str, str]], bucketing_info: Optional[Tuple[List[str], int]], catalog_id: Optional[str], @@ -234,6 +260,7 @@ def _create_parquet_table( mode: str, catalog_versioning: bool, projection_enabled: bool, + transaction_id: Optional[str], projection_types: Optional[Dict[str, str]], projection_ranges: Optional[Dict[str, str]], projection_values: Optional[Dict[str, str]], @@ -264,6 +291,7 @@ def _create_parquet_table( table=table, path=path, columns_types=columns_types, + table_type=table_type, partitions_types=partitions_types, bucketing_info=bucketing_info, compression=compression, @@ -280,8 +308,10 @@ def _create_parquet_table( catalog_versioning=catalog_versioning, boto3_session=boto3_session, table_input=table_input, + table_type=table_type, table_exist=table_exist, partitions_types=partitions_types, + transaction_id=transaction_id, projection_enabled=projection_enabled, projection_types=projection_types, projection_ranges=projection_ranges, @@ -293,11 +323,12 @@ def _create_parquet_table( ) -def _create_csv_table( # pylint: disable=too-many-arguments +def _create_csv_table( # pylint: disable=too-many-arguments,too-many-locals database: str, table: str, - path: str, + path: Optional[str], columns_types: Dict[str, str], + table_type: Optional[str], partitions_types: 
Optional[Dict[str, str]], bucketing_info: Optional[Tuple[List[str], int]], description: Optional[str], @@ -305,6 +336,7 @@ def _create_csv_table( # pylint: disable=too-many-arguments parameters: Optional[Dict[str, str]], columns_comments: Optional[Dict[str, str]], mode: str, + transaction_id: Optional[str], catalog_versioning: bool, schema_evolution: bool, sep: str, @@ -335,6 +367,7 @@ def _create_csv_table( # pylint: disable=too-many-arguments table=table, path=path, columns_types=columns_types, + table_type=table_type, partitions_types=partitions_types, bucketing_info=bucketing_info, compression=compression, @@ -355,8 +388,10 @@ def _create_csv_table( # pylint: disable=too-many-arguments catalog_versioning=catalog_versioning, boto3_session=boto3_session, table_input=table_input, + table_type=table_type, table_exist=table_exist, partitions_types=partitions_types, + transaction_id=transaction_id, projection_enabled=projection_enabled, projection_types=projection_types, projection_ranges=projection_ranges, @@ -373,6 +408,7 @@ def _create_json_table( # pylint: disable=too-many-arguments table: str, path: str, columns_types: Dict[str, str], + table_type: Optional[str], partitions_types: Optional[Dict[str, str]], bucketing_info: Optional[Tuple[List[str], int]], description: Optional[str], @@ -382,6 +418,7 @@ def _create_json_table( # pylint: disable=too-many-arguments mode: str, catalog_versioning: bool, schema_evolution: bool, + transaction_id: Optional[str], serde_library: Optional[str], serde_parameters: Optional[Dict[str, str]], boto3_session: Optional[boto3.Session], @@ -408,6 +445,7 @@ def _create_json_table( # pylint: disable=too-many-arguments table=table, path=path, columns_types=columns_types, + table_type=table_type, partitions_types=partitions_types, bucketing_info=bucketing_info, compression=compression, @@ -424,8 +462,10 @@ def _create_json_table( # pylint: disable=too-many-arguments columns_comments=columns_comments, mode=mode, 
catalog_versioning=catalog_versioning, + transaction_id=transaction_id, boto3_session=boto3_session, table_input=table_input, + table_type=table_type, table_exist=table_exist, partitions_types=partitions_types, projection_enabled=projection_enabled, @@ -444,6 +484,7 @@ def upsert_table_parameters( parameters: Dict[str, str], database: str, table: str, + transaction_id: Optional[str] = None, catalog_versioning: bool = False, catalog_id: Optional[str] = None, boto3_session: Optional[boto3.Session] = None, @@ -458,6 +499,8 @@ def upsert_table_parameters( Database name. table : str Table name. + transaction_id: str, optional + The ID of the transaction (i.e. used with GOVERNED tables). catalog_versioning : bool If True and `mode="overwrite"`, creates an archived version of the table catalog before updating it. catalog_id : str, optional @@ -482,7 +525,7 @@ def upsert_table_parameters( """ session: boto3.Session = _utils.ensure_session(session=boto3_session) table_input: Optional[Dict[str, str]] = _get_table_input( - database=database, table=table, boto3_session=session, catalog_id=catalog_id + database=database, table=table, boto3_session=session, transaction_id=transaction_id, catalog_id=catalog_id ) if table_input is None: raise exceptions.InvalidArgumentValue(f"Table {database}.{table} does not exist.") @@ -490,6 +533,7 @@ def upsert_table_parameters( parameters=parameters, database=database, boto3_session=session, + transaction_id=transaction_id, catalog_id=catalog_id, table_input=table_input, catalog_versioning=catalog_versioning, @@ -501,6 +545,7 @@ def overwrite_table_parameters( parameters: Dict[str, str], database: str, table: str, + transaction_id: Optional[str] = None, catalog_versioning: bool = False, catalog_id: Optional[str] = None, boto3_session: Optional[boto3.Session] = None, @@ -515,6 +560,8 @@ def overwrite_table_parameters( Database name. table : str Table name. + transaction_id: str, optional + The ID of the transaction (i.e. 
used with GOVERNED tables). catalog_versioning : bool If True and `mode="overwrite"`, creates an archived version of the table catalog before updating it. catalog_id : str, optional @@ -539,7 +586,7 @@ def overwrite_table_parameters( """ session: boto3.Session = _utils.ensure_session(session=boto3_session) table_input: Optional[Dict[str, Any]] = _get_table_input( - database=database, table=table, catalog_id=catalog_id, boto3_session=session + database=database, table=table, transaction_id=transaction_id, catalog_id=catalog_id, boto3_session=session ) if table_input is None: raise exceptions.InvalidTable(f"Table {table} does not exist on database {database}.") @@ -547,6 +594,7 @@ def overwrite_table_parameters( parameters=parameters, database=database, catalog_id=catalog_id, + transaction_id=transaction_id, table_input=table_input, boto3_session=session, catalog_versioning=catalog_versioning, @@ -611,6 +659,7 @@ def create_parquet_table( table: str, path: str, columns_types: Dict[str, str], + table_type: Optional[str] = None, partitions_types: Optional[Dict[str, str]] = None, bucketing_info: Optional[Tuple[List[str], int]] = None, catalog_id: Optional[str] = None, @@ -620,6 +669,7 @@ def create_parquet_table( columns_comments: Optional[Dict[str, str]] = None, mode: str = "overwrite", catalog_versioning: bool = False, + transaction_id: Optional[str] = None, projection_enabled: bool = False, projection_types: Optional[Dict[str, str]] = None, projection_ranges: Optional[Dict[str, str]] = None, @@ -643,6 +693,8 @@ def create_parquet_table( Amazon S3 path (e.g. s3://bucket/prefix/). columns_types: Dict[str, str] Dictionary with keys as column names and values as data types (e.g. {'col0': 'bigint', 'col1': 'double'}). + table_type: str, optional + The type of the Glue Table (EXTERNAL_TABLE, GOVERNED...). Set to EXTERNAL_TABLE if None partitions_types: Dict[str, str], optional Dictionary with keys as partition names and values as data types (e.g. {'col2': 'date'}). 
bucketing_info: Tuple[List[str], int], optional @@ -664,6 +716,8 @@ def create_parquet_table( 'overwrite' to recreate any possible existing table or 'append' to keep any possible existing table. catalog_versioning : bool If True and `mode="overwrite"`, creates an archived version of the table catalog before updating it. + transaction_id: str, optional + The ID of the transaction (i.e. used with GOVERNED tables). projection_enabled : bool Enable Partition Projection on Athena (https://docs.aws.amazon.com/athena/latest/ug/partition-projection.html) projection_types : Optional[Dict[str, str]] @@ -718,13 +772,14 @@ def create_parquet_table( """ session: boto3.Session = _utils.ensure_session(session=boto3_session) catalog_table_input: Optional[Dict[str, Any]] = _get_table_input( - database=database, table=table, boto3_session=session, catalog_id=catalog_id + database=database, table=table, boto3_session=session, transaction_id=transaction_id, catalog_id=catalog_id ) _create_parquet_table( database=database, table=table, path=path, columns_types=columns_types, + table_type=table_type, partitions_types=partitions_types, bucketing_info=bucketing_info, catalog_id=catalog_id, @@ -734,6 +789,7 @@ def create_parquet_table( columns_comments=columns_comments, mode=mode, catalog_versioning=catalog_versioning, + transaction_id=transaction_id, projection_enabled=projection_enabled, projection_types=projection_types, projection_ranges=projection_ranges, @@ -752,6 +808,7 @@ def create_csv_table( # pylint: disable=too-many-arguments table: str, path: str, columns_types: Dict[str, str], + table_type: Optional[str] = None, partitions_types: Optional[Dict[str, str]] = None, bucketing_info: Optional[Tuple[List[str], int]] = None, compression: Optional[str] = None, @@ -765,6 +822,7 @@ def create_csv_table( # pylint: disable=too-many-arguments skip_header_line_count: Optional[int] = None, serde_library: Optional[str] = None, serde_parameters: Optional[Dict[str, str]] = None, + 
transaction_id: Optional[str] = None, boto3_session: Optional[boto3.Session] = None, projection_enabled: bool = False, projection_types: Optional[Dict[str, str]] = None, @@ -789,6 +847,8 @@ def create_csv_table( # pylint: disable=too-many-arguments Amazon S3 path (e.g. s3://bucket/prefix/). columns_types: Dict[str, str] Dictionary with keys as column names and values as data types (e.g. {'col0': 'bigint', 'col1': 'double'}). + table_type: str, optional + The type of the Glue Table (EXTERNAL_TABLE, GOVERNED...). Set to EXTERNAL_TABLE if None partitions_types: Dict[str, str], optional Dictionary with keys as partition names and values as data types (e.g. {'col2': 'date'}). bucketing_info: Tuple[List[str], int], optional @@ -823,6 +883,8 @@ def create_csv_table( # pylint: disable=too-many-arguments serde_parameters : Optional[str] Dictionary of initialization parameters for the SerDe. The default is `{"field.delim": sep, "escape.delim": "\\"}`. + transaction_id: str, optional + The ID of the transaction (i.e. used with GOVERNED tables). 
projection_enabled : bool Enable Partition Projection on Athena (https://docs.aws.amazon.com/athena/latest/ug/partition-projection.html) projection_types : Optional[Dict[str, str]] @@ -880,13 +942,14 @@ def create_csv_table( # pylint: disable=too-many-arguments """ session: boto3.Session = _utils.ensure_session(session=boto3_session) catalog_table_input: Optional[Dict[str, Any]] = _get_table_input( - database=database, table=table, boto3_session=session, catalog_id=catalog_id + database=database, table=table, boto3_session=session, transaction_id=transaction_id, catalog_id=catalog_id ) _create_csv_table( database=database, table=table, path=path, columns_types=columns_types, + table_type=table_type, partitions_types=partitions_types, bucketing_info=bucketing_info, catalog_id=catalog_id, @@ -896,6 +959,7 @@ def create_csv_table( # pylint: disable=too-many-arguments columns_comments=columns_comments, mode=mode, catalog_versioning=catalog_versioning, + transaction_id=transaction_id, schema_evolution=schema_evolution, projection_enabled=projection_enabled, projection_types=projection_types, @@ -914,11 +978,12 @@ def create_csv_table( # pylint: disable=too-many-arguments @apply_configs -def create_json_table( +def create_json_table( # pylint: disable=too-many-arguments database: str, table: str, path: str, columns_types: Dict[str, str], + table_type: Optional[str] = None, partitions_types: Optional[Dict[str, str]] = None, bucketing_info: Optional[Tuple[List[str], int]] = None, compression: Optional[str] = None, @@ -930,6 +995,7 @@ def create_json_table( schema_evolution: bool = False, serde_library: Optional[str] = None, serde_parameters: Optional[Dict[str, str]] = None, + transaction_id: Optional[str] = None, boto3_session: Optional[boto3.Session] = None, projection_enabled: bool = False, projection_types: Optional[Dict[str, str]] = None, @@ -954,6 +1020,8 @@ def create_json_table( Amazon S3 path (e.g. s3://bucket/prefix/). 
columns_types: Dict[str, str] Dictionary with keys as column names and values as data types (e.g. {'col0': 'bigint', 'col1': 'double'}). + table_type: str, optional + The type of the Glue Table (EXTERNAL_TABLE, GOVERNED...). Set to EXTERNAL_TABLE if None partitions_types: Dict[str, str], optional Dictionary with keys as partition names and values as data types (e.g. {'col2': 'date'}). bucketing_info: Tuple[List[str], int], optional @@ -984,6 +1052,8 @@ def create_json_table( serde_parameters : Optional[str] Dictionary of initialization parameters for the SerDe. The default is `{"field.delim": sep, "escape.delim": "\\"}`. + transaction_id: str, optional + The ID of the transaction (i.e. used with GOVERNED tables). projection_enabled : bool Enable Partition Projection on Athena (https://docs.aws.amazon.com/athena/latest/ug/partition-projection.html) projection_types : Optional[Dict[str, str]] @@ -1047,6 +1117,7 @@ def create_json_table( table=table, path=path, columns_types=columns_types, + table_type=table_type, partitions_types=partitions_types, bucketing_info=bucketing_info, catalog_id=catalog_id, @@ -1056,6 +1127,7 @@ def create_json_table( columns_comments=columns_comments, mode=mode, catalog_versioning=catalog_versioning, + transaction_id=transaction_id, schema_evolution=schema_evolution, projection_enabled=projection_enabled, projection_types=projection_types, diff --git a/awswrangler/catalog/_definitions.py b/awswrangler/catalog/_definitions.py index 99b4501ca..9a3d96f9b 100644 --- a/awswrangler/catalog/_definitions.py +++ b/awswrangler/catalog/_definitions.py @@ -31,6 +31,7 @@ def _parquet_table_definition( table: str, path: str, columns_types: Dict[str, str], + table_type: Optional[str], partitions_types: Dict[str, str], bucketing_info: Optional[Tuple[List[str], int]], compression: Optional[str], @@ -39,7 +40,7 @@ def _parquet_table_definition( return { "Name": table, "PartitionKeys": [{"Name": cname, "Type": dtype} for cname, dtype in 
partitions_types.items()], - "TableType": "EXTERNAL_TABLE", + "TableType": "EXTERNAL_TABLE" if table_type is None else table_type, "Parameters": {"classification": "parquet", "compressionType": str(compression).lower(), "typeOfData": "file"}, "StorageDescriptor": { "Columns": [{"Name": cname, "Type": dtype} for cname, dtype in columns_types.items()], @@ -100,8 +101,9 @@ def _parquet_partition_definition( def _csv_table_definition( table: str, - path: str, + path: Optional[str], columns_types: Dict[str, str], + table_type: Optional[str], partitions_types: Dict[str, str], bucketing_info: Optional[Tuple[List[str], int]], compression: Optional[str], @@ -130,7 +132,7 @@ def _csv_table_definition( return { "Name": table, "PartitionKeys": [{"Name": cname, "Type": dtype} for cname, dtype in partitions_types.items()], - "TableType": "EXTERNAL_TABLE", + "TableType": "EXTERNAL_TABLE" if table_type is None else table_type, "Parameters": parameters, "StorageDescriptor": { "Columns": [{"Name": cname, "Type": dtype} for cname, dtype in columns_types.items()], @@ -191,6 +193,7 @@ def _json_table_definition( table: str, path: str, columns_types: Dict[str, str], + table_type: Optional[str], partitions_types: Dict[str, str], bucketing_info: Optional[Tuple[List[str], int]], compression: Optional[str], @@ -210,7 +213,7 @@ def _json_table_definition( return { "Name": table, "PartitionKeys": [{"Name": cname, "Type": dtype} for cname, dtype in partitions_types.items()], - "TableType": "EXTERNAL_TABLE", + "TableType": "EXTERNAL_TABLE" if table_type is None else table_type, "Parameters": parameters, "StorageDescriptor": { "Columns": [{"Name": cname, "Type": dtype} for cname, dtype in columns_types.items()], diff --git a/awswrangler/catalog/_delete.py b/awswrangler/catalog/_delete.py index b14893e60..27bdb07e8 100644 --- a/awswrangler/catalog/_delete.py +++ b/awswrangler/catalog/_delete.py @@ -9,7 +9,7 @@ from awswrangler._config import apply_configs from awswrangler.catalog._definitions 
import _update_table_definition from awswrangler.catalog._get import _get_partitions -from awswrangler.catalog._utils import _catalog_id +from awswrangler.catalog._utils import _catalog_id, _transaction_id _logger: logging.Logger = logging.getLogger(__name__) @@ -46,7 +46,11 @@ def delete_database(name: str, catalog_id: Optional[str] = None, boto3_session: @apply_configs def delete_table_if_exists( - database: str, table: str, catalog_id: Optional[str] = None, boto3_session: Optional[boto3.Session] = None + database: str, + table: str, + transaction_id: Optional[str] = None, + catalog_id: Optional[str] = None, + boto3_session: Optional[boto3.Session] = None, ) -> bool: """Delete Glue table if exists. @@ -56,6 +60,8 @@ def delete_table_if_exists( Database name. table : str Table name. + transaction_id: str, optional + The ID of the transaction (i.e. used with GOVERNED tables). catalog_id : str, optional The ID of the Data Catalog from which to retrieve Databases. If none is provided, the AWS account ID is used by default. @@ -78,7 +84,13 @@ def delete_table_if_exists( """ client_glue: boto3.client = _utils.client(service_name="glue", session=boto3_session) try: - client_glue.delete_table(**_catalog_id(DatabaseName=database, Name=table, catalog_id=catalog_id)) + client_glue.delete_table( + **_catalog_id( + **_transaction_id( + transaction_id=transaction_id, DatabaseName=database, Name=table, catalog_id=catalog_id + ) + ) + ) return True except client_glue.exceptions.EntityNotFoundException: return False @@ -189,6 +201,7 @@ def delete_column( database: str, table: str, column_name: str, + transaction_id: Optional[str] = None, boto3_session: Optional[boto3.Session] = None, catalog_id: Optional[str] = None, ) -> None: @@ -202,6 +215,8 @@ def delete_column( Table name. column_name : str Column name + transaction_id: str, optional + The ID of the transaction (i.e. used with GOVERNED tables). boto3_session : boto3.Session(), optional Boto3 Session. 
The default boto3 session will be used if boto3_session receive None. catalog_id : str, optional @@ -224,14 +239,20 @@ def delete_column( """ client_glue: boto3.client = _utils.client(service_name="glue", session=boto3_session) table_res: Dict[str, Any] = client_glue.get_table( - **_catalog_id(catalog_id=catalog_id, DatabaseName=database, Name=table) + **_catalog_id( + catalog_id=catalog_id, + **_transaction_id(transaction_id=transaction_id, DatabaseName=database, Name=table), + ) ) table_input: Dict[str, Any] = _update_table_definition(table_res) table_input["StorageDescriptor"]["Columns"] = [ i for i in table_input["StorageDescriptor"]["Columns"] if i["Name"] != column_name ] res: Dict[str, Any] = client_glue.update_table( - **_catalog_id(catalog_id=catalog_id, DatabaseName=database, TableInput=table_input) + **_catalog_id( + catalog_id=catalog_id, + **_transaction_id(transaction_id=transaction_id, DatabaseName=database, TableInput=table_input), + ) ) if ("Errors" in res) and res["Errors"]: for error in res["Errors"]: diff --git a/awswrangler/catalog/_get.py b/awswrangler/catalog/_get.py index 717e570ec..575ca6857 100644 --- a/awswrangler/catalog/_get.py +++ b/awswrangler/catalog/_get.py @@ -12,19 +12,24 @@ from awswrangler import _utils, exceptions from awswrangler._config import apply_configs -from awswrangler.catalog._utils import _catalog_id, _extract_dtypes_from_table_details +from awswrangler.catalog._utils import _catalog_id, _extract_dtypes_from_table_details, _transaction_id _logger: logging.Logger = logging.getLogger(__name__) def _get_table_input( - database: str, table: str, boto3_session: Optional[boto3.Session], catalog_id: Optional[str] = None + database: str, + table: str, + boto3_session: Optional[boto3.Session], + transaction_id: Optional[str] = None, + catalog_id: Optional[str] = None, ) -> Optional[Dict[str, Any]]: client_glue: boto3.client = _utils.client(service_name="glue", session=boto3_session) + args: Dict[str, Any] = _catalog_id( + 
catalog_id=catalog_id, **_transaction_id(transaction_id=transaction_id, DatabaseName=database, Name=table) + ) try: - response: Dict[str, Any] = client_glue.get_table( - **_catalog_id(catalog_id=catalog_id, DatabaseName=database, Name=table) - ) + response: Dict[str, Any] = client_glue.get_table(**args) except client_glue.exceptions.EntityNotFoundException: return None table_input: Dict[str, Any] = {} @@ -71,16 +76,15 @@ def _get_partitions( ) -> Dict[str, List[str]]: client_glue: boto3.client = _utils.client(service_name="glue", session=boto3_session) - args: Dict[str, Any] = { - "DatabaseName": database, - "TableName": table, - "MaxResults": 1_000, - "Segment": {"SegmentNumber": 0, "TotalSegments": 1}, - } + args: Dict[str, Any] = _catalog_id( + catalog_id=catalog_id, + DatabaseName=database, + TableName=table, + MaxResults=1_000, + Segment={"SegmentNumber": 0, "TotalSegments": 1}, + ) if expression is not None: args["Expression"] = expression - if catalog_id is not None: - args["CatalogId"] = catalog_id partitions_values: Dict[str, List[str]] = {} _logger.debug("Starting pagination...") @@ -98,17 +102,34 @@ def _get_partitions( @apply_configs def get_table_types( - database: str, table: str, boto3_session: Optional[boto3.Session] = None + database: str, + table: str, + transaction_id: Optional[str] = None, + query_as_of_time: Optional[str] = None, + catalog_id: Optional[str] = None, + boto3_session: Optional[boto3.Session] = None, ) -> Optional[Dict[str, str]]: """Get all columns and types from a table. + Note + ---- + If reading from a governed table, pass only one of `transaction_id` or `query_as_of_time`. + Parameters ---------- - database : str + database: str Database name. - table : str + table: str Table name. - boto3_session : boto3.Session(), optional + transaction_id: str, optional + The ID of the transaction (i.e. used with GOVERNED tables). + query_as_of_time: str, optional + The time as of when to read the table contents. 
Must be a valid Unix epoch timestamp. + Cannot be specified alongside transaction_id. + catalog_id: str, optional + The ID of the Data Catalog from which to retrieve Databases. + If none is provided, the AWS account ID is used by default. + boto3_session: boto3.Session(), optional Boto3 Session. The default boto3 session will be used if boto3_session receive None. Returns @@ -125,7 +146,14 @@ def get_table_types( """ client_glue: boto3.client = _utils.client(service_name="glue", session=boto3_session) try: - response: Dict[str, Any] = client_glue.get_table(DatabaseName=database, Name=table) + response: Dict[str, Any] = client_glue.get_table( + **_catalog_id( + catalog_id=catalog_id, + **_transaction_id( + transaction_id=transaction_id, query_as_of_time=query_as_of_time, DatabaseName=database, Name=table + ), + ) + ) except client_glue.exceptions.EntityNotFoundException: return None return _extract_dtypes_from_table_details(response=response) @@ -203,6 +231,7 @@ def databases( def get_tables( catalog_id: Optional[str] = None, database: Optional[str] = None, + transaction_id: Optional[str] = None, name_contains: Optional[str] = None, name_prefix: Optional[str] = None, name_suffix: Optional[str] = None, @@ -222,6 +251,8 @@ def get_tables( If none is provided, the AWS account ID is used by default. database : str, optional Database name. + transaction_id: str, optional + The ID of the transaction (i.e. used with GOVERNED tables). 
name_contains : str, optional Select by a specific string on table name name_prefix : str, optional @@ -245,8 +276,6 @@ def get_tables( client_glue: boto3.client = _utils.client(service_name="glue", session=boto3_session) paginator = client_glue.get_paginator("get_tables") args: Dict[str, str] = {} - if catalog_id is not None: - args["CatalogId"] = catalog_id if (name_prefix is not None) and (name_suffix is not None) and (name_contains is not None): raise exceptions.InvalidArgumentCombination( "Please, does not filter using name_contains and " @@ -267,7 +296,9 @@ def get_tables( dbs = [x["Name"] for x in get_databases(catalog_id=catalog_id)] for db in dbs: args["DatabaseName"] = db - response_iterator = paginator.paginate(**args) + response_iterator = paginator.paginate( + **_catalog_id(catalog_id=catalog_id, **_transaction_id(transaction_id=transaction_id, **args)) + ) try: for page in response_iterator: for tbl in page["TableList"]: @@ -281,6 +312,7 @@ def tables( limit: int = 100, catalog_id: Optional[str] = None, database: Optional[str] = None, + transaction_id: Optional[str] = None, search_text: Optional[str] = None, name_contains: Optional[str] = None, name_prefix: Optional[str] = None, @@ -289,6 +321,10 @@ def tables( ) -> pd.DataFrame: """Get a DataFrame with tables filtered by a search term, prefix, suffix. + Note + ---- + Search feature is not supported for Governed tables. + Parameters ---------- limit : int, optional @@ -298,6 +334,8 @@ def tables( If none is provided, the AWS account ID is used by default. database : str, optional Database name. + transaction_id: str, optional + The ID of the transaction (i.e. used with GOVERNED tables). search_text : str, optional Select only tables with the given string in table's properties. 
name_contains : str, optional @@ -324,6 +362,7 @@ def tables( table_iter = get_tables( catalog_id=catalog_id, database=database, + transaction_id=transaction_id, name_contains=name_contains, name_prefix=name_prefix, name_suffix=name_suffix, @@ -342,11 +381,19 @@ def tables( tbls = [x for x in tbls if x["Name"].endswith(name_suffix)] tbls = tbls[:limit] - df_dict: Dict[str, List[str]] = {"Database": [], "Table": [], "Description": [], "Columns": [], "Partitions": []} + df_dict: Dict[str, List[str]] = { + "Database": [], + "Table": [], + "Description": [], + "TableType": [], + "Columns": [], + "Partitions": [], + } for tbl in tbls: df_dict["Database"].append(tbl["DatabaseName"]) df_dict["Table"].append(tbl["Name"]) df_dict["Description"].append(tbl.get("Description", "")) + df_dict["TableType"].append(tbl.get("TableType", "")) try: columns = tbl["StorageDescriptor"]["Columns"] df_dict["Columns"].append(", ".join([x["Name"] for x in columns])) @@ -364,6 +411,10 @@ def search_tables( ) -> Iterator[Dict[str, Any]]: """Get Pandas DataFrame of tables filtered by a search string. + Note + ---- + Search feature is not supported for Governed tables. 
+ Parameters ---------- text : str, optional @@ -386,9 +437,7 @@ def search_tables( """ client_glue: boto3.client = _utils.client(service_name="glue", session=boto3_session) - args: Dict[str, Any] = {"SearchText": text} - if catalog_id is not None: - args["CatalogId"] = catalog_id + args: Dict[str, Any] = _catalog_id(catalog_id=catalog_id, SearchText=text) response: Dict[str, Any] = client_glue.search_tables(**args) for tbl in response["TableList"]: yield tbl @@ -401,20 +450,34 @@ def search_tables( @apply_configs def table( - database: str, table: str, catalog_id: Optional[str] = None, boto3_session: Optional[boto3.Session] = None + database: str, + table: str, + transaction_id: Optional[str] = None, + query_as_of_time: Optional[str] = None, + catalog_id: Optional[str] = None, + boto3_session: Optional[boto3.Session] = None, ) -> pd.DataFrame: """Get table details as Pandas DataFrame. + Note + ---- + If reading from a governed table, pass only one of `transaction_id` or `query_as_of_time`. + Parameters ---------- - database : str + database: str Database name. - table : str + table: str Table name. - catalog_id : str, optional + transaction_id: str, optional + The ID of the transaction (i.e. used with GOVERNED tables). + query_as_of_time: str, optional + The time as of when to read the table contents. Must be a valid Unix epoch timestamp. + Cannot be specified alongside transaction_id. + catalog_id: str, optional The ID of the Data Catalog from which to retrieve Databases. If none is provided, the AWS account ID is used by default. - boto3_session : boto3.Session(), optional + boto3_session: boto3.Session(), optional Boto3 Session. The default boto3 session will be used if boto3_session receive None. 
Returns @@ -429,7 +492,14 @@ def table( """ client_glue: boto3.client = _utils.client(service_name="glue", session=boto3_session) - tbl = client_glue.get_table(**_catalog_id(catalog_id=catalog_id, DatabaseName=database, Name=table))["Table"] + tbl = client_glue.get_table( + **_catalog_id( + catalog_id=catalog_id, + **_transaction_id( + transaction_id=transaction_id, query_as_of_time=query_as_of_time, DatabaseName=database, Name=table + ), + ) + )["Table"] df_dict: Dict[str, List[Union[str, bool]]] = {"Column Name": [], "Type": [], "Partition": [], "Comment": []} if "StorageDescriptor" in tbl: for col in tbl["StorageDescriptor"].get("Columns", {}): @@ -453,16 +523,35 @@ def table( @apply_configs -def get_table_location(database: str, table: str, boto3_session: Optional[boto3.Session] = None) -> str: +def get_table_location( + database: str, + table: str, + transaction_id: Optional[str] = None, + query_as_of_time: Optional[str] = None, + catalog_id: Optional[str] = None, + boto3_session: Optional[boto3.Session] = None, +) -> str: """Get table's location on Glue catalog. + Note + ---- + If reading from a governed table, pass only one of `transaction_id` or `query_as_of_time`. + Parameters ---------- - database : str + database: str Database name. - table : str + table: str Table name. - boto3_session : boto3.Session(), optional + transaction_id: str, optional + The ID of the transaction (i.e. used with GOVERNED tables). + query_as_of_time: str, optional + The time as of when to read the table contents. Must be a valid Unix epoch timestamp. + Cannot be specified alongside transaction_id. + catalog_id: str, optional + The ID of the Data Catalog from which to retrieve Databases. + If none is provided, the AWS account ID is used by default. + boto3_session: boto3.Session(), optional Boto3 Session. The default boto3 session will be used if boto3_session receive None. 
Returns @@ -478,7 +567,14 @@ def get_table_location(database: str, table: str, boto3_session: Optional[boto3. """ client_glue: boto3.client = _utils.client(service_name="glue", session=boto3_session) - res: Dict[str, Any] = client_glue.get_table(DatabaseName=database, Name=table) + res: Dict[str, Any] = client_glue.get_table( + **_catalog_id( + catalog_id=catalog_id, + **_transaction_id( + transaction_id=transaction_id, query_as_of_time=query_as_of_time, DatabaseName=database, Name=table + ), + ) + ) try: return cast(str, res["Table"]["StorageDescriptor"]["Location"]) except KeyError as ex: @@ -593,7 +689,11 @@ def get_parquet_partitions( """ return _get_partitions( - database=database, table=table, expression=expression, catalog_id=catalog_id, boto3_session=boto3_session + database=database, + table=table, + expression=expression, + catalog_id=catalog_id, + boto3_session=boto3_session, ) @@ -659,7 +759,11 @@ def get_csv_partitions( """ return _get_partitions( - database=database, table=table, expression=expression, catalog_id=catalog_id, boto3_session=boto3_session + database=database, + table=table, + expression=expression, + catalog_id=catalog_id, + boto3_session=boto3_session, ) @@ -725,21 +829,39 @@ def get_partitions( """ return _get_partitions( - database=database, table=table, expression=expression, catalog_id=catalog_id, boto3_session=boto3_session + database=database, + table=table, + expression=expression, + catalog_id=catalog_id, + boto3_session=boto3_session, ) def get_table_parameters( - database: str, table: str, catalog_id: Optional[str] = None, boto3_session: Optional[boto3.Session] = None + database: str, + table: str, + transaction_id: Optional[str] = None, + query_as_of_time: Optional[str] = None, + catalog_id: Optional[str] = None, + boto3_session: Optional[boto3.Session] = None, ) -> Dict[str, str]: """Get all parameters. + Note + ---- + If reading from a governed table, pass only one of `transaction_id` or `query_as_of_time`. 
+ Parameters ---------- database : str Database name. table : str Table name. + transaction_id: str, optional + The ID of the transaction (i.e. used with GOVERNED tables). + query_as_of_time : str, optional + The time as of when to read the table contents. Must be a valid Unix epoch timestamp. + Cannot be specified alongside transaction_id. catalog_id : str, optional The ID of the Data Catalog from which to retrieve Databases. If none is provided, the AWS account ID is used by default. @@ -759,23 +881,42 @@ def get_table_parameters( """ client_glue: boto3.client = _utils.client(service_name="glue", session=boto3_session) response: Dict[str, Any] = client_glue.get_table( - **_catalog_id(catalog_id=catalog_id, DatabaseName=database, Name=table) + **_catalog_id( + catalog_id=catalog_id, + **_transaction_id( + transaction_id=transaction_id, query_as_of_time=query_as_of_time, DatabaseName=database, Name=table + ), + ) ) parameters: Dict[str, str] = response["Table"]["Parameters"] return parameters def get_table_description( - database: str, table: str, catalog_id: Optional[str] = None, boto3_session: Optional[boto3.Session] = None + database: str, + table: str, + transaction_id: Optional[str] = None, + query_as_of_time: Optional[str] = None, + catalog_id: Optional[str] = None, + boto3_session: Optional[boto3.Session] = None, ) -> Optional[str]: """Get table description. + Note + ---- + If reading from a governed table, pass only one of `transaction_id` or `query_as_of_time`. + Parameters ---------- database : str Database name. table : str Table name. + transaction_id: str, optional + The ID of the transaction (i.e. used with GOVERNED tables). + query_as_of_time: str, optional + The time as of when to read the table contents. Must be a valid Unix epoch timestamp. + Cannot be specified alongside transaction_id. catalog_id : str, optional The ID of the Data Catalog from which to retrieve Databases. If none is provided, the AWS account ID is used by default. 
@@ -795,7 +936,12 @@ def get_table_description( """ client_glue: boto3.client = _utils.client(service_name="glue", session=boto3_session) response: Dict[str, Any] = client_glue.get_table( - **_catalog_id(catalog_id=catalog_id, DatabaseName=database, Name=table) + **_catalog_id( + catalog_id=catalog_id, + **_transaction_id( + transaction_id=transaction_id, query_as_of_time=query_as_of_time, DatabaseName=database, Name=table + ), + ) ) desc: Optional[str] = response["Table"].get("Description", None) return desc @@ -803,16 +949,30 @@ def get_table_description( @apply_configs def get_columns_comments( - database: str, table: str, catalog_id: Optional[str] = None, boto3_session: Optional[boto3.Session] = None + database: str, + table: str, + transaction_id: Optional[str] = None, + query_as_of_time: Optional[str] = None, + catalog_id: Optional[str] = None, + boto3_session: Optional[boto3.Session] = None, ) -> Dict[str, str]: """Get all columns comments. + Note + ---- + If reading from a governed table, pass only one of `transaction_id` or `query_as_of_time`. + Parameters ---------- database : str Database name. table : str Table name. + transaction_id: str, optional + The ID of the transaction (i.e. used with GOVERNED tables). + query_as_of_time: str, optional + The time as of when to read the table contents. Must be a valid Unix epoch timestamp. + Cannot be specified alongside transaction_id. catalog_id : str, optional The ID of the Data Catalog from which to retrieve Databases. If none is provided, the AWS account ID is used by default. 
@@ -832,7 +992,12 @@ def get_columns_comments( """ client_glue: boto3.client = _utils.client(service_name="glue", session=boto3_session) response: Dict[str, Any] = client_glue.get_table( - **_catalog_id(catalog_id=catalog_id, DatabaseName=database, Name=table) + **_catalog_id( + catalog_id=catalog_id, + **_transaction_id( + transaction_id=transaction_id, query_as_of_time=query_as_of_time, DatabaseName=database, Name=table + ), + ) ) comments: Dict[str, str] = {} for c in response["Table"]["StorageDescriptor"]["Columns"]: diff --git a/awswrangler/catalog/_utils.py b/awswrangler/catalog/_utils.py index dbb88e970..6ce55559b 100644 --- a/awswrangler/catalog/_utils.py +++ b/awswrangler/catalog/_utils.py @@ -19,10 +19,23 @@ def _catalog_id(catalog_id: Optional[str] = None, **kwargs: Any) -> Dict[str, An return kwargs +def _transaction_id( + transaction_id: Optional[str] = None, query_as_of_time: Optional[str] = None, **kwargs: Any +) -> Dict[str, Any]: + if transaction_id is not None and query_as_of_time is not None: + raise exceptions.InvalidArgumentCombination( + "Please pass only one of `transaction_id` or `query_as_of_time`, not both" + ) + if transaction_id is not None: + kwargs["TransactionId"] = transaction_id + elif query_as_of_time is not None: + kwargs["QueryAsOfTime"] = query_as_of_time + return kwargs + + def _sanitize_name(name: str) -> str: name = "".join(c for c in unicodedata.normalize("NFD", name) if unicodedata.category(c) != "Mn") # strip accents - name = re.sub("[^A-Za-z0-9_]+", "_", name) # Replacing non alphanumeric characters by underscore - return re.sub("([a-z0-9])([A-Z])", r"\1_\2", name).lower() # Converting CamelCase to snake_case + return re.sub("[^A-Za-z0-9_]+", "_", name).lower() # Replacing non alphanumeric characters by underscore def _extract_dtypes_from_table_details(response: Dict[str, Any]) -> Dict[str, str]: @@ -37,7 +50,11 @@ def _extract_dtypes_from_table_details(response: Dict[str, Any]) -> Dict[str, st @apply_configs def 
does_table_exist( - database: str, table: str, boto3_session: Optional[boto3.Session] = None, catalog_id: Optional[str] = None + database: str, + table: str, + boto3_session: Optional[boto3.Session] = None, + catalog_id: Optional[str] = None, + transaction_id: Optional[str] = None, ) -> bool: """Check if the table exists. @@ -52,6 +69,8 @@ def does_table_exist( catalog_id : str, optional The ID of the Data Catalog from which to retrieve Databases. If none is provided, the AWS account ID is used by default. + transaction_id: str, optional + The ID of the transaction (i.e. used with GOVERNED tables). Returns ------- @@ -65,7 +84,12 @@ def does_table_exist( """ client_glue: boto3.client = _utils.client(service_name="glue", session=boto3_session) try: - client_glue.get_table(**_catalog_id(catalog_id=catalog_id, DatabaseName=database, Name=table)) + client_glue.get_table( + **_catalog_id( + catalog_id=catalog_id, + **_transaction_id(transaction_id=transaction_id, DatabaseName=database, Name=table), + ) + ) return True except client_glue.exceptions.EntityNotFoundException: return False diff --git a/awswrangler/data_api/rds.py b/awswrangler/data_api/rds.py index e95dc5692..b1986706b 100644 --- a/awswrangler/data_api/rds.py +++ b/awswrangler/data_api/rds.py @@ -103,7 +103,10 @@ def _get_statement_result(self, request_id: str) -> pd.DataFrame: rows: List[List[Any]] = [] for record in result["records"]: - row: List[Any] = [connector.DataApiConnector._get_column_value(column) for column in record] + row: List[Any] = [ + connector.DataApiConnector._get_column_value(column) # pylint: disable=protected-access + for column in record + ] rows.append(row) column_names: List[str] = [column["name"] for column in result["columnMetadata"]] diff --git a/awswrangler/data_api/redshift.py b/awswrangler/data_api/redshift.py index a6a5cc3a8..a82f5e7cc 100644 --- a/awswrangler/data_api/redshift.py +++ b/awswrangler/data_api/redshift.py @@ -86,7 +86,10 @@ def _get_statement_result(self, 
request_id: str) -> pd.DataFrame: for response in response_iterator: column_metadata = response["ColumnMetadata"] for record in response["Records"]: - row: List[Any] = [connector.DataApiConnector._get_column_value(column) for column in record] + row: List[Any] = [ + connector.DataApiConnector._get_column_value(column) # pylint: disable=protected-access + for column in record + ] rows.append(row) column_names: List[str] = [column["name"] for column in column_metadata] diff --git a/awswrangler/exceptions.py b/awswrangler/exceptions.py index dd15a4994..fc17897de 100644 --- a/awswrangler/exceptions.py +++ b/awswrangler/exceptions.py @@ -37,6 +37,10 @@ class InvalidTable(Exception): """InvalidTable exception.""" +class CommitCancelled(Exception): + """CommitCancelled exception.""" + + class QueryFailed(Exception): """QueryFailed exception.""" diff --git a/awswrangler/lakeformation/__init__.py b/awswrangler/lakeformation/__init__.py new file mode 100644 index 000000000..6ab8f46b4 --- /dev/null +++ b/awswrangler/lakeformation/__init__.py @@ -0,0 +1,28 @@ +"""Amazon Lake Formation Module.""" + +from awswrangler.lakeformation._read import read_sql_query, read_sql_table # noqa +from awswrangler.lakeformation._utils import ( # noqa + _build_table_objects, + _get_table_objects, + _update_table_objects, + cancel_transaction, + commit_transaction, + describe_transaction, + extend_transaction, + start_transaction, + wait_query, +) + +__all__ = [ + "read_sql_query", + "read_sql_table", + "_build_table_objects", + "_get_table_objects", + "_update_table_objects", + "cancel_transaction", + "commit_transaction", + "describe_transaction", + "extend_transaction", + "start_transaction", + "wait_query", +] diff --git a/awswrangler/lakeformation/_read.py b/awswrangler/lakeformation/_read.py new file mode 100644 index 000000000..63faad69c --- /dev/null +++ b/awswrangler/lakeformation/_read.py @@ -0,0 +1,326 @@ +"""Amazon Lake Formation Module gathering all read functions.""" +import 
concurrent.futures +import itertools +import logging +from typing import Any, Dict, List, Optional, Tuple, Union + +import boto3 +import pandas as pd +from pyarrow import NativeFile, RecordBatchStreamReader, Table, concat_tables + +from awswrangler import _data_types, _utils, catalog +from awswrangler._config import apply_configs +from awswrangler.catalog._utils import _catalog_id, _transaction_id +from awswrangler.lakeformation._utils import commit_transaction, start_transaction, wait_query + +_logger: logging.Logger = logging.getLogger(__name__) + + +def _get_work_unit_results( + query_id: str, + token_work_unit: Tuple[str, int], + client_lakeformation: boto3.client, +) -> Table: + token, work_unit = token_work_unit + messages: NativeFile = client_lakeformation.get_work_unit_results( + QueryId=query_id, WorkUnitToken=token, WorkUnitId=work_unit + )["ResultStream"] + return RecordBatchStreamReader(messages.read()).read_all() + + +def _resolve_sql_query( + query_id: str, + categories: Optional[List[str]], + safe: bool, + map_types: bool, + use_threads: bool, + boto3_session: boto3.Session, +) -> pd.DataFrame: + client_lakeformation: boto3.client = _utils.client(service_name="lakeformation", session=boto3_session) + + wait_query(query_id=query_id, boto3_session=boto3_session) + + # The LF Query Engine distributes the load across workers + # Retrieve the tokens and their associated work units until NextToken is '' + # One Token can span multiple work units + # PageSize determines the size of the "Units" array in each call + scan_kwargs: Dict[str, Union[str, int]] = {"QueryId": query_id, "PageSize": 10} + next_token: str = "init_token" # Dummy token + token_work_units: List[Tuple[str, int]] = [] + while next_token: + response = client_lakeformation.get_work_units(**scan_kwargs) + token_work_units.extend( # [(Token0, WorkUnitId0), (Token0, WorkUnitId1), (Token1, WorkUnitId2) ... 
] + [ + (unit["WorkUnitToken"], unit_id) + for unit in response["WorkUnitRanges"] + for unit_id in range(unit["WorkUnitIdMin"], unit["WorkUnitIdMax"] + 1) # Max is inclusive + ] + ) + next_token = response.get("NextToken", None) + scan_kwargs["NextToken"] = next_token + + tables: List[Table] = [] + if use_threads is False: + tables = list( + _get_work_unit_results( + query_id=query_id, + token_work_unit=token_work_unit, + client_lakeformation=client_lakeformation, + ) + for token_work_unit in token_work_units + ) + else: + cpus: int = _utils.ensure_cpu_count(use_threads=use_threads) + with concurrent.futures.ThreadPoolExecutor(max_workers=cpus) as executor: + tables = list( + executor.map( + _get_work_unit_results, + itertools.repeat(query_id), + token_work_units, + itertools.repeat(client_lakeformation), + ) + ) + table = concat_tables(tables) + args = { + "use_threads": use_threads, + "split_blocks": True, + "self_destruct": True, + "integer_object_nulls": False, + "date_as_object": True, + "ignore_metadata": True, + "strings_to_categorical": False, + "categories": categories, + "safe": safe, + "types_mapper": _data_types.pyarrow2pandas_extension if map_types else None, + } + return _utils.ensure_df_is_mutable(df=table.to_pandas(**args)) + + +@apply_configs +def read_sql_query( + sql: str, + database: str, + transaction_id: Optional[str] = None, + query_as_of_time: Optional[str] = None, + catalog_id: Optional[str] = None, + categories: Optional[List[str]] = None, + safe: bool = True, + map_types: bool = True, + use_threads: bool = True, + boto3_session: Optional[boto3.Session] = None, + params: Optional[Dict[str, Any]] = None, +) -> pd.DataFrame: + """Execute PartiQL query on AWS Glue Table (Transaction ID or time travel timestamp). Return Pandas DataFrame. + + Note + ---- + ORDER BY operations are not honoured. + i.e. 
sql="SELECT * FROM my_table ORDER BY my_column" is NOT valid + + Note + ---- + The database must NOT be explicitely defined in the PartiQL statement. + i.e. sql="SELECT * FROM my_table" is valid + but sql="SELECT * FROM my_db.my_table" is NOT valid + + Note + ---- + Pass one of `transaction_id` or `query_as_of_time`, not both. + + Parameters + ---------- + sql : str + partiQL query. + database : str + AWS Glue database name + transaction_id : str, optional + The ID of the transaction at which to read the table contents. + Cannot be specified alongside query_as_of_time + query_as_of_time : str, optional + The time as of when to read the table contents. Must be a valid Unix epoch timestamp. + Cannot be specified alongside transaction_id + catalog_id : str, optional + The ID of the Data Catalog from which to retrieve Databases. + If none is provided, the AWS account ID is used by default. + categories: Optional[List[str]], optional + List of columns names that should be returned as pandas.Categorical. + Recommended for memory restricted environments. + safe : bool, default True + For certain data types, a cast is needed in order to store the + data in a pandas DataFrame or Series (e.g. timestamps are always + stored as nanoseconds in pandas). This option controls whether it + is a safe cast or not. + map_types : bool, default True + True to convert pyarrow DataTypes to pandas ExtensionDtypes. It is + used to override the default pandas type for conversion of built-in + pyarrow types or in absence of pandas_metadata in the Table schema. + use_threads : bool + True to enable concurrent requests, False to disable multiple threads. + When enabled, os.cpu_count() is used as the max number of threads. + boto3_session : boto3.Session(), optional + Boto3 Session. The default boto3 session is used if boto3_session receives None. + params: Dict[str, any], optional + Dict of parameters used to format the partiQL query. Only named parameters are supported. 
+ The dict must contain the information in the form {"name": "value"} and the SQL query must contain + `:name`. + + Returns + ------- + pd.DataFrame + Pandas DataFrame. + + Examples + -------- + >>> import awswrangler as wr + >>> df = wr.lakeformation.read_sql_query( + ... sql="SELECT * FROM my_table;", + ... database="my_db", + ... catalog_id="111111111111" + ... ) + + >>> import awswrangler as wr + >>> df = wr.lakeformation.read_sql_query( + ... sql="SELECT * FROM my_table LIMIT 10;", + ... database="my_db", + ... transaction_id="1b62811fa3e02c4e5fdbaa642b752030379c4a8a70da1f8732ce6ccca47afdc9" + ... ) + + >>> import awswrangler as wr + >>> df = wr.lakeformation.read_sql_query( + ... sql="SELECT * FROM my_table WHERE name=:name; AND city=:city;", + ... database="my_db", + ... query_as_of_time="1611142914", + ... params={"name": "'filtered_name'", "city": "'filtered_city'"} + ... ) + + """ + session: boto3.Session = _utils.ensure_session(session=boto3_session) + client_lakeformation: boto3.client = _utils.client(service_name="lakeformation", session=session) + commit_trans: bool = False + if params is None: + params = {} + for key, value in params.items(): + sql = sql.replace(f":{key};", str(value)) + + if not any([transaction_id, query_as_of_time]): + _logger.debug("Neither `transaction_id` nor `query_as_of_time` were specified, starting transaction") + transaction_id = start_transaction(read_only=True, boto3_session=session) + commit_trans = True + args: Dict[str, Optional[str]] = _catalog_id( + catalog_id=catalog_id, + **_transaction_id(transaction_id=transaction_id, query_as_of_time=query_as_of_time, DatabaseName=database), + ) + query_id: str = client_lakeformation.start_query_planning(QueryString=sql, QueryPlanningContext=args)["QueryId"] + df = _resolve_sql_query( + query_id=query_id, + categories=categories, + safe=safe, + map_types=map_types, + use_threads=use_threads, + boto3_session=session, + ) + if commit_trans: + 
commit_transaction(transaction_id=transaction_id) # type: ignore + return df + + +@apply_configs +def read_sql_table( + table: str, + database: str, + transaction_id: Optional[str] = None, + query_as_of_time: Optional[str] = None, + catalog_id: Optional[str] = None, + categories: Optional[List[str]] = None, + safe: bool = True, + map_types: bool = True, + use_threads: bool = True, + boto3_session: Optional[boto3.Session] = None, +) -> pd.DataFrame: + """Extract all rows from AWS Glue Table (Transaction ID or time travel timestamp). Return Pandas DataFrame. + + Note + ---- + ORDER BY operations are not honoured. + i.e. sql="SELECT * FROM my_table ORDER BY my_column" is NOT valid + + Note + ---- + Pass one of `transaction_id` or `query_as_of_time`, not both. + + Parameters + ---------- + table : str + AWS Glue table name. + database : str + AWS Glue database name + transaction_id : str, optional + The ID of the transaction at which to read the table contents. + Cannot be specified alongside query_as_of_time + query_as_of_time : str, optional + The time as of when to read the table contents. Must be a valid Unix epoch timestamp. + Cannot be specified alongside transaction_id + catalog_id : str, optional + The ID of the Data Catalog from which to retrieve Databases. + If none is provided, the AWS account ID is used by default. + categories: Optional[List[str]], optional + List of columns names that should be returned as pandas.Categorical. + Recommended for memory restricted environments. + safe : bool, default True + For certain data types, a cast is needed in order to store the + data in a pandas DataFrame or Series (e.g. timestamps are always + stored as nanoseconds in pandas). This option controls whether it + is a safe cast or not. + map_types : bool, default True + True to convert pyarrow DataTypes to pandas ExtensionDtypes. 
It is + used to override the default pandas type for conversion of built-in + pyarrow types or in absence of pandas_metadata in the Table schema. + use_threads : bool + True to enable concurrent requests, False to disable multiple threads. + When enabled, os.cpu_count() is used as the max number of threads. + boto3_session : boto3.Session(), optional + Boto3 Session. The default boto3 session is used if boto3_session receives None. + + Returns + ------- + pd.DataFrame + Pandas DataFrame. + + Examples + -------- + >>> import awswrangler as wr + >>> df = wr.lakeformation.read_sql_table( + ... table="my_table", + ... database="my_db", + ... catalog_id="111111111111", + ... ) + + >>> import awswrangler as wr + >>> df = wr.lakeformation.read_sql_table( + ... table="my_table", + ... database="my_db", + ... transaction_id="1b62811fa3e02c4e5fdbaa642b752030379c4a8a70da1f8732ce6ccca47afdc9", + ... ) + + >>> import awswrangler as wr + >>> df = wr.lakeformation.read_sql_table( + ... table="my_table", + ... database="my_db", + ... query_as_of_time="1611142914", + ... use_threads=True, + ... 
) + + """ + table = catalog.sanitize_table_name(table=table) + return read_sql_query( + sql=f"SELECT * FROM {table}", + database=database, + transaction_id=transaction_id, + query_as_of_time=query_as_of_time, + safe=safe, + map_types=map_types, + catalog_id=catalog_id, + categories=categories, + use_threads=use_threads, + boto3_session=boto3_session, + ) diff --git a/awswrangler/lakeformation/_utils.py b/awswrangler/lakeformation/_utils.py new file mode 100644 index 000000000..54eb16e6b --- /dev/null +++ b/awswrangler/lakeformation/_utils.py @@ -0,0 +1,345 @@ +"""Utilities Module for Amazon Lake Formation.""" +import logging +import time +from math import inf +from threading import Thread +from typing import Any, Dict, List, Optional, Union + +import boto3 +import botocore.exceptions + +from awswrangler import _utils, exceptions +from awswrangler.catalog._utils import _catalog_id, _transaction_id +from awswrangler.s3._describe import describe_objects + +_QUERY_FINAL_STATES: List[str] = ["ERROR", "FINISHED"] +_QUERY_WAIT_POLLING_DELAY: float = 2 # SECONDS +_TRANSACTION_FINAL_STATES: List[str] = ["aborted", "committed"] +_TRANSACTION_WAIT_COMMIT_DELAY: float = 5 # SECONDS +_TRANSACTION_WAIT_POLLING_DELAY: float = 10 # SECONDS + +_logger: logging.Logger = logging.getLogger(__name__) + + +def _without_keys(d: Dict[str, Any], keys: List[str]) -> Dict[str, Any]: + return {x: d[x] for x in d if x not in keys} + + +def _build_partition_predicate( + partition_cols: List[str], + partitions_types: Dict[str, str], + partitions_values: List[str], +) -> str: + partition_predicates: List[str] = [] + for col, val in zip(partition_cols, partitions_values): + if partitions_types[col].startswith(("tinyint", "smallint", "int", "bigint", "float", "double", "decimal")): + partition_predicates.append(f"{col}={str(val)}") + else: + partition_predicates.append(f"{col}='{str(val)}'") + return " AND ".join(partition_predicates) + + +def _build_table_objects( + paths: List[str], + 
partitions_values: Dict[str, List[str]], + use_threads: Union[bool, int], + boto3_session: Optional[boto3.Session], +) -> List[Dict[str, Any]]: + table_objects: List[Dict[str, Any]] = [] + paths_desc: Dict[str, Dict[str, Any]] = describe_objects( + path=paths, use_threads=use_threads, boto3_session=boto3_session + ) + for path, path_desc in paths_desc.items(): + table_object: Dict[str, Any] = { + "Uri": path, + "ETag": path_desc["ETag"], + "Size": path_desc["ContentLength"], + } + if partitions_values: + table_object["PartitionValues"] = partitions_values[f"{path.rsplit('/', 1)[0].rstrip('/')}/"] + table_objects.append(table_object) + return table_objects + + +def _get_table_objects( + catalog_id: Optional[str], + database: str, + table: str, + transaction_id: str, + boto3_session: Optional[boto3.Session], + partition_cols: Optional[List[str]] = None, + partitions_types: Optional[Dict[str, str]] = None, + partitions_values: Optional[List[str]] = None, +) -> List[Dict[str, Any]]: + """Get Governed Table Objects from Lake Formation Engine.""" + session: boto3.Session = _utils.ensure_session(session=boto3_session) + client_lakeformation: boto3.client = _utils.client(service_name="lakeformation", session=session) + + scan_kwargs: Dict[str, Union[str, int]] = _catalog_id( + catalog_id=catalog_id, + **_transaction_id(transaction_id=transaction_id, DatabaseName=database, TableName=table, MaxResults=100), + ) + if partition_cols and partitions_types and partitions_values: + scan_kwargs["PartitionPredicate"] = _build_partition_predicate( + partition_cols=partition_cols, partitions_types=partitions_types, partitions_values=partitions_values + ) + + next_token: str = "init_token" # Dummy token + table_objects: List[Dict[str, Any]] = [] + while next_token: + response = _utils.try_it( + f=client_lakeformation.get_table_objects, + ex=botocore.exceptions.ClientError, + ex_code="ResourceNotReadyException", + base=1.0, + max_num_tries=5, + **scan_kwargs, + ) + for objects in 
response["Objects"]: + for table_object in objects["Objects"]: + if objects["PartitionValues"]: + table_object["PartitionValues"] = objects["PartitionValues"] + table_objects.append(table_object) + next_token = response.get("NextToken", None) + scan_kwargs["NextToken"] = next_token + return table_objects + + +def _update_table_objects( + catalog_id: Optional[str], + database: str, + table: str, + transaction_id: str, + boto3_session: Optional[boto3.Session], + add_objects: Optional[List[Dict[str, Any]]] = None, + del_objects: Optional[List[Dict[str, Any]]] = None, +) -> None: + """Register Governed Table Objects changes to Lake Formation Engine.""" + session: boto3.Session = _utils.ensure_session(session=boto3_session) + client_lakeformation: boto3.client = _utils.client(service_name="lakeformation", session=session) + + update_kwargs: Dict[str, Union[str, int, List[Dict[str, Dict[str, Any]]]]] = _catalog_id( + catalog_id=catalog_id, **_transaction_id(transaction_id=transaction_id, DatabaseName=database, TableName=table) + ) + + write_operations: List[Dict[str, Dict[str, Any]]] = [] + if add_objects: + write_operations.extend({"AddObject": obj} for obj in add_objects) + if del_objects: + write_operations.extend({"DeleteObject": _without_keys(obj, ["Size"])} for obj in del_objects) + update_kwargs["WriteOperations"] = write_operations + + client_lakeformation.update_table_objects(**update_kwargs) + + +def _monitor_transaction(transaction_id: str, time_out: float, boto3_session: Optional[boto3.Session] = None) -> None: + start = time.time() + elapsed_time = 0.0 + state: str = describe_transaction(transaction_id=transaction_id, boto3_session=boto3_session) + while (state not in _TRANSACTION_FINAL_STATES) and (time_out > elapsed_time): + try: + extend_transaction(transaction_id=transaction_id, boto3_session=boto3_session) + except botocore.exceptions.ClientError as ex: + if ex.response["Error"]["Code"] in ["TransactionCanceledException", 
"TransactionCommittedException"]: + _logger.debug("Transaction: %s was already canceled or committed.", transaction_id) + else: + raise ex + time.sleep(_TRANSACTION_WAIT_POLLING_DELAY) + elapsed_time = time.time() - start + state = describe_transaction(transaction_id=transaction_id, boto3_session=boto3_session) + _logger.debug("Transaction state: %s", state) + + +def describe_transaction(transaction_id: str, boto3_session: Optional[boto3.Session] = None) -> str: + """Return the status of a single transaction. + + Parameters + ---------- + transaction_id : str + The ID of the transaction. + boto3_session : boto3.Session(), optional + Boto3 Session. The default boto3 session will be used if boto3_session received None. + + Returns + ------- + str + Transaction status (i.e. active|committed|aborted). + + Examples + -------- + >>> import awswrangler as wr + >>> status = wr.lakeformation.describe_transaction(transaction_id="...") + + """ + session: boto3.Session = _utils.ensure_session(session=boto3_session) + client_lakeformation: boto3.client = _utils.client(service_name="lakeformation", session=session) + details: Dict[str, Any] = client_lakeformation.describe_transaction(TransactionId=transaction_id)[ + "TransactionDescription" + ] + return details["TransactionStatus"] # type: ignore + + +def cancel_transaction(transaction_id: str, boto3_session: Optional[boto3.Session] = None) -> None: + """Cancel the specified transaction. Returns exception if the transaction was previously committed. + + Parameters + ---------- + transaction_id : str + The ID of the transaction. + boto3_session : boto3.Session(), optional + Boto3 Session. The default boto3 session will be used if boto3_session received None. + + Returns + ------- + None + None. 
+
+    Examples
+    --------
+    >>> import awswrangler as wr
+    >>> wr.lakeformation.cancel_transaction(transaction_id="...")
+
+    """
+    session: boto3.Session = _utils.ensure_session(session=boto3_session)
+    client_lakeformation: boto3.client = _utils.client(service_name="lakeformation", session=session)
+
+    client_lakeformation.cancel_transaction(TransactionId=transaction_id)
+
+
+def start_transaction(
+    read_only: Optional[bool] = False, time_out: Optional[float] = inf, boto3_session: Optional[boto3.Session] = None
+) -> str:
+    """Start a new transaction and return its transaction ID.
+
+    The transaction is periodically extended until it's committed, canceled or the defined time-out is reached.
+
+    Parameters
+    ----------
+    read_only : bool, optional
+        Indicates that this transaction should be read only.
+        Writes made using a read-only transaction ID will be rejected.
+        Read-only transactions do not need to be committed.
+    time_out: float, optional
+        Maximum duration over which a transaction is extended.
+    boto3_session : boto3.Session(), optional
+        Boto3 Session. The default boto3 session will be used if boto3_session received None.
+
+    Returns
+    -------
+    str
+        An opaque identifier for the transaction.
+ + Examples + -------- + >>> import awswrangler as wr + >>> transaction_id = wr.lakeformation.start_transaction(read_only=False) + + """ + session: boto3.Session = _utils.ensure_session(session=boto3_session) + client_lakeformation: boto3.client = _utils.client(service_name="lakeformation", session=session) + transaction_type: str = "READ_ONLY" if read_only else "READ_AND_WRITE" + transaction_id: str = client_lakeformation.start_transaction(TransactionType=transaction_type)["TransactionId"] + # Extend the transaction while in "active" state in a separate thread + t = Thread(target=_monitor_transaction, args=(transaction_id, time_out, boto3_session)) + t.daemon = True # Ensures thread is killed when any exception is raised + t.start() + return transaction_id + + +def commit_transaction(transaction_id: str, boto3_session: Optional[boto3.Session] = None) -> None: + """Commit the specified transaction. Returns exception if the transaction was previously canceled. + + Parameters + ---------- + transaction_id : str + The ID of the transaction. + boto3_session : boto3.Session(), optional + Boto3 Session. The default boto3 session will be used if boto3_session received None. + + Returns + ------- + None + None. 
+ + Examples + -------- + >>> import awswrangler as wr + >>> wr.lakeformation.commit_transaction(transaction_id="...") + + """ + session: boto3.Session = _utils.ensure_session(session=boto3_session) + client_lakeformation: boto3.client = _utils.client(service_name="lakeformation", session=session) + + client_lakeformation.commit_transaction(TransactionId=transaction_id) + committed: bool = False + # Confirm transaction was committed + while not committed: + state: str = describe_transaction(transaction_id=transaction_id, boto3_session=session) + if state == "committed": + committed = True + elif state == "aborted": + raise exceptions.CommitCancelled(f"Transaction commit with id {transaction_id} was aborted.") + time.sleep(_TRANSACTION_WAIT_COMMIT_DELAY) + + +def extend_transaction(transaction_id: str, boto3_session: Optional[boto3.Session] = None) -> None: + """Indicate to the service that the specified transaction is still active and should not be canceled. + + Parameters + ---------- + transaction_id : str + The ID of the transaction. + boto3_session : boto3.Session(), optional + Boto3 Session. The default boto3 session will be used if boto3_session received None. + + Returns + ------- + None + None. + + Examples + -------- + >>> import awswrangler as wr + >>> wr.lakeformation.extend_transaction(transaction_id="...") + + """ + session: boto3.Session = _utils.ensure_session(session=boto3_session) + client_lakeformation: boto3.client = _utils.client(service_name="lakeformation", session=session) + + client_lakeformation.extend_transaction(TransactionId=transaction_id) + + +def wait_query(query_id: str, boto3_session: Optional[boto3.Session] = None) -> Dict[str, Any]: + """Wait for the query to end. + + Parameters + ---------- + query_id : str + Lake Formation query execution ID. + boto3_session : boto3.Session(), optional + Boto3 Session. The default boto3 session will be used if boto3_session received None. 
+ + Returns + ------- + Dict[str, Any] + Dictionary with the get_query_state response. + + Examples + -------- + >>> import awswrangler as wr + >>> res = wr.lakeformation.wait_query(query_id='query-id') + + """ + session: boto3.Session = _utils.ensure_session(session=boto3_session) + client_lakeformation: boto3.client = _utils.client(service_name="lakeformation", session=session) + + response: Dict[str, Any] = client_lakeformation.get_query_state(QueryId=query_id) + state: str = response["State"] + while state not in _QUERY_FINAL_STATES: + time.sleep(_QUERY_WAIT_POLLING_DELAY) + response = client_lakeformation.get_query_state(QueryId=query_id) + state = response["State"] + _logger.debug("state: %s", state) + if state == "ERROR": + raise exceptions.QueryFailed(response.get("Error")) + return response diff --git a/awswrangler/opensearch/_write.py b/awswrangler/opensearch/_write.py index a22c35905..316cfa486 100644 --- a/awswrangler/opensearch/_write.py +++ b/awswrangler/opensearch/_write.py @@ -201,7 +201,7 @@ def create_index( body["mappings"] = {index: mappings} if settings: body["settings"] = settings - if body == {}: + if not body: body = None # type: ignore # ignore 400 cause by IndexAlreadyExistsException when creating an index diff --git a/awswrangler/s3/_write_dataset.py b/awswrangler/s3/_write_dataset.py index 72a0dfa33..e2463c3aa 100644 --- a/awswrangler/s3/_write_dataset.py +++ b/awswrangler/s3/_write_dataset.py @@ -7,7 +7,7 @@ import numpy as np import pandas as pd -from awswrangler import exceptions +from awswrangler import exceptions, lakeformation from awswrangler.s3._delete import delete_objects from awswrangler.s3._write_concurrent import _WriteProxy @@ -22,6 +22,12 @@ def _to_partitions( use_threads: Union[bool, int], mode: str, partition_cols: List[str], + partitions_types: Optional[Dict[str, str]], + catalog_id: Optional[str], + database: Optional[str], + table: Optional[str], + table_type: Optional[str], + transaction_id: Optional[str], 
bucketing_info: Optional[Tuple[List[str], int]], filename_prefix: str, boto3_session: boto3.Session, @@ -36,12 +42,35 @@ def _to_partitions( subdir = "/".join([f"{name}={val}" for name, val in zip(partition_cols, keys)]) prefix: str = f"{path_root}{subdir}/" if mode == "overwrite_partitions": - delete_objects( - path=prefix, - use_threads=use_threads, - boto3_session=boto3_session, - s3_additional_kwargs=func_kwargs.get("s3_additional_kwargs"), - ) + if (table_type == "GOVERNED") and (table is not None) and (database is not None): + del_objects: List[ + Dict[str, Any] + ] = lakeformation._get_table_objects( # pylint: disable=protected-access + catalog_id=catalog_id, + database=database, + table=table, + transaction_id=transaction_id, # type: ignore + partition_cols=partition_cols, + partitions_values=keys, + partitions_types=partitions_types, + boto3_session=boto3_session, + ) + if del_objects: + lakeformation._update_table_objects( # pylint: disable=protected-access + catalog_id=catalog_id, + database=database, + table=table, + transaction_id=transaction_id, # type: ignore + del_objects=del_objects, + boto3_session=boto3_session, + ) + else: + delete_objects( + path=prefix, + use_threads=use_threads, + boto3_session=boto3_session, + s3_additional_kwargs=func_kwargs.get("s3_additional_kwargs"), + ) if bucketing_info: _to_buckets( func=func, @@ -136,6 +165,12 @@ def _to_dataset( use_threads: Union[bool, int], mode: str, partition_cols: Optional[List[str]], + partitions_types: Optional[Dict[str, str]], + catalog_id: Optional[str], + database: Optional[str], + table: Optional[str], + table_type: Optional[str], + transaction_id: Optional[str], bucketing_info: Optional[Tuple[List[str], int]], boto3_session: boto3.Session, **func_kwargs: Any, @@ -148,12 +183,25 @@ def _to_dataset( f"{mode} is a invalid mode, please use append, overwrite or overwrite_partitions." 
) if (mode == "overwrite") or ((mode == "overwrite_partitions") and (not partition_cols)): - delete_objects( - path=path_root, - use_threads=use_threads, - boto3_session=boto3_session, - s3_additional_kwargs=func_kwargs.get("s3_additional_kwargs"), - ) + if (table_type == "GOVERNED") and (table is not None) and (database is not None): + del_objects: List[Dict[str, Any]] = lakeformation._get_table_objects( # pylint: disable=protected-access + catalog_id=catalog_id, + database=database, + table=table, + transaction_id=transaction_id, # type: ignore + boto3_session=boto3_session, + ) + if del_objects: + lakeformation._update_table_objects( # pylint: disable=protected-access + catalog_id=catalog_id, + database=database, + table=table, + transaction_id=transaction_id, # type: ignore + del_objects=del_objects, + boto3_session=boto3_session, + ) + else: + delete_objects(path=path_root, use_threads=use_threads, boto3_session=boto3_session) # Writing partitions_values: Dict[str, List[str]] = {} @@ -166,9 +214,15 @@ def _to_dataset( path_root=path_root, use_threads=use_threads, mode=mode, + catalog_id=catalog_id, + database=database, + table=table, + table_type=table_type, + transaction_id=transaction_id, bucketing_info=bucketing_info, filename_prefix=filename_prefix, partition_cols=partition_cols, + partitions_types=partitions_types, boto3_session=boto3_session, index=index, **func_kwargs, @@ -197,4 +251,22 @@ def _to_dataset( ) _logger.debug("paths: %s", paths) _logger.debug("partitions_values: %s", partitions_values) + if (table_type == "GOVERNED") and (table is not None) and (database is not None): + add_objects: List[Dict[str, Any]] = lakeformation._build_table_objects( # pylint: disable=protected-access + paths, partitions_values, use_threads=use_threads, boto3_session=boto3_session + ) + try: + if add_objects: + lakeformation._update_table_objects( # pylint: disable=protected-access + catalog_id=catalog_id, + database=database, + table=table, + 
transaction_id=transaction_id, # type: ignore + add_objects=add_objects, + boto3_session=boto3_session, + ) + except Exception as ex: + _logger.error(ex) + raise + return paths, partitions_values diff --git a/awswrangler/s3/_write_parquet.py b/awswrangler/s3/_write_parquet.py index 2ca49394b..d91334493 100644 --- a/awswrangler/s3/_write_parquet.py +++ b/awswrangler/s3/_write_parquet.py @@ -12,7 +12,7 @@ import pyarrow.lib import pyarrow.parquet -from awswrangler import _data_types, _utils, catalog, exceptions +from awswrangler import _data_types, _utils, catalog, exceptions, lakeformation from awswrangler._config import apply_configs from awswrangler.s3._delete import delete_objects from awswrangler.s3._fs import open_s3_object @@ -190,7 +190,7 @@ def _to_parquet( @apply_configs -def to_parquet( # pylint: disable=too-many-arguments,too-many-locals +def to_parquet( # pylint: disable=too-many-arguments,too-many-locals,too-many-branches,too-many-statements df: pd.DataFrame, path: Optional[str] = None, index: bool = False, @@ -211,6 +211,8 @@ def to_parquet( # pylint: disable=too-many-arguments,too-many-locals schema_evolution: bool = True, database: Optional[str] = None, table: Optional[str] = None, + table_type: Optional[str] = None, + transaction_id: Optional[str] = None, dtype: Optional[Dict[str, str]] = None, description: Optional[str] = None, parameters: Optional[Dict[str, str]] = None, @@ -314,6 +316,10 @@ def to_parquet( # pylint: disable=too-many-arguments,too-many-locals Glue/Athena catalog: Database name. table : str, optional Glue/Athena catalog: Table name. + table_type: str, optional + The type of the Glue Table. Set to EXTERNAL_TABLE if None. + transaction_id: str, optional + The ID of the transaction when writing to a Governed Table. dtype : Dict[str, str], optional Dictionary of columns names and Athena/Glue types to be casted. Useful when you have columns with undetermined or mixed data types. 
@@ -459,6 +465,28 @@ def to_parquet( # pylint: disable=too-many-arguments,too-many-locals } } + Writing dataset to Glue governed table + + >>> import awswrangler as wr + >>> import pandas as pd + >>> wr.s3.to_parquet( + ... df=pd.DataFrame({ + ... 'col': [1, 2, 3], + ... 'col2': ['A', 'A', 'B'], + ... 'col3': [None, None, None] + ... }), + ... dataset=True, + ... mode='append', + ... database='default', # Athena/Glue database + ... table='my_table', # Athena/Glue table + ... table_type='GOVERNED', + ... transaction_id="xxx", + ... ) + { + 'paths': ['s3://.../x.parquet'], + 'partitions_values: {} + } + Writing dataset casting empty column data type >>> import awswrangler as wr @@ -505,6 +533,9 @@ def to_parquet( # pylint: disable=too-many-arguments,too-many-locals dtype = dtype if dtype else {} partitions_values: Dict[str, List[str]] = {} mode = "append" if mode is None else mode + commit_trans: bool = False + if transaction_id: + table_type = "GOVERNED" filename_prefix = filename_prefix + uuid.uuid4().hex if filename_prefix else uuid.uuid4().hex cpus: int = _utils.ensure_cpu_count(use_threads=use_threads) session: boto3.Session = _utils.ensure_session(session=boto3_session) @@ -517,9 +548,12 @@ def to_parquet( # pylint: disable=too-many-arguments,too-many-locals catalog_table_input: Optional[Dict[str, Any]] = None if database is not None and table is not None: catalog_table_input = catalog._get_table_input( # pylint: disable=protected-access - database=database, table=table, boto3_session=session, catalog_id=catalog_id + database=database, table=table, boto3_session=session, transaction_id=transaction_id, catalog_id=catalog_id ) - catalog_path = catalog_table_input["StorageDescriptor"]["Location"] if catalog_table_input else None + catalog_path: Optional[str] = None + if catalog_table_input: + table_type = catalog_table_input["TableType"] + catalog_path = catalog_table_input["StorageDescriptor"]["Location"] if path is None: if catalog_path: path = catalog_path @@ 
-532,6 +566,10 @@ def to_parquet( # pylint: disable=too-many-arguments,too-many-locals raise exceptions.InvalidArgumentValue( f"The specified path: {path}, does not match the existing Glue catalog table path: {catalog_path}" ) + if (table_type == "GOVERNED") and (not transaction_id): + _logger.debug("`transaction_id` not specified for GOVERNED table, starting transaction") + transaction_id = lakeformation.start_transaction(read_only=False, boto3_session=boto3_session) + commit_trans = True df = _apply_dtype(df=df, dtype=dtype, catalog_table_input=catalog_table_input, mode=mode) schema: pa.Schema = _data_types.pyarrow_schema_from_pandas( df=df, index=index, ignore_cols=partition_cols, dtype=dtype @@ -563,6 +601,42 @@ def to_parquet( # pylint: disable=too-many-arguments,too-many-locals ) if schema_evolution is False: _utils.check_schema_changes(columns_types=columns_types, table_input=catalog_table_input, mode=mode) + + if (catalog_table_input is None) and (table_type == "GOVERNED"): + catalog._create_parquet_table( # pylint: disable=protected-access + database=database, + table=table, + path=path, # type: ignore + columns_types=columns_types, + table_type=table_type, + partitions_types=partitions_types, + bucketing_info=bucketing_info, + compression=compression, + description=description, + parameters=parameters, + columns_comments=columns_comments, + boto3_session=session, + mode=mode, + transaction_id=transaction_id, + catalog_versioning=catalog_versioning, + projection_enabled=projection_enabled, + projection_types=projection_types, + projection_ranges=projection_ranges, + projection_values=projection_values, + projection_intervals=projection_intervals, + projection_digits=projection_digits, + projection_storage_location_template=None, + catalog_id=catalog_id, + catalog_table_input=catalog_table_input, + ) + catalog_table_input = catalog._get_table_input( # pylint: disable=protected-access + database=database, + table=table, + boto3_session=session, + 
transaction_id=transaction_id, + catalog_id=catalog_id, + ) + paths, partitions_values = _to_dataset( func=_to_parquet, concurrent_partitioning=concurrent_partitioning, @@ -572,10 +646,16 @@ def to_parquet( # pylint: disable=too-many-arguments,too-many-locals index=index, compression=compression, compression_ext=compression_ext, + catalog_id=catalog_id, + database=database, + table=table, + table_type=table_type, + transaction_id=transaction_id, pyarrow_additional_kwargs=pyarrow_additional_kwargs, cpus=cpus, use_threads=use_threads, partition_cols=partition_cols, + partitions_types=partitions_types, bucketing_info=bucketing_info, dtype=dtype, mode=mode, @@ -591,6 +671,7 @@ def to_parquet( # pylint: disable=too-many-arguments,too-many-locals table=table, path=path, # type: ignore columns_types=columns_types, + table_type=table_type, partitions_types=partitions_types, bucketing_info=bucketing_info, compression=compression, @@ -599,6 +680,7 @@ def to_parquet( # pylint: disable=too-many-arguments,too-many-locals columns_comments=columns_comments, boto3_session=session, mode=mode, + transaction_id=transaction_id, catalog_versioning=catalog_versioning, projection_enabled=projection_enabled, projection_types=projection_types, @@ -610,7 +692,7 @@ def to_parquet( # pylint: disable=too-many-arguments,too-many-locals catalog_id=catalog_id, catalog_table_input=catalog_table_input, ) - if partitions_values and (regular_partitions is True): + if partitions_values and (regular_partitions is True) and (table_type != "GOVERNED"): _logger.debug("partitions_values:\n%s", partitions_values) catalog.add_parquet_partitions( database=database, @@ -622,6 +704,10 @@ def to_parquet( # pylint: disable=too-many-arguments,too-many-locals catalog_id=catalog_id, columns_types=columns_types, ) + if commit_trans: + lakeformation.commit_transaction( + transaction_id=transaction_id, boto3_session=boto3_session # type: ignore + ) except Exception: _logger.debug("Catalog write failed, cleaning up S3 
(paths: %s).", paths) delete_objects( diff --git a/awswrangler/s3/_write_text.py b/awswrangler/s3/_write_text.py index 0f7ce4330..0be00caf6 100644 --- a/awswrangler/s3/_write_text.py +++ b/awswrangler/s3/_write_text.py @@ -10,7 +10,7 @@ import pandas as pd from pandas.io.common import infer_compression -from awswrangler import _data_types, _utils, catalog, exceptions +from awswrangler import _data_types, _utils, catalog, exceptions, lakeformation from awswrangler._config import apply_configs from awswrangler.s3._delete import delete_objects from awswrangler.s3._fs import open_s3_object @@ -90,6 +90,8 @@ def to_csv( # pylint: disable=too-many-arguments,too-many-locals,too-many-state schema_evolution: bool = False, database: Optional[str] = None, table: Optional[str] = None, + table_type: Optional[str] = None, + transaction_id: Optional[str] = None, dtype: Optional[Dict[str, str]] = None, description: Optional[str] = None, parameters: Optional[Dict[str, str]] = None, @@ -191,6 +193,10 @@ def to_csv( # pylint: disable=too-many-arguments,too-many-locals,too-many-state Glue/Athena catalog: Database name. table : str, optional Glue/Athena catalog: Table name. + table_type: str, optional + The type of the Glue Table. Set to EXTERNAL_TABLE if None + transaction_id: str, optional + The ID of the transaction when writing to a Governed Table. dtype : Dict[str, str], optional Dictionary of columns names and Athena/Glue types to be casted. Useful when you have columns with undetermined or mixed data types. @@ -357,6 +363,28 @@ def to_csv( # pylint: disable=too-many-arguments,too-many-locals,too-many-state } } + Writing dataset to Glue governed table + + >>> import awswrangler as wr + >>> import pandas as pd + >>> wr.s3.to_csv( + ... df=pd.DataFrame({ + ... 'col': [1, 2, 3], + ... 'col2': ['A', 'A', 'B'], + ... 'col3': [None, None, None] + ... }), + ... dataset=True, + ... mode='append', + ... database='default', # Athena/Glue database + ... 
table='my_table', # Athena/Glue table + ... table_type='GOVERNED', + ... transaction_id="xxx", + ... ) + { + 'paths': ['s3://.../x.csv'], + 'partitions_values: {} + } + Writing dataset casting empty column data type >>> import awswrangler as wr @@ -409,6 +437,9 @@ def to_csv( # pylint: disable=too-many-arguments,too-many-locals,too-many-state dtype = dtype if dtype else {} partitions_values: Dict[str, List[str]] = {} mode = "append" if mode is None else mode + commit_trans: bool = False + if transaction_id: + table_type = "GOVERNED" filename_prefix = filename_prefix + uuid.uuid4().hex if filename_prefix else uuid.uuid4().hex session: boto3.Session = _utils.ensure_session(session=boto3_session) @@ -420,9 +451,13 @@ def to_csv( # pylint: disable=too-many-arguments,too-many-locals,too-many-state catalog_table_input: Optional[Dict[str, Any]] = None if database and table: catalog_table_input = catalog._get_table_input( # pylint: disable=protected-access - database=database, table=table, boto3_session=session, catalog_id=catalog_id + database=database, table=table, boto3_session=session, transaction_id=transaction_id, catalog_id=catalog_id ) - catalog_path = catalog_table_input.get("StorageDescriptor", {}).get("Location") if catalog_table_input else None + + catalog_path: Optional[str] = None + if catalog_table_input: + table_type = catalog_table_input["TableType"] + catalog_path = catalog_table_input.get("StorageDescriptor", {}).get("Location") if path is None: if catalog_path: path = catalog_path @@ -439,6 +474,10 @@ def to_csv( # pylint: disable=too-many-arguments,too-many-locals,too-many-state raise exceptions.InvalidArgumentCombination( "If database and table are given, you must use one of these compressions: gzip, bz2 or None." 
) + if (table_type == "GOVERNED") and (not transaction_id): + _logger.debug("`transaction_id` not specified for GOVERNED table, starting transaction") + transaction_id = lakeformation.start_transaction(read_only=False, boto3_session=boto3_session) + commit_trans = True df = _apply_dtype(df=df, dtype=dtype, catalog_table_input=catalog_table_input, mode=mode) @@ -481,13 +520,54 @@ def to_csv( # pylint: disable=too-many-arguments,too-many-locals,too-many-state columns_types: Dict[str, str] = {} partitions_types: Dict[str, str] = {} - if (database is not None) and (table is not None): + + if database and table: columns_types, partitions_types = _data_types.athena_types_from_pandas_partitioned( df=df, index=index, partition_cols=partition_cols, dtype=dtype, index_left=True ) if schema_evolution is False: _utils.check_schema_changes(columns_types=columns_types, table_input=catalog_table_input, mode=mode) + if (catalog_table_input is None) and (table_type == "GOVERNED"): + catalog._create_csv_table( # pylint: disable=protected-access + database=database, + table=table, + path=path, + columns_types=columns_types, + table_type=table_type, + partitions_types=partitions_types, + bucketing_info=bucketing_info, + description=description, + parameters=parameters, + columns_comments=columns_comments, + boto3_session=session, + mode=mode, + transaction_id=transaction_id, + schema_evolution=schema_evolution, + catalog_versioning=catalog_versioning, + sep=sep, + projection_enabled=projection_enabled, + projection_types=projection_types, + projection_ranges=projection_ranges, + projection_values=projection_values, + projection_intervals=projection_intervals, + projection_digits=projection_digits, + projection_storage_location_template=None, + catalog_table_input=catalog_table_input, + catalog_id=catalog_id, + compression=pandas_kwargs.get("compression"), + skip_header_line_count=None, + serde_library=None, + serde_parameters=None, + ) + catalog_table_input = catalog._get_table_input( 
# pylint: disable=protected-access + database=database, + table=table, + boto3_session=session, + transaction_id=transaction_id, + catalog_id=catalog_id, + ) + paths, partitions_values = _to_dataset( func=_to_text, concurrent_partitioning=concurrent_partitioning, @@ -496,9 +576,15 @@ def to_csv( # pylint: disable=too-many-arguments,too-many-locals,too-many-state index=index, sep=sep, compression=compression, + catalog_id=catalog_id, + database=database, + table=table, + table_type=table_type, + transaction_id=transaction_id, filename_prefix=filename_prefix, use_threads=use_threads, partition_cols=partition_cols, + partitions_types=partitions_types, bucketing_info=bucketing_info, mode=mode, boto3_session=session, @@ -520,8 +606,9 @@ def to_csv( # pylint: disable=too-many-arguments,too-many-locals,too-many-state catalog._create_csv_table( # pylint: disable=protected-access database=database, table=table, - path=path, # type: ignore + path=path, columns_types=columns_types, + table_type=table_type, partitions_types=partitions_types, bucketing_info=bucketing_info, description=description, @@ -529,6 +616,7 @@ def to_csv( # pylint: disable=too-many-arguments,too-many-locals,too-many-state columns_comments=columns_comments, boto3_session=session, mode=mode, + transaction_id=transaction_id, catalog_versioning=catalog_versioning, schema_evolution=schema_evolution, sep=sep, @@ -546,7 +634,7 @@ def to_csv( # pylint: disable=too-many-arguments,too-many-locals,too-many-state serde_library=serde_library, serde_parameters=serde_parameters, ) - if partitions_values and (regular_partitions is True): + if partitions_values and (regular_partitions is True) and (table_type != "GOVERNED"): _logger.debug("partitions_values:\n%s", partitions_values) catalog.add_csv_partitions( database=database, @@ -561,6 +649,10 @@ def to_csv( # pylint: disable=too-many-arguments,too-many-locals,too-many-state columns_types=columns_types, compression=pandas_kwargs.get("compression"), ) + if 
commit_trans: + lakeformation.commit_transaction( + transaction_id=transaction_id, boto3_session=boto3_session # type: ignore + ) except Exception: _logger.debug("Catalog write failed, cleaning up S3 (paths: %s).", paths) delete_objects( @@ -592,6 +684,8 @@ def to_json( # pylint: disable=too-many-arguments,too-many-locals,too-many-stat schema_evolution: bool = True, database: Optional[str] = None, table: Optional[str] = None, + table_type: Optional[str] = None, + transaction_id: Optional[str] = None, dtype: Optional[Dict[str, str]] = None, description: Optional[str] = None, parameters: Optional[Dict[str, str]] = None, @@ -672,6 +766,10 @@ def to_json( # pylint: disable=too-many-arguments,too-many-locals,too-many-stat Glue/Athena catalog: Database name. table : str, optional Glue/Athena catalog: Table name. + table_type: str, optional + The type of the Glue Table. Set to EXTERNAL_TABLE if None + transaction_id: str, optional + The ID of the transaction when writing to a Governed Table. dtype : Dict[str, str], optional Dictionary of columns names and Athena/Glue types to be casted. Useful when you have columns with undetermined or mixed data types. 
@@ -792,6 +890,9 @@ def to_json( # pylint: disable=too-many-arguments,too-many-locals,too-many-stat dtype = dtype if dtype else {} partitions_values: Dict[str, List[str]] = {} mode = "append" if mode is None else mode + commit_trans: bool = False + if transaction_id: + table_type = "GOVERNED" filename_prefix = filename_prefix + uuid.uuid4().hex if filename_prefix else uuid.uuid4().hex session: boto3.Session = _utils.ensure_session(session=boto3_session) @@ -801,11 +902,15 @@ def to_json( # pylint: disable=too-many-arguments,too-many-locals,too-many-stat # Evaluating dtype catalog_table_input: Optional[Dict[str, Any]] = None - if database is not None and table is not None: + + if database and table: catalog_table_input = catalog._get_table_input( # pylint: disable=protected-access - database=database, table=table, boto3_session=session, catalog_id=catalog_id + database=database, table=table, boto3_session=session, transaction_id=transaction_id, catalog_id=catalog_id ) - catalog_path = catalog_table_input["StorageDescriptor"]["Location"] if catalog_table_input else None + catalog_path: Optional[str] = None + if catalog_table_input: + table_type = catalog_table_input["TableType"] + catalog_path = catalog_table_input.get("StorageDescriptor", {}).get("Location") if path is None: if catalog_path: path = catalog_path @@ -822,6 +927,11 @@ def to_json( # pylint: disable=too-many-arguments,too-many-locals,too-many-stat raise exceptions.InvalidArgumentCombination( "If database and table are given, you must use one of these compressions: gzip, bz2 or None." 
) + if (table_type == "GOVERNED") and (not transaction_id): + _logger.debug("`transaction_id` not specified for GOVERNED table, starting transaction") + transaction_id = lakeformation.start_transaction(read_only=False, boto3_session=boto3_session) + commit_trans = True + df = _apply_dtype(df=df, dtype=dtype, catalog_table_input=catalog_table_input, mode=mode) if dataset is False: @@ -840,12 +950,52 @@ def to_json( # pylint: disable=too-many-arguments,too-many-locals,too-many-stat columns_types: Dict[str, str] = {} partitions_types: Dict[str, str] = {} - if (database is not None) and (table is not None): + + if database and table: columns_types, partitions_types = _data_types.athena_types_from_pandas_partitioned( df=df, index=index, partition_cols=partition_cols, dtype=dtype ) if schema_evolution is False: _utils.check_schema_changes(columns_types=columns_types, table_input=catalog_table_input, mode=mode) + + if (catalog_table_input is None) and (table_type == "GOVERNED"): + catalog._create_json_table( # pylint: disable=protected-access + database=database, + table=table, + path=path, # type: ignore + columns_types=columns_types, + table_type=table_type, + partitions_types=partitions_types, + bucketing_info=bucketing_info, + description=description, + parameters=parameters, + columns_comments=columns_comments, + boto3_session=session, + mode=mode, + transaction_id=transaction_id, + catalog_versioning=catalog_versioning, + schema_evolution=schema_evolution, + projection_enabled=projection_enabled, + projection_types=projection_types, + projection_ranges=projection_ranges, + projection_values=projection_values, + projection_intervals=projection_intervals, + projection_digits=projection_digits, + projection_storage_location_template=None, + catalog_table_input=catalog_table_input, + catalog_id=catalog_id, + compression=pandas_kwargs.get("compression"), + serde_library=None, + serde_parameters=None, + ) + catalog_table_input = catalog._get_table_input( # pylint: 
disable=protected-access + database=database, + table=table, + boto3_session=session, + transaction_id=transaction_id, + catalog_id=catalog_id, + ) + paths, partitions_values = _to_dataset( func=_to_text, concurrent_partitioning=concurrent_partitioning, @@ -854,8 +1004,14 @@ def to_json( # pylint: disable=too-many-arguments,too-many-locals,too-many-stat filename_prefix=filename_prefix, index=index, compression=compression, + catalog_id=catalog_id, + database=database, + table=table, + table_type=table_type, + transaction_id=transaction_id, use_threads=use_threads, partition_cols=partition_cols, + partitions_types=partitions_types, bucketing_info=bucketing_info, mode=mode, boto3_session=session, @@ -874,6 +1030,7 @@ def to_json( # pylint: disable=too-many-arguments,too-many-locals,too-many-stat table=table, path=path, # type: ignore columns_types=columns_types, + table_type=table_type, partitions_types=partitions_types, bucketing_info=bucketing_info, description=description, @@ -881,6 +1038,7 @@ def to_json( # pylint: disable=too-many-arguments,too-many-locals,too-many-stat columns_comments=columns_comments, boto3_session=session, mode=mode, + transaction_id=transaction_id, catalog_versioning=catalog_versioning, schema_evolution=schema_evolution, projection_enabled=projection_enabled, @@ -896,7 +1054,7 @@ def to_json( # pylint: disable=too-many-arguments,too-many-locals,too-many-stat serde_library=serde_library, serde_parameters=serde_parameters, ) - if partitions_values and (regular_partitions is True): + if partitions_values and (regular_partitions is True) and (table_type != "GOVERNED"): _logger.debug("partitions_values:\n%s", partitions_values) catalog.add_json_partitions( database=database, @@ -910,6 +1068,10 @@ def to_json( # pylint: disable=too-many-arguments,too-many-locals,too-many-stat columns_types=columns_types, compression=pandas_kwargs.get("compression"), ) + if commit_trans: + lakeformation.commit_transaction( + transaction_id=transaction_id, 
boto3_session=boto3_session # type: ignore + ) except Exception: _logger.debug("Catalog write failed, cleaning up S3 (paths: %s).", paths) delete_objects( diff --git a/docs/source/api.rst b/docs/source/api.rst index c62c6a5df..0951826ad 100644 --- a/docs/source/api.rst +++ b/docs/source/api.rst @@ -4,6 +4,7 @@ API Reference * `Amazon S3`_ * `AWS Glue Catalog`_ * `Amazon Athena`_ +* `AWS Lake Formation`_ * `Amazon Redshift`_ * `PostgreSQL`_ * `MySQL`_ @@ -120,6 +121,23 @@ Amazon Athena stop_query_execution wait_query +AWS Lake Formation +------------------ + +.. currentmodule:: awswrangler.lakeformation + +.. autosummary:: + :toctree: stubs + + read_sql_query + read_sql_table + cancel_transaction + commit_transaction + describe_transaction + extend_transaction + start_transaction + wait_query + Amazon Redshift --------------- diff --git a/poetry.lock b/poetry.lock index 5df674cd1..f111a1b7a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,66 +1,3 @@ -[[package]] -name = "aiobotocore" -version = "1.4.2" -description = "Async client for aws services using botocore and aiohttp" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -aiohttp = ">=3.3.1" -aioitertools = ">=0.5.1" -botocore = ">=1.20.106,<1.20.107" -wrapt = ">=1.10.10" - -[package.extras] -awscli = ["awscli (>=1.19.106,<1.19.107)"] -boto3 = ["boto3 (>=1.17.106,<1.17.107)"] - -[[package]] -name = "aiohttp" -version = "3.8.0" -description = "Async http client/server framework (asyncio)" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -aiosignal = ">=1.1.2" -async-timeout = ">=4.0.0a3,<5.0" -asynctest = {version = "0.13.0", markers = "python_version < \"3.8\""} -attrs = ">=17.3.0" -charset-normalizer = ">=2.0,<3.0" -frozenlist = ">=1.1.1" -idna-ssl = {version = ">=1.0", markers = "python_version < \"3.7\""} -multidict = ">=4.5,<7.0" -typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} -yarl = ">=1.0,<2.0" - 
-[package.extras] -speedups = ["aiodns", "brotli", "cchardet"] - -[[package]] -name = "aioitertools" -version = "0.8.0" -description = "itertools and builtins for AsyncIO and mixed iterables" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -typing_extensions = {version = ">=3.7", markers = "python_version < \"3.8\""} - -[[package]] -name = "aiosignal" -version = "1.2.0" -description = "aiosignal: a list of registered asynchronous callbacks" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -frozenlist = ">=1.1.0" - [[package]] name = "alabaster" version = "0.7.12" @@ -71,13 +8,14 @@ python-versions = "*" [[package]] name = "anyio" -version = "3.3.4" +version = "3.4.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" category = "dev" optional = false python-versions = ">=3.6.2" [package.dependencies] +contextvars = {version = "*", markers = "python_version < \"3.7\""} dataclasses = {version = "*", markers = "python_version < \"3.7\""} idna = ">=2.8" sniffio = ">=1.1" @@ -85,7 +23,7 @@ typing-extensions = {version = "*", markers = "python_version < \"3.8\""} [package.extras] doc = ["sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"] -test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=6.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] +test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=6.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] trio = ["trio (>=0.16)"] [[package]] @@ -122,7 +60,7 @@ python-versions = "*" [[package]] name = "astroid" -version = "2.8.4" +version = "2.9.0" description = "An abstract syntax tree for Python with inference support." 
category = "dev" optional = false @@ -130,7 +68,7 @@ python-versions = "~=3.6" [package.dependencies] lazy-object-proxy = ">=1.4.0" -typed-ast = {version = ">=1.4.0,<1.5", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""} +typed-ast = {version = ">=1.4.0,<2.0", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""} typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""} wrapt = ">=1.11,<1.14" @@ -142,25 +80,6 @@ category = "dev" optional = false python-versions = ">=3.5" -[[package]] -name = "async-timeout" -version = "4.0.0" -description = "Timeout context manager for asyncio programs" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -typing-extensions = ">=3.6.5" - -[[package]] -name = "asynctest" -version = "0.13.0" -description = "Enhance the standard unittest package with features for testing asyncio libraries" -category = "dev" -optional = false -python-versions = ">=3.5" - [[package]] name = "atomicwrites" version = "1.4.0" @@ -204,7 +123,7 @@ python-versions = "*" [[package]] name = "backports.entry-points-selectable" -version = "1.1.0" +version = "1.1.1" description = "Compatibility shim providing selectable entry points for older implementations" category = "dev" optional = false @@ -215,7 +134,7 @@ importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-flake8", "pytest-cov", "pytest-black (>=0.3.7)", "pytest-mypy", "pytest-checkdocs (>=2.4)", "pytest-enabler (>=1.0.1)"] +testing = ["pytest", "pytest-flake8", "pytest-cov", "pytest-black (>=0.3.7)", "pytest-mypy", "pytest-checkdocs (>=2.4)", "pytest-enabler (>=1.0.1)"] [[package]] name = "beautifulsoup4" @@ -234,7 +153,7 @@ lxml = ["lxml"] [[package]] name = "black" -version = "21.10b0" +version = "21.11b1" description = "The uncompromising code 
formatter." category = "dev" optional = false @@ -246,9 +165,9 @@ dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} mypy-extensions = ">=0.4.3" pathspec = ">=0.9.0,<1" platformdirs = ">=2" -regex = ">=2020.1.8" +regex = ">=2021.4.4" tomli = ">=0.2.6,<2.0.0" -typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\""} +typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} typing-extensions = [ {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}, {version = "!=3.10.0.1", markers = "python_version >= \"3.10\""}, @@ -276,24 +195,27 @@ webencodings = "*" [[package]] name = "boto3" -version = "1.17.106" +version = "1.20.18" description = "The AWS SDK for Python" category = "main" optional = false -python-versions = ">= 2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">= 3.6" [package.dependencies] -botocore = ">=1.20.106,<1.21.0" +botocore = ">=1.23.18,<1.24.0" jmespath = ">=0.7.1,<1.0.0" -s3transfer = ">=0.4.0,<0.5.0" +s3transfer = ">=0.5.0,<0.6.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.20.106" +version = "1.23.18" description = "Low-level, data-driven core of boto 3." category = "main" optional = false -python-versions = ">= 2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">= 3.6" [package.dependencies] jmespath = ">=0.7.1,<1.0.0" @@ -301,7 +223,7 @@ python-dateutil = ">=2.1,<3.0.0" urllib3 = ">=1.25.4,<1.27" [package.extras] -crt = ["awscrt (==0.11.24)"] +crt = ["awscrt (==0.12.5)"] [[package]] name = "bump2version" @@ -332,7 +254,7 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "2.0.7" +version = "2.0.8" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
category = "main" optional = false @@ -374,7 +296,7 @@ immutables = ">=0.9" [[package]] name = "coverage" -version = "6.1.1" +version = "6.2" description = "Code coverage measurement for Python" category = "dev" optional = false @@ -388,7 +310,7 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "35.0.0" +version = "36.0.0" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." category = "dev" optional = false @@ -399,7 +321,7 @@ cffi = ">=1.12" [package.extras] docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] -docstest = ["doc8", "pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] +docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] sdist = ["setuptools_rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] @@ -488,7 +410,7 @@ testing = ["pre-commit"] [[package]] name = "filelock" -version = "3.3.2" +version = "3.4.0" description = "A platform independent file lock." 
category = "dev" optional = false @@ -512,17 +434,9 @@ mccabe = ">=0.6.0,<0.7.0" pycodestyle = ">=2.8.0,<2.9.0" pyflakes = ">=2.4.0,<2.5.0" -[[package]] -name = "frozenlist" -version = "1.2.0" -description = "A list-like structure which implements collections.abc.MutableSequence" -category = "dev" -optional = false -python-versions = ">=3.6" - [[package]] name = "fsspec" -version = "2021.10.1" +version = "2021.11.1" description = "File-system specification" category = "dev" optional = false @@ -558,20 +472,9 @@ category = "main" optional = false python-versions = ">=3.5" -[[package]] -name = "idna-ssl" -version = "1.1.0" -description = "Patch ssl.match_hostname for Unicode(idna) domains support" -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -idna = ">=2.0" - [[package]] name = "imagesize" -version = "1.2.0" +version = "1.3.0" description = "Getting image size from png/jpeg/jpeg2000/gif file" category = "dev" optional = false @@ -651,7 +554,7 @@ test = ["pytest (!=5.3.4)", "pytest-cov", "flaky", "nose", "jedi (<=0.17.2)"] [[package]] name = "ipython" -version = "7.16.1" +version = "7.16.2" description = "IPython: Productive Interactive Computing" category = "dev" optional = false @@ -662,7 +565,7 @@ appnope = {version = "*", markers = "sys_platform == \"darwin\""} backcall = "*" colorama = {version = "*", markers = "sys_platform == \"win32\""} decorator = "*" -jedi = ">=0.10" +jedi = ">=0.10,<=0.17.2" pexpect = {version = "*", markers = "sys_platform != \"win32\""} pickleshare = "*" prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" @@ -690,7 +593,7 @@ python-versions = "*" [[package]] name = "isort" -version = "5.10.0" +version = "5.10.1" description = "A Python utility / library to sort Python imports." 
category = "dev" optional = false @@ -704,22 +607,22 @@ plugins = ["setuptools"] [[package]] name = "jedi" -version = "0.18.0" +version = "0.17.2" description = "An autocompletion tool for Python that can be used for text editors." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.dependencies] -parso = ">=0.8.0,<0.9.0" +parso = ">=0.7.0,<0.8.0" [package.extras] -qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] -testing = ["Django (<3.1)", "colorama", "docopt", "pytest (<6.0.0)"] +qa = ["flake8 (==3.7.9)"] +testing = ["Django (<3.1)", "colorama", "docopt", "pytest (>=3.9.0,<5.0.0)"] [[package]] name = "jinja2" -version = "3.0.2" +version = "3.0.3" description = "A very fast and expressive template engine." category = "dev" optional = false @@ -782,7 +685,7 @@ format_nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- [[package]] name = "jupyter-client" -version = "7.0.6" +version = "7.1.0" description = "Jupyter protocol implementation and client libraries" category = "dev" optional = false @@ -815,7 +718,7 @@ traitlets = "*" [[package]] name = "jupyter-server" -version = "1.11.2" +version = "1.12.1" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." 
category = "dev" optional = false @@ -843,7 +746,7 @@ test = ["coverage", "pytest (>=6.0)", "pytest-cov", "pytest-mock", "requests", " [[package]] name = "jupyterlab" -version = "3.2.2" +version = "3.2.4" description = "JupyterLab computational environment" category = "dev" optional = false @@ -941,17 +844,9 @@ category = "dev" optional = false python-versions = "*" -[[package]] -name = "more-itertools" -version = "8.10.0" -description = "More routines for operating on iterables, beyond itertools" -category = "dev" -optional = false -python-versions = ">=3.5" - [[package]] name = "moto" -version = "2.2.12" +version = "2.2.17" description = "A library that allows your python tests to easily mock out the boto library" category = "dev" optional = false @@ -964,7 +859,6 @@ cryptography = ">=3.3.1" importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} Jinja2 = ">=2.10.1" MarkupSafe = "!=2.0.0a1" -more-itertools = "*" python-dateutil = ">=2.1,<3.0.0" pytz = "*" requests = ">=2.5" @@ -973,31 +867,24 @@ werkzeug = "*" xmltodict = "*" [package.extras] -all = ["PyYAML (>=5.1)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "ecdsa (<0.15)", "docker (>=2.5.1)", "jsondiff (>=1.1.2)", "aws-xray-sdk (>=0.93,!=0.96)", "idna (>=2.5,<4)", "cfn-lint (>=0.4.0)", "sshpubkeys (>=3.1.0)", "setuptools"] -apigateway = ["python-jose[cryptography] (>=3.1.0,<4.0.0)", "ecdsa (<0.15)"] +all = ["PyYAML (>=5.1)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "ecdsa (!=0.15)", "docker (>=2.5.1)", "jsondiff (>=1.1.2)", "aws-xray-sdk (>=0.93,!=0.96)", "idna (>=2.5,<4)", "cfn-lint (>=0.4.0)", "sshpubkeys (>=3.1.0)", "setuptools"] +apigateway = ["python-jose[cryptography] (>=3.1.0,<4.0.0)", "ecdsa (!=0.15)"] awslambda = ["docker (>=2.5.1)"] batch = ["docker (>=2.5.1)"] cloudformation = ["docker (>=2.5.1)", "PyYAML (>=5.1)", "cfn-lint (>=0.4.0)"] -cognitoidp = ["python-jose[cryptography] (>=3.1.0,<4.0.0)", "ecdsa (<0.15)"] +cognitoidp = ["python-jose[cryptography] 
(>=3.1.0,<4.0.0)", "ecdsa (!=0.15)"] ds = ["sshpubkeys (>=3.1.0)"] dynamodb2 = ["docker (>=2.5.1)"] dynamodbstreams = ["docker (>=2.5.1)"] ec2 = ["sshpubkeys (>=3.1.0)"] efs = ["sshpubkeys (>=3.1.0)"] iotdata = ["jsondiff (>=1.1.2)"] +route53resolver = ["sshpubkeys (>=3.1.0)"] s3 = ["PyYAML (>=5.1)"] -server = ["PyYAML (>=5.1)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "ecdsa (<0.15)", "docker (>=2.5.1)", "jsondiff (>=1.1.2)", "aws-xray-sdk (>=0.93,!=0.96)", "idna (>=2.5,<4)", "cfn-lint (>=0.4.0)", "sshpubkeys (>=3.1.0)", "setuptools", "flask", "flask-cors"] +server = ["PyYAML (>=5.1)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "ecdsa (!=0.15)", "docker (>=2.5.1)", "jsondiff (>=1.1.2)", "aws-xray-sdk (>=0.93,!=0.96)", "idna (>=2.5,<4)", "cfn-lint (>=0.4.0)", "sshpubkeys (>=3.1.0)", "setuptools", "flask", "flask-cors"] ssm = ["PyYAML (>=5.1)", "dataclasses"] xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] -[[package]] -name = "multidict" -version = "5.2.0" -description = "multidict implementation" -category = "dev" -optional = false -python-versions = ">=3.6" - [[package]] name = "mypy" version = "0.910" @@ -1041,7 +928,7 @@ test = ["pytest", "pytest-tornasync", "pytest-console-scripts"] [[package]] name = "nbclient" -version = "0.5.4" +version = "0.5.9" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
category = "dev" optional = false @@ -1055,9 +942,9 @@ nest-asyncio = "*" traitlets = ">=4.2" [package.extras] -dev = ["codecov", "coverage", "ipython", "ipykernel", "ipywidgets", "pytest (>=4.1)", "pytest-cov (>=2.6.1)", "check-manifest", "flake8", "mypy", "tox", "bumpversion", "xmltodict", "pip (>=18.1)", "wheel (>=0.31.0)", "setuptools (>=38.6.0)", "twine (>=1.11.0)", "black"] +dev = ["codecov", "coverage", "ipython", "ipykernel", "ipywidgets", "pytest (>=4.1)", "pytest-cov (>=2.6.1)", "check-manifest", "flake8", "mypy", "tox", "xmltodict", "pip (>=18.1)", "wheel (>=0.31.0)", "setuptools (>=38.6.0)", "twine (>=1.11.0)", "black"] sphinx = ["Sphinx (>=1.7)", "sphinx-book-theme", "mock", "moto", "myst-parser"] -test = ["codecov", "coverage", "ipython", "ipykernel", "ipywidgets", "pytest (>=4.1)", "pytest-cov (>=2.6.1)", "check-manifest", "flake8", "mypy", "tox", "bumpversion", "xmltodict", "pip (>=18.1)", "wheel (>=0.31.0)", "setuptools (>=38.6.0)", "twine (>=1.11.0)", "black"] +test = ["codecov", "coverage", "ipython", "ipykernel", "ipywidgets", "pytest (>=4.1)", "pytest-cov (>=2.6.1)", "check-manifest", "flake8", "mypy", "tox", "xmltodict", "pip (>=18.1)", "wheel (>=0.31.0)", "setuptools (>=38.6.0)", "twine (>=1.11.0)", "black"] [[package]] name = "nbconvert" @@ -1137,7 +1024,7 @@ sphinx = ">=1.8" [[package]] name = "nest-asyncio" -version = "1.5.1" +version = "1.5.4" description = "Patch asyncio to allow nested event loops" category = "dev" optional = false @@ -1145,7 +1032,7 @@ python-versions = ">=3.5" [[package]] name = "notebook" -version = "6.4.5" +version = "6.4.6" description = "A web-based notebook environment for interactive computing" category = "dev" optional = false @@ -1160,9 +1047,10 @@ jupyter-client = ">=5.3.4" jupyter-core = ">=4.6.1" nbconvert = "*" nbformat = "*" +nest-asyncio = ">=1.5" prometheus-client = "*" pyzmq = ">=17" -Send2Trash = ">=1.5.0" +Send2Trash = ">=1.8.0" terminado = ">=0.8.3" tornado = ">=6.1" traitlets = ">=4.2.1" @@ 
-1210,14 +1098,14 @@ requests = ["requests (>=2.4.0,<3.0.0)"] [[package]] name = "packaging" -version = "21.2" +version = "21.3" description = "Core utilities for Python packages" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -pyparsing = ">=2.0.2,<3" +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" [[package]] name = "pandas" @@ -1280,15 +1168,14 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "parso" -version = "0.8.2" +version = "0.7.1" description = "A Python Parser" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.extras] -qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] -testing = ["docopt", "pytest (<6.0.0)"] +testing = ["docopt", "pytest (>=3.0.7)"] [[package]] name = "pathspec" @@ -1300,7 +1187,7 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [[package]] name = "pbr" -version = "5.7.0" +version = "5.8.0" description = "Python Build Reasonableness" category = "dev" optional = false @@ -1319,7 +1206,7 @@ ptyprocess = ">=0.5" [[package]] name = "pg8000" -version = "1.22.0" +version = "1.22.1" description = "PostgreSQL interface library" category = "main" optional = false @@ -1400,7 +1287,7 @@ twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.22" +version = "3.0.23" description = "Library for building powerful interactive command lines in Python" category = "dev" optional = false @@ -1427,7 +1314,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pyarrow" -version = "6.0.0" +version = "6.0.1" description = "Python library for Apache Arrow" category = "main" optional = false @@ -1446,7 +1333,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pycparser" -version = "2.20" +version = "2.21" description = "C parser in Python" category = "dev" optional = false @@ -1495,19 +1382,19 @@ 
python-versions = ">=3.5" [[package]] name = "pylint" -version = "2.11.1" +version = "2.12.1" description = "python code static checker" category = "dev" optional = false -python-versions = "~=3.6" +python-versions = ">=3.6.2" [package.dependencies] -astroid = ">=2.8.0,<2.9" +astroid = ">=2.9.0,<2.10" colorama = {version = "*", markers = "sys_platform == \"win32\""} isort = ">=4.2.5,<6" mccabe = ">=0.6,<0.7" platformdirs = ">=2.2.0" -toml = ">=0.7.1" +toml = ">=0.9.2" typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} [[package]] @@ -1532,11 +1419,14 @@ python-versions = "*" [[package]] name = "pyparsing" -version = "2.4.7" +version = "3.0.6" description = "Python parsing module" category = "main" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = ">=3.6" + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pyrsistent" @@ -1683,7 +1573,7 @@ python-versions = "*" [[package]] name = "pywinpty" -version = "1.1.5" +version = "1.1.6" description = "Pseudo terminal support for Windows from Python." category = "dev" optional = false @@ -1703,16 +1593,16 @@ py = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "redshift-connector" -version = "2.0.889" +version = "2.0.900" description = "Redshift interface library" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [package.dependencies] beautifulsoup4 = ">=4.7.0,<5.0.0" -boto3 = ">=1.16.8,<2.0.0" -botocore = ">=1.19.8,<2.0.0" +boto3 = ">=1.9.201,<2.0.0" +botocore = ">=1.12.201,<2.0.0" lxml = ">=4.6.2" packaging = "*" pytz = ">=2020.1,<2021.9" @@ -1724,7 +1614,7 @@ full = ["numpy", "pandas"] [[package]] name = "regex" -version = "2021.11.2" +version = "2021.11.10" description = "Alternative regular expression module, to replace re." 
category = "dev" optional = false @@ -1762,7 +1652,7 @@ six = "*" [[package]] name = "responses" -version = "0.15.0" +version = "0.16.0" description = "A utility library for mocking out the `requests` Python library." category = "dev" optional = false @@ -1789,27 +1679,23 @@ docutils = ">=0.11,<1.0" [[package]] name = "s3fs" -version = "2021.10.1" +version = "0.4.2" description = "Convenient Filesystem interface over S3" category = "dev" optional = false -python-versions = ">= 3.6" +python-versions = ">= 3.5" [package.dependencies] -aiobotocore = ">=1.4.1,<1.5.0" -fsspec = "2021.10.1" - -[package.extras] -awscli = ["aiobotocore[awscli] (>=1.4.1,<1.5.0)"] -boto3 = ["aiobotocore[boto3] (>=1.4.1,<1.5.0)"] +botocore = ">=1.12.91" +fsspec = ">=0.6.0" [[package]] name = "s3transfer" -version = "0.4.2" +version = "0.5.0" description = "An Amazon S3 Transfer Manager" category = "main" optional = false -python-versions = "*" +python-versions = ">= 3.6" [package.dependencies] botocore = ">=1.12.36,<2.0a.0" @@ -1862,7 +1748,7 @@ contextvars = {version = ">=2.1", markers = "python_version < \"3.7\""} [[package]] name = "snowballstemmer" -version = "2.1.0" +version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." category = "dev" optional = false @@ -1870,7 +1756,7 @@ python-versions = "*" [[package]] name = "soupsieve" -version = "2.3" +version = "2.3.1" description = "A modern CSS selector implementation for Beautiful Soup." 
category = "main" optional = false @@ -1878,7 +1764,7 @@ python-versions = ">=3.6" [[package]] name = "sphinx" -version = "4.2.0" +version = "4.3.1" description = "Python documentation generator" category = "dev" optional = false @@ -2098,11 +1984,11 @@ python-versions = "*" [[package]] name = "typing-extensions" -version = "3.10.0.2" -description = "Backported and Experimental Type Hints for Python 3.5+" +version = "4.0.1" +description = "Backported and Experimental Type Hints for Python 3.6+" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.6" [[package]] name = "urllib3" @@ -2217,19 +2103,6 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -[[package]] -name = "yarl" -version = "1.7.2" -description = "Yet another URL library" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -idna = ">=2.0" -multidict = ">=4.0" -typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} - [[package]] name = "zipp" version = "3.6.0" @@ -2248,101 +2121,16 @@ sqlserver = ["pyodbc"] [metadata] lock-version = "1.1" python-versions = ">=3.6.2, <=3.10.0" -content-hash = "6319f70971609c4173b70aa14eab47069eea4c42741538fdbc843bbd9a43386a" +content-hash = "ddb2097690a7db7c218e5fbb0a33da42aa2738b127fa79408c77c00f5dfd7d5f" [metadata.files] -aiobotocore = [ - {file = "aiobotocore-1.4.2.tar.gz", hash = "sha256:c2f4ef325aaa839e9e2a53346b4c1c203656783a4985ab36fd4c2a9ef2dc1d2b"}, -] -aiohttp = [ - {file = "aiohttp-3.8.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:48f218a5257b6bc16bcf26a91d97ecea0c7d29c811a90d965f3dd97c20f016d6"}, - {file = "aiohttp-3.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a2fee4d656a7cc9ab47771b2a9e8fad8a9a33331c1b59c3057ecf0ac858f5bfe"}, - {file = "aiohttp-3.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:688a1eb8c1a5f7e795c7cb67e0fe600194e6723ba35f138dfae0db20c0cb8f94"}, - {file = 
"aiohttp-3.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ba09bb3dcb0b7ec936a485db2b64be44fe14cdce0a5eac56f50e55da3627385"}, - {file = "aiohttp-3.8.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d7715daf84f10bcebc083ad137e3eced3e1c8e7fa1f096ade9a8d02b08f0d91c"}, - {file = "aiohttp-3.8.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e3f81fbbc170418e22918a9585fd7281bbc11d027064d62aa4b507552c92671"}, - {file = "aiohttp-3.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1fa9f50aa1f114249b7963c98e20dc35c51be64096a85bc92433185f331de9cc"}, - {file = "aiohttp-3.8.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8a50150419b741ee048b53146c39c47053f060cb9d98e78be08fdbe942eaa3c4"}, - {file = "aiohttp-3.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a84c335337b676d832c1e2bc47c3a97531b46b82de9f959dafb315cbcbe0dfcd"}, - {file = "aiohttp-3.8.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:88d4917c30fcd7f6404fb1dc713fa21de59d3063dcc048f4a8a1a90e6bbbd739"}, - {file = "aiohttp-3.8.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b76669b7c058b8020b11008283c3b8e9c61bfd978807c45862956119b77ece45"}, - {file = "aiohttp-3.8.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:84fe1732648c1bc303a70faa67cbc2f7f2e810c8a5bca94f6db7818e722e4c0a"}, - {file = "aiohttp-3.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:730b7c2b7382194d9985ffdc32ab317e893bca21e0665cb1186bdfbb4089d990"}, - {file = "aiohttp-3.8.0-cp310-cp310-win32.whl", hash = "sha256:0a96473a1f61d7920a9099bc8e729dc8282539d25f79c12573ee0fdb9c8b66a8"}, - {file = "aiohttp-3.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:764c7c6aa1f78bd77bd9674fc07d1ec44654da1818d0eef9fb48aa8371a3c847"}, - {file = "aiohttp-3.8.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:9951c2696c4357703001e1fe6edc6ae8e97553ac630492ea1bf64b429cb712a3"}, - {file = "aiohttp-3.8.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0af379221975054162959e00daf21159ff69a712fc42ed0052caddbd70d52ff4"}, - {file = "aiohttp-3.8.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9689af0f0a89e5032426c143fa3683b0451f06c83bf3b1e27902bd33acfae769"}, - {file = "aiohttp-3.8.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe4a327da0c6b6e59f2e474ae79d6ee7745ac3279fd15f200044602fa31e3d79"}, - {file = "aiohttp-3.8.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ecb314e59bedb77188017f26e6b684b1f6d0465e724c3122a726359fa62ca1ba"}, - {file = "aiohttp-3.8.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a5399a44a529083951b55521cf4ecbf6ad79fd54b9df57dbf01699ffa0549fc9"}, - {file = "aiohttp-3.8.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:09754a0d5eaab66c37591f2f8fac8f9781a5f61d51aa852a3261c4805ca6b984"}, - {file = "aiohttp-3.8.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:adf0cb251b1b842c9dee5cfcdf880ba0aae32e841b8d0e6b6feeaef002a267c5"}, - {file = "aiohttp-3.8.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:a4759e85a191de58e0ea468ab6fd9c03941986eee436e0518d7a9291fab122c8"}, - {file = "aiohttp-3.8.0-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:28369fe331a59d80393ec82df3d43307c7461bfaf9217999e33e2acc7984ff7c"}, - {file = "aiohttp-3.8.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:2f44d1b1c740a9e2275160d77c73a11f61e8a916191c572876baa7b282bcc934"}, - {file = "aiohttp-3.8.0-cp36-cp36m-win32.whl", hash = "sha256:e27cde1e8d17b09730801ce97b6e0c444ba2a1f06348b169fd931b51d3402f0d"}, - {file = "aiohttp-3.8.0-cp36-cp36m-win_amd64.whl", hash = "sha256:15a660d06092b7c92ed17c1dbe6c1eab0a02963992d60e3e8b9d5fa7fa81f01e"}, - {file = 
"aiohttp-3.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:257f4fad1714d26d562572095c8c5cd271d5a333252795cb7a002dca41fdbad7"}, - {file = "aiohttp-3.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6074a3b2fa2d0c9bf0963f8dfc85e1e54a26114cc8594126bc52d3fa061c40e"}, - {file = "aiohttp-3.8.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7a315ceb813208ef32bdd6ec3a85cbe3cb3be9bbda5fd030c234592fa9116993"}, - {file = "aiohttp-3.8.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9a52b141ff3b923a9166595de6e3768a027546e75052ffba267d95b54267f4ab"}, - {file = "aiohttp-3.8.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6a038cb1e6e55b26bb5520ccffab7f539b3786f5553af2ee47eb2ec5cbd7084e"}, - {file = "aiohttp-3.8.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98b1ea2763b33559dd9ec621d67fc17b583484cb90735bfb0ec3614c17b210e4"}, - {file = "aiohttp-3.8.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9e8723c3256641e141cd18f6ce478d54a004138b9f1a36e41083b36d9ecc5fc5"}, - {file = "aiohttp-3.8.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:14a6f026eca80dfa3d52e86be89feb5cd878f6f4a6adb34457e2c689fd85229b"}, - {file = "aiohttp-3.8.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c62d4791a8212c885b97a63ef5f3974b2cd41930f0cd224ada9c6ee6654f8150"}, - {file = "aiohttp-3.8.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:90a97c2ed2830e7974cbe45f0838de0aefc1c123313f7c402e21c29ec063fbb4"}, - {file = "aiohttp-3.8.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dcc4d5dd5fba3affaf4fd08f00ef156407573de8c63338787614ccc64f96b321"}, - {file = "aiohttp-3.8.0-cp37-cp37m-win32.whl", hash = "sha256:de42f513ed7a997bc821bddab356b72e55e8396b1b7ba1bf39926d538a76a90f"}, - {file = "aiohttp-3.8.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:7d76e8a83396e06abe3df569b25bd3fc88bf78b7baa2c8e4cf4aaf5983af66a3"}, - {file = "aiohttp-3.8.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5d79174d96446a02664e2bffc95e7b6fa93b9e6d8314536c5840dff130d0878b"}, - {file = "aiohttp-3.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4a6551057a846bf72c7a04f73de3fcaca269c0bd85afe475ceb59d261c6a938c"}, - {file = "aiohttp-3.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:871d4fdc56288caa58b1094c20f2364215f7400411f76783ea19ad13be7c8e19"}, - {file = "aiohttp-3.8.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ba08a71caa42eef64357257878fb17f3fba3fba6e81a51d170e32321569e079"}, - {file = "aiohttp-3.8.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:51f90dabd9933b1621260b32c2f0d05d36923c7a5a909eb823e429dba0fd2f3e"}, - {file = "aiohttp-3.8.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f348ebd20554e8bc26e8ef3ed8a134110c0f4bf015b3b4da6a4ddf34e0515b19"}, - {file = "aiohttp-3.8.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d5f8c04574efa814a24510122810e3a3c77c0552f9f6ff65c9862f1f046be2c3"}, - {file = "aiohttp-3.8.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5ecffdc748d3b40dd3618ede0170e4f5e1d3c9647cfb410d235d19e62cb54ee0"}, - {file = "aiohttp-3.8.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:577cc2c7b807b174814dac2d02e673728f2e46c7f90ceda3a70ea4bb6d90b769"}, - {file = "aiohttp-3.8.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6b79f6c31e68b6dafc0317ec453c83c86dd8db1f8f0c6f28e97186563fca87a0"}, - {file = "aiohttp-3.8.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:2bdd655732e38b40f8a8344d330cfae3c727fb257585df923316aabbd489ccb8"}, - {file = "aiohttp-3.8.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:63fa57a0708573d3c059f7b5527617bd0c291e4559298473df238d502e4ab98c"}, - 
{file = "aiohttp-3.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d3f90ee275b1d7c942e65b5c44c8fb52d55502a0b9a679837d71be2bd8927661"}, - {file = "aiohttp-3.8.0-cp38-cp38-win32.whl", hash = "sha256:fa818609357dde5c4a94a64c097c6404ad996b1d38ca977a72834b682830a722"}, - {file = "aiohttp-3.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:097ecf52f6b9859b025c1e36401f8aa4573552e887d1b91b4b999d68d0b5a3b3"}, - {file = "aiohttp-3.8.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:be03a7483ad9ea60388f930160bb3728467dd0af538aa5edc60962ee700a0bdc"}, - {file = "aiohttp-3.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:78d51e35ed163783d721b6f2ce8ce3f82fccfe471e8e50a10fba13a766d31f5a"}, - {file = "aiohttp-3.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bda75d73e7400e81077b0910c9a60bf9771f715420d7e35fa7739ae95555f195"}, - {file = "aiohttp-3.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:707adc30ea6918fba725c3cb3fe782d271ba352b22d7ae54a7f9f2e8a8488c41"}, - {file = "aiohttp-3.8.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f58aa995b905ab82fe228acd38538e7dc1509e01508dcf307dad5046399130f"}, - {file = "aiohttp-3.8.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c996eb91bfbdab1e01e2c02e7ff678c51e2b28e3a04e26e41691991cc55795"}, - {file = "aiohttp-3.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d6a1a66bb8bac9bc2892c2674ea363486bfb748b86504966a390345a11b1680e"}, - {file = "aiohttp-3.8.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dafc01a32b4a1d7d3ef8bfd3699406bb44f7b2e0d3eb8906d574846e1019b12f"}, - {file = "aiohttp-3.8.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:949a605ef3907254b122f845baa0920407080cdb1f73aa64f8d47df4a7f4c4f9"}, - {file = "aiohttp-3.8.0-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:0d7b056fd3972d353cb4bc305c03f9381583766b7f8c7f1c44478dba69099e33"}, - {file = "aiohttp-3.8.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f1d39a744101bf4043fa0926b3ead616607578192d0a169974fb5265ab1e9d2"}, - {file = "aiohttp-3.8.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:67ca7032dfac8d001023fadafc812d9f48bf8a8c3bb15412d9cdcf92267593f4"}, - {file = "aiohttp-3.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cb751ef712570d3bda9a73fd765ff3e1aba943ec5d52a54a0c2e89c7eef9da1e"}, - {file = "aiohttp-3.8.0-cp39-cp39-win32.whl", hash = "sha256:6d3e027fe291b77f6be9630114a0200b2c52004ef20b94dc50ca59849cd623b3"}, - {file = "aiohttp-3.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:3c5e9981e449d54308c6824f172ec8ab63eb9c5f922920970249efee83f7e919"}, - {file = "aiohttp-3.8.0.tar.gz", hash = "sha256:d3b19d8d183bcfd68b25beebab8dc3308282fe2ca3d6ea3cb4cd101b3c279f8d"}, -] -aioitertools = [ - {file = "aioitertools-0.8.0-py3-none-any.whl", hash = "sha256:3a141f01d1050ac8c01917aee248d262736dab875ce0471f0dba5f619346b452"}, - {file = "aioitertools-0.8.0.tar.gz", hash = "sha256:8b02facfbc9b0f1867739949a223f3d3267ed8663691cc95abd94e2c1d8c2b46"}, -] -aiosignal = [ - {file = "aiosignal-1.2.0-py3-none-any.whl", hash = "sha256:26e62109036cd181df6e6ad646f91f0dcfd05fe16d0cb924138ff2ab75d64e3a"}, - {file = "aiosignal-1.2.0.tar.gz", hash = "sha256:78ed67db6c7b7ced4f98e495e572106d5c432a93e1ddd1bf475e1dc05f5b7df2"}, -] alabaster = [ {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, ] anyio = [ - {file = "anyio-3.3.4-py3-none-any.whl", hash = "sha256:4fd09a25ab7fa01d34512b7249e366cd10358cdafc95022c7ff8c8f8a5026d66"}, - {file = "anyio-3.3.4.tar.gz", hash = "sha256:67da67b5b21f96b9d3d65daa6ea99f5d5282cb09f50eb4456f8fb51dffefc3ff"}, + {file = "anyio-3.4.0-py3-none-any.whl", hash 
= "sha256:2855a9423524abcdd652d942f8932fda1735210f77a6b392eafd9ff34d3fe020"}, + {file = "anyio-3.4.0.tar.gz", hash = "sha256:24adc69309fb5779bc1e06158e143e0b6d2c56b302a3ac3de3083c705a6ed39d"}, ] appnope = [ {file = "appnope-0.1.2-py2.py3-none-any.whl", hash = "sha256:93aa393e9d6c54c5cd570ccadd8edad61ea0c4b9ea7a01409020c9aa019eb442"}, @@ -2366,21 +2154,13 @@ asn1crypto = [ {file = "asn1crypto-1.4.0.tar.gz", hash = "sha256:f4f6e119474e58e04a2b1af817eb585b4fd72bdd89b998624712b5c99be7641c"}, ] astroid = [ - {file = "astroid-2.8.4-py3-none-any.whl", hash = "sha256:0755c998e7117078dcb7d0bda621391dd2a85da48052d948c7411ab187325346"}, - {file = "astroid-2.8.4.tar.gz", hash = "sha256:1e83a69fd51b013ebf5912d26b9338d6643a55fec2f20c787792680610eed4a2"}, + {file = "astroid-2.9.0-py3-none-any.whl", hash = "sha256:776ca0b748b4ad69c00bfe0fff38fa2d21c338e12c84aa9715ee0d473c422778"}, + {file = "astroid-2.9.0.tar.gz", hash = "sha256:5939cf55de24b92bda00345d4d0659d01b3c7dafb5055165c330bc7c568ba273"}, ] async-generator = [ {file = "async_generator-1.10-py3-none-any.whl", hash = "sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b"}, {file = "async_generator-1.10.tar.gz", hash = "sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144"}, ] -async-timeout = [ - {file = "async-timeout-4.0.0.tar.gz", hash = "sha256:7d87a4e8adba8ededb52e579ce6bc8276985888913620c935094c2276fd83382"}, - {file = "async_timeout-4.0.0-py3-none-any.whl", hash = "sha256:f3303dddf6cafa748a92747ab6c2ecf60e0aeca769aee4c151adfce243a05d9b"}, -] -asynctest = [ - {file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"}, - {file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"}, -] atomicwrites = [ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, {file = 
"atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, @@ -2398,28 +2178,28 @@ backcall = [ {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, ] "backports.entry-points-selectable" = [ - {file = "backports.entry_points_selectable-1.1.0-py2.py3-none-any.whl", hash = "sha256:a6d9a871cde5e15b4c4a53e3d43ba890cc6861ec1332c9c2428c92f977192acc"}, - {file = "backports.entry_points_selectable-1.1.0.tar.gz", hash = "sha256:988468260ec1c196dab6ae1149260e2f5472c9110334e5d51adcb77867361f6a"}, + {file = "backports.entry_points_selectable-1.1.1-py2.py3-none-any.whl", hash = "sha256:7fceed9532a7aa2bd888654a7314f864a3c16a4e710b34a58cfc0f08114c663b"}, + {file = "backports.entry_points_selectable-1.1.1.tar.gz", hash = "sha256:914b21a479fde881635f7af5adc7f6e38d6b274be32269070c53b698c60d5386"}, ] beautifulsoup4 = [ {file = "beautifulsoup4-4.10.0-py3-none-any.whl", hash = "sha256:9a315ce70049920ea4572a4055bc4bd700c940521d36fc858205ad4fcde149bf"}, {file = "beautifulsoup4-4.10.0.tar.gz", hash = "sha256:c23ad23c521d818955a4151a67d81580319d4bf548d3d49f4223ae041ff98891"}, ] black = [ - {file = "black-21.10b0-py3-none-any.whl", hash = "sha256:6eb7448da9143ee65b856a5f3676b7dda98ad9abe0f87fce8c59291f15e82a5b"}, - {file = "black-21.10b0.tar.gz", hash = "sha256:a9952229092e325fe5f3dae56d81f639b23f7131eb840781947e4b2886030f33"}, + {file = "black-21.11b1-py3-none-any.whl", hash = "sha256:802c6c30b637b28645b7fde282ed2569c0cd777dbe493a41b6a03c1d903f99ac"}, + {file = "black-21.11b1.tar.gz", hash = "sha256:a042adbb18b3262faad5aff4e834ff186bb893f95ba3a8013f09de1e5569def2"}, ] bleach = [ {file = "bleach-4.1.0-py2.py3-none-any.whl", hash = "sha256:4d2651ab93271d1129ac9cbc679f524565cc8a1b791909c4a51eac4446a15994"}, {file = "bleach-4.1.0.tar.gz", hash = "sha256:0900d8b37eba61a802ee40ac0061f8c2b5dee29c1927dd1d233e075ebf5a71da"}, ] boto3 = [ - {file = "boto3-1.17.106-py2.py3-none-any.whl", 
hash = "sha256:231b2023f4fe12af679afa7d893534ce2703db2318a8fa51fc7876890760f352"}, - {file = "boto3-1.17.106.tar.gz", hash = "sha256:c0740378b913ca53f5fc0dba91e99a752c5a30ae7b58a0c5e54e3e2a68df26c5"}, + {file = "boto3-1.20.18-py3-none-any.whl", hash = "sha256:06ec67884fe9a24a95562d83c5defe4fd3b71ee6065a63de67335b443c376532"}, + {file = "boto3-1.20.18.tar.gz", hash = "sha256:e4d0af77eb53e69adf2c77296c6cb408d8b168088905446d718bcfdab633d274"}, ] botocore = [ - {file = "botocore-1.20.106-py2.py3-none-any.whl", hash = "sha256:47ec01b20c4bc6aaa16d21f756ead2f437b47c1335b083356cdc874e9140b023"}, - {file = "botocore-1.20.106.tar.gz", hash = "sha256:6d5c983808b1d00437f56d0c08412bd82d9f8012fdb77e555f97277a1fd4d5df"}, + {file = "botocore-1.23.18-py3-none-any.whl", hash = "sha256:a46fcc6a65c0ef44ec3e04e329ad2dd94cbcbc4a1e2987b56ec914fe052b6e5c"}, + {file = "botocore-1.23.18.tar.gz", hash = "sha256:1bf5134cfeca3188bdd96584efc1de71c24f27b8cb711a28a1a331d8d7fef2aa"}, ] bump2version = [ {file = "bump2version-1.0.1-py2.py3-none-any.whl", hash = "sha256:37f927ea17cde7ae2d7baf832f8e80ce3777624554a653006c9144f8017fe410"}, @@ -2482,8 +2262,8 @@ cffi = [ {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, ] charset-normalizer = [ - {file = "charset-normalizer-2.0.7.tar.gz", hash = "sha256:e019de665e2bcf9c2b64e2e5aa025fa991da8720daa3c1138cadd2fd1856aed0"}, - {file = "charset_normalizer-2.0.7-py3-none-any.whl", hash = "sha256:f7af805c321bfa1ce6714c51f254e0d5bb5e5834039bc17db7ebe3a4cec9492b"}, + {file = "charset-normalizer-2.0.8.tar.gz", hash = "sha256:735e240d9a8506778cd7a453d97e817e536bb1fc29f4f6961ce297b9c7a917b0"}, + {file = "charset_normalizer-2.0.8-py3-none-any.whl", hash = "sha256:83fcdeb225499d6344c8f7f34684c2981270beacc32ede2e669e94f7fa544405"}, ] click = [ {file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"}, @@ -2497,74 +2277,76 @@ contextvars = [ 
{file = "contextvars-2.4.tar.gz", hash = "sha256:f38c908aaa59c14335eeea12abea5f443646216c4e29380d7bf34d2018e2c39e"}, ] coverage = [ - {file = "coverage-6.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:42a1fb5dee3355df90b635906bb99126faa7936d87dfc97eacc5293397618cb7"}, - {file = "coverage-6.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a00284dbfb53b42e35c7dd99fc0e26ef89b4a34efff68078ed29d03ccb28402a"}, - {file = "coverage-6.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:51a441011a30d693e71dea198b2a6f53ba029afc39f8e2aeb5b77245c1b282ef"}, - {file = "coverage-6.1.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e76f017b6d4140a038c5ff12be1581183d7874e41f1c0af58ecf07748d36a336"}, - {file = "coverage-6.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7833c872718dc913f18e51ee97ea0dece61d9930893a58b20b3daf09bb1af6b6"}, - {file = "coverage-6.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8186b5a4730c896cbe1e4b645bdc524e62d874351ae50e1db7c3e9f5dc81dc26"}, - {file = "coverage-6.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bbca34dca5a2d60f81326d908d77313816fad23d11b6069031a3d6b8c97a54f9"}, - {file = "coverage-6.1.1-cp310-cp310-win32.whl", hash = "sha256:72bf437d54186d104388cbae73c9f2b0f8a3e11b6e8d7deb593bd14625c96026"}, - {file = "coverage-6.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:994ce5a7b3d20981b81d83618aa4882f955bfa573efdbef033d5632b58597ba9"}, - {file = "coverage-6.1.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ab6a0fe4c96f8058d41948ddf134420d3ef8c42d5508b5a341a440cce7a37a1d"}, - {file = "coverage-6.1.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10ab138b153e4cc408b43792cb7f518f9ee02f4ff55cd1ab67ad6fd7e9905c7e"}, - {file = 
"coverage-6.1.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7e083d32965d2eb6638a77e65b622be32a094fdc0250f28ce6039b0732fbcaa8"}, - {file = "coverage-6.1.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:359a32515e94e398a5c0fa057e5887a42e647a9502d8e41165cf5cb8d3d1ca67"}, - {file = "coverage-6.1.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:bf656cd74ff7b4ed7006cdb2a6728150aaad69c7242b42a2a532f77b63ea233f"}, - {file = "coverage-6.1.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:dc5023be1c2a8b0a0ab5e31389e62c28b2453eb31dd069f4b8d1a0f9814d951a"}, - {file = "coverage-6.1.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:557594a50bfe3fb0b1b57460f6789affe8850ad19c1acf2d14a3e12b2757d489"}, - {file = "coverage-6.1.1-cp36-cp36m-win32.whl", hash = "sha256:9eb0a1923354e0fdd1c8a6f53f5db2e6180d670e2b587914bf2e79fa8acfd003"}, - {file = "coverage-6.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:04a92a6cf9afd99f9979c61348ec79725a9f9342fb45e63c889e33c04610d97b"}, - {file = "coverage-6.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:479228e1b798d3c246ac89b09897ee706c51b3e5f8f8d778067f38db73ccc717"}, - {file = "coverage-6.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78287731e3601ea5ce9d6468c82d88a12ef8fe625d6b7bdec9b45d96c1ad6533"}, - {file = "coverage-6.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c95257aa2ccf75d3d91d772060538d5fea7f625e48157f8ca44594f94d41cb33"}, - {file = "coverage-6.1.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9ad5895938a894c368d49d8470fe9f519909e5ebc6b8f8ea5190bd0df6aa4271"}, - {file = "coverage-6.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:326d944aad0189603733d646e8d4a7d952f7145684da973c463ec2eefe1387c2"}, - {file = 
"coverage-6.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e7d5606b9240ed4def9cbdf35be4308047d11e858b9c88a6c26974758d6225ce"}, - {file = "coverage-6.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:572f917267f363101eec375c109c9c1118037c7cc98041440b5eabda3185ac7b"}, - {file = "coverage-6.1.1-cp37-cp37m-win32.whl", hash = "sha256:35cd2230e1ed76df7d0081a997f0fe705be1f7d8696264eb508076e0d0b5a685"}, - {file = "coverage-6.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:65ad3ff837c89a229d626b8004f0ee32110f9bfdb6a88b76a80df36ccc60d926"}, - {file = "coverage-6.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:977ce557d79577a3dd510844904d5d968bfef9489f512be65e2882e1c6eed7d8"}, - {file = "coverage-6.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62512c0ec5d307f56d86504c58eace11c1bc2afcdf44e3ff20de8ca427ca1d0e"}, - {file = "coverage-6.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2e5b9c17a56b8bf0c0a9477fcd30d357deb486e4e1b389ed154f608f18556c8a"}, - {file = "coverage-6.1.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:666c6b32b69e56221ad1551d377f718ed00e6167c7a1b9257f780b105a101271"}, - {file = "coverage-6.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:fb2fa2f6506c03c48ca42e3fe5a692d7470d290c047ee6de7c0f3e5fa7639ac9"}, - {file = "coverage-6.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f0f80e323a17af63eac6a9db0c9188c10f1fd815c3ab299727150cc0eb92c7a4"}, - {file = "coverage-6.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:738e823a746841248b56f0f3bd6abf3b73af191d1fd65e4c723b9c456216f0ad"}, - {file = "coverage-6.1.1-cp38-cp38-win32.whl", hash = "sha256:8605add58e6a960729aa40c0fd9a20a55909dd9b586d3e8104cc7f45869e4c6b"}, - {file = "coverage-6.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:6e994003e719458420e14ffb43c08f4c14990e20d9e077cb5cad7a3e419bbb54"}, - {file = 
"coverage-6.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e3c4f5211394cd0bf6874ac5d29684a495f9c374919833dcfff0bd6d37f96201"}, - {file = "coverage-6.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e14bceb1f3ae8a14374be2b2d7bc12a59226872285f91d66d301e5f41705d4d6"}, - {file = "coverage-6.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0147f7833c41927d84f5af9219d9b32f875c0689e5e74ac8ca3cb61e73a698f9"}, - {file = "coverage-6.1.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b1d0a1bce919de0dd8da5cff4e616b2d9e6ebf3bd1410ff645318c3dd615010a"}, - {file = "coverage-6.1.1-cp39-cp39-win32.whl", hash = "sha256:a11a2c019324fc111485e79d55907e7289e53d0031275a6c8daed30690bc50c0"}, - {file = "coverage-6.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:4d8b453764b9b26b0dd2afb83086a7c3f9379134e340288d2a52f8a91592394b"}, - {file = "coverage-6.1.1-pp36-none-any.whl", hash = "sha256:3b270c6b48d3ff5a35deb3648028ba2643ad8434b07836782b1139cf9c66313f"}, - {file = "coverage-6.1.1-pp37-none-any.whl", hash = "sha256:ffa8fee2b1b9e60b531c4c27cf528d6b5d5da46b1730db1f4d6eee56ff282e07"}, - {file = "coverage-6.1.1-pp38-none-any.whl", hash = "sha256:4cd919057636f63ab299ccb86ea0e78b87812400c76abab245ca385f17d19fb5"}, - {file = "coverage-6.1.1.tar.gz", hash = "sha256:b8e4f15b672c9156c1154249a9c5746e86ac9ae9edc3799ee3afebc323d9d9e0"}, + {file = "coverage-6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6dbc1536e105adda7a6312c778f15aaabe583b0e9a0b0a324990334fd458c94b"}, + {file = "coverage-6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:174cf9b4bef0db2e8244f82059a5a72bd47e1d40e71c68ab055425172b16b7d0"}, + {file = "coverage-6.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:92b8c845527eae547a2a6617d336adc56394050c3ed8a6918683646328fbb6da"}, + 
{file = "coverage-6.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c7912d1526299cb04c88288e148c6c87c0df600eca76efd99d84396cfe00ef1d"}, + {file = "coverage-6.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5d2033d5db1d58ae2d62f095e1aefb6988af65b4b12cb8987af409587cc0739"}, + {file = "coverage-6.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3feac4084291642165c3a0d9eaebedf19ffa505016c4d3db15bfe235718d4971"}, + {file = "coverage-6.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:276651978c94a8c5672ea60a2656e95a3cce2a3f31e9fb2d5ebd4c215d095840"}, + {file = "coverage-6.2-cp310-cp310-win32.whl", hash = "sha256:f506af4f27def639ba45789fa6fde45f9a217da0be05f8910458e4557eed020c"}, + {file = "coverage-6.2-cp310-cp310-win_amd64.whl", hash = "sha256:3f7c17209eef285c86f819ff04a6d4cbee9b33ef05cbcaae4c0b4e8e06b3ec8f"}, + {file = "coverage-6.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:13362889b2d46e8d9f97c421539c97c963e34031ab0cb89e8ca83a10cc71ac76"}, + {file = "coverage-6.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:22e60a3ca5acba37d1d4a2ee66e051f5b0e1b9ac950b5b0cf4aa5366eda41d47"}, + {file = "coverage-6.2-cp311-cp311-win_amd64.whl", hash = "sha256:b637c57fdb8be84e91fac60d9325a66a5981f8086c954ea2772efe28425eaf64"}, + {file = "coverage-6.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f467bbb837691ab5a8ca359199d3429a11a01e6dfb3d9dcc676dc035ca93c0a9"}, + {file = "coverage-6.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2641f803ee9f95b1f387f3e8f3bf28d83d9b69a39e9911e5bfee832bea75240d"}, + {file = "coverage-6.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1219d760ccfafc03c0822ae2e06e3b1248a8e6d1a70928966bafc6838d3c9e48"}, + {file = "coverage-6.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:9a2b5b52be0a8626fcbffd7e689781bf8c2ac01613e77feda93d96184949a98e"}, + {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8e2c35a4c1f269704e90888e56f794e2d9c0262fb0c1b1c8c4ee44d9b9e77b5d"}, + {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5d6b09c972ce9200264c35a1d53d43ca55ef61836d9ec60f0d44273a31aa9f17"}, + {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e3db840a4dee542e37e09f30859f1612da90e1c5239a6a2498c473183a50e781"}, + {file = "coverage-6.2-cp36-cp36m-win32.whl", hash = "sha256:4e547122ca2d244f7c090fe3f4b5a5861255ff66b7ab6d98f44a0222aaf8671a"}, + {file = "coverage-6.2-cp36-cp36m-win_amd64.whl", hash = "sha256:01774a2c2c729619760320270e42cd9e797427ecfddd32c2a7b639cdc481f3c0"}, + {file = "coverage-6.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb8b8ee99b3fffe4fd86f4c81b35a6bf7e4462cba019997af2fe679365db0c49"}, + {file = "coverage-6.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:619346d57c7126ae49ac95b11b0dc8e36c1dd49d148477461bb66c8cf13bb521"}, + {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0a7726f74ff63f41e95ed3a89fef002916c828bb5fcae83b505b49d81a066884"}, + {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cfd9386c1d6f13b37e05a91a8583e802f8059bebfccde61a418c5808dea6bbfa"}, + {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:17e6c11038d4ed6e8af1407d9e89a2904d573be29d51515f14262d7f10ef0a64"}, + {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c254b03032d5a06de049ce8bca8338a5185f07fb76600afff3c161e053d88617"}, + {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dca38a21e4423f3edb821292e97cec7ad38086f84313462098568baedf4331f8"}, + {file = "coverage-6.2-cp37-cp37m-win32.whl", hash = 
"sha256:600617008aa82032ddeace2535626d1bc212dfff32b43989539deda63b3f36e4"}, + {file = "coverage-6.2-cp37-cp37m-win_amd64.whl", hash = "sha256:bf154ba7ee2fd613eb541c2bc03d3d9ac667080a737449d1a3fb342740eb1a74"}, + {file = "coverage-6.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f9afb5b746781fc2abce26193d1c817b7eb0e11459510fba65d2bd77fe161d9e"}, + {file = "coverage-6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edcada2e24ed68f019175c2b2af2a8b481d3d084798b8c20d15d34f5c733fa58"}, + {file = "coverage-6.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9c8c4283e17690ff1a7427123ffb428ad6a52ed720d550e299e8291e33184dc"}, + {file = "coverage-6.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f614fc9956d76d8a88a88bb41ddc12709caa755666f580af3a688899721efecd"}, + {file = "coverage-6.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9365ed5cce5d0cf2c10afc6add145c5037d3148585b8ae0e77cc1efdd6aa2953"}, + {file = "coverage-6.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8bdfe9ff3a4ea37d17f172ac0dff1e1c383aec17a636b9b35906babc9f0f5475"}, + {file = "coverage-6.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:63c424e6f5b4ab1cf1e23a43b12f542b0ec2e54f99ec9f11b75382152981df57"}, + {file = "coverage-6.2-cp38-cp38-win32.whl", hash = "sha256:49dbff64961bc9bdd2289a2bda6a3a5a331964ba5497f694e2cbd540d656dc1c"}, + {file = "coverage-6.2-cp38-cp38-win_amd64.whl", hash = "sha256:9a29311bd6429be317c1f3fe4bc06c4c5ee45e2fa61b2a19d4d1d6111cb94af2"}, + {file = "coverage-6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03b20e52b7d31be571c9c06b74746746d4eb82fc260e594dc662ed48145e9efd"}, + {file = "coverage-6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:215f8afcc02a24c2d9a10d3790b21054b58d71f4b3c6f055d4bb1b15cecce685"}, + {file = 
"coverage-6.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a4bdeb0a52d1d04123b41d90a4390b096f3ef38eee35e11f0b22c2d031222c6c"}, + {file = "coverage-6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c332d8f8d448ded473b97fefe4a0983265af21917d8b0cdcb8bb06b2afe632c3"}, + {file = "coverage-6.2-cp39-cp39-win32.whl", hash = "sha256:6e1394d24d5938e561fbeaa0cd3d356207579c28bd1792f25a068743f2d5b282"}, + {file = "coverage-6.2-cp39-cp39-win_amd64.whl", hash = "sha256:86f2e78b1eff847609b1ca8050c9e1fa3bd44ce755b2ec30e70f2d3ba3844644"}, + {file = "coverage-6.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:5829192582c0ec8ca4a2532407bc14c2f338d9878a10442f5d03804a95fac9de"}, + {file = "coverage-6.2.tar.gz", hash = "sha256:e2cad8093172b7d1595b4ad66f24270808658e11acf43a8f95b41276162eb5b8"}, ] cryptography = [ - {file = "cryptography-35.0.0-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:d57e0cdc1b44b6cdf8af1d01807db06886f10177469312fbde8f44ccbb284bc9"}, - {file = "cryptography-35.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:ced40344e811d6abba00295ced98c01aecf0c2de39481792d87af4fa58b7b4d6"}, - {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:54b2605e5475944e2213258e0ab8696f4f357a31371e538ef21e8d61c843c28d"}, - {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:7b7ceeff114c31f285528ba8b390d3e9cfa2da17b56f11d366769a807f17cbaa"}, - {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d69645f535f4b2c722cfb07a8eab916265545b3475fdb34e0be2f4ee8b0b15e"}, - {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a2d0e0acc20ede0f06ef7aa58546eee96d2592c00f450c9acb89c5879b61992"}, - {file = 
"cryptography-35.0.0-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:07bb7fbfb5de0980590ddfc7f13081520def06dc9ed214000ad4372fb4e3c7f6"}, - {file = "cryptography-35.0.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7eba2cebca600a7806b893cb1d541a6e910afa87e97acf2021a22b32da1df52d"}, - {file = "cryptography-35.0.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:18d90f4711bf63e2fb21e8c8e51ed8189438e6b35a6d996201ebd98a26abbbe6"}, - {file = "cryptography-35.0.0-cp36-abi3-win32.whl", hash = "sha256:c10c797ac89c746e488d2ee92bd4abd593615694ee17b2500578b63cad6b93a8"}, - {file = "cryptography-35.0.0-cp36-abi3-win_amd64.whl", hash = "sha256:7075b304cd567694dc692ffc9747f3e9cb393cc4aa4fb7b9f3abd6f5c4e43588"}, - {file = "cryptography-35.0.0-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a688ebcd08250eab5bb5bca318cc05a8c66de5e4171a65ca51db6bd753ff8953"}, - {file = "cryptography-35.0.0-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d99915d6ab265c22873f1b4d6ea5ef462ef797b4140be4c9d8b179915e0985c6"}, - {file = "cryptography-35.0.0-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:928185a6d1ccdb816e883f56ebe92e975a262d31cc536429041921f8cb5a62fd"}, - {file = "cryptography-35.0.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:ebeddd119f526bcf323a89f853afb12e225902a24d29b55fe18dd6fcb2838a76"}, - {file = "cryptography-35.0.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:22a38e96118a4ce3b97509443feace1d1011d0571fae81fc3ad35f25ba3ea999"}, - {file = "cryptography-35.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb80e8a1f91e4b7ef8b33041591e6d89b2b8e122d787e87eeb2b08da71bb16ad"}, - {file = "cryptography-35.0.0-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:abb5a361d2585bb95012a19ed9b2c8f412c5d723a9836418fab7aaa0243e67d2"}, - {file = "cryptography-35.0.0-pp37-pypy37_pp73-win_amd64.whl", hash = 
"sha256:1ed82abf16df40a60942a8c211251ae72858b25b7421ce2497c2eb7a1cee817c"}, - {file = "cryptography-35.0.0.tar.gz", hash = "sha256:9933f28f70d0517686bd7de36166dda42094eac49415459d9bdf5e7df3e0086d"}, + {file = "cryptography-36.0.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:9511416e85e449fe1de73f7f99b21b3aa04fba4c4d335d30c486ba3756e3a2a6"}, + {file = "cryptography-36.0.0-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:97199a13b772e74cdcdb03760c32109c808aff7cd49c29e9cf4b7754bb725d1d"}, + {file = "cryptography-36.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:494106e9cd945c2cadfce5374fa44c94cfadf01d4566a3b13bb487d2e6c7959e"}, + {file = "cryptography-36.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6fbbbb8aab4053fa018984bb0e95a16faeb051dd8cca15add2a27e267ba02b58"}, + {file = "cryptography-36.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:684993ff6f67000a56454b41bdc7e015429732d65a52d06385b6e9de6181c71e"}, + {file = "cryptography-36.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c702855cd3174666ef0d2d13dcc879090aa9c6c38f5578896407a7028f75b9f"}, + {file = "cryptography-36.0.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d91bc9f535599bed58f6d2e21a2724cb0c3895bf41c6403fe881391d29096f1d"}, + {file = "cryptography-36.0.0-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:b17d83b3d1610e571fedac21b2eb36b816654d6f7496004d6a0d32f99d1d8120"}, + {file = "cryptography-36.0.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8982c19bb90a4fa2aad3d635c6d71814e38b643649b4000a8419f8691f20ac44"}, + {file = "cryptography-36.0.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:24469d9d33217ffd0ce4582dfcf2a76671af115663a95328f63c99ec7ece61a4"}, + {file = "cryptography-36.0.0-cp36-abi3-win32.whl", hash = "sha256:f6a5a85beb33e57998dc605b9dbe7deaa806385fdf5c4810fb849fcd04640c81"}, + {file = 
"cryptography-36.0.0-cp36-abi3-win_amd64.whl", hash = "sha256:2deab5ec05d83ddcf9b0916319674d3dae88b0e7ee18f8962642d3cde0496568"}, + {file = "cryptography-36.0.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2049f8b87f449fc6190350de443ee0c1dd631f2ce4fa99efad2984de81031681"}, + {file = "cryptography-36.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a776bae1629c8d7198396fd93ec0265f8dd2341c553dc32b976168aaf0e6a636"}, + {file = "cryptography-36.0.0-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:aa94d617a4cd4cdf4af9b5af65100c036bce22280ebb15d8b5262e8273ebc6ba"}, + {file = "cryptography-36.0.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:5c49c9e8fb26a567a2b3fa0343c89f5d325447956cc2fc7231c943b29a973712"}, + {file = "cryptography-36.0.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ef216d13ac8d24d9cd851776662f75f8d29c9f2d05cdcc2d34a18d32463a9b0b"}, + {file = "cryptography-36.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:231c4a69b11f6af79c1495a0e5a85909686ea8db946935224b7825cfb53827ed"}, + {file = "cryptography-36.0.0-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:f92556f94e476c1b616e6daec5f7ddded2c082efa7cee7f31c7aeda615906ed8"}, + {file = "cryptography-36.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d73e3a96c38173e0aa5646c31bf8473bc3564837977dd480f5cbeacf1d7ef3a3"}, + {file = "cryptography-36.0.0.tar.gz", hash = "sha256:52f769ecb4ef39865719aedc67b4b7eae167bafa48dbc2a26dd36fa56460507f"}, ] dataclasses = [ {file = "dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"}, @@ -2603,101 +2385,24 @@ execnet = [ {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"}, ] filelock = [ - {file = "filelock-3.3.2-py3-none-any.whl", hash = 
"sha256:bb2a1c717df74c48a2d00ed625e5a66f8572a3a30baacb7657add1d7bac4097b"}, - {file = "filelock-3.3.2.tar.gz", hash = "sha256:7afc856f74fa7006a289fd10fa840e1eebd8bbff6bffb69c26c54a0512ea8cf8"}, + {file = "filelock-3.4.0-py3-none-any.whl", hash = "sha256:2e139a228bcf56dd8b2274a65174d005c4a6b68540ee0bdbb92c76f43f29f7e8"}, + {file = "filelock-3.4.0.tar.gz", hash = "sha256:93d512b32a23baf4cac44ffd72ccf70732aeff7b8050fcaf6d3ec406d954baf4"}, ] flake8 = [ {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, ] -frozenlist = [ - {file = "frozenlist-1.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:977a1438d0e0d96573fd679d291a1542097ea9f4918a8b6494b06610dfeefbf9"}, - {file = "frozenlist-1.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a8d86547a5e98d9edd47c432f7a14b0c5592624b496ae9880fb6332f34af1edc"}, - {file = "frozenlist-1.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:181754275d5d32487431a0a29add4f897968b7157204bc1eaaf0a0ce80c5ba7d"}, - {file = "frozenlist-1.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5df31bb2b974f379d230a25943d9bf0d3bc666b4b0807394b131a28fca2b0e5f"}, - {file = "frozenlist-1.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4766632cd8a68e4f10f156a12c9acd7b1609941525569dd3636d859d79279ed3"}, - {file = "frozenlist-1.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16eef427c51cb1203a7c0ab59d1b8abccaba9a4f58c4bfca6ed278fc896dc193"}, - {file = "frozenlist-1.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:01d79515ed5aa3d699b05f6bdcf1fe9087d61d6b53882aa599a10853f0479c6c"}, - {file = 
"frozenlist-1.2.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:28e164722ea0df0cf6d48c4d5bdf3d19e87aaa6dfb39b0ba91153f224b912020"}, - {file = "frozenlist-1.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e63ad0beef6ece06475d29f47d1f2f29727805376e09850ebf64f90777962792"}, - {file = "frozenlist-1.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:41de4db9b9501679cf7cddc16d07ac0f10ef7eb58c525a1c8cbff43022bddca4"}, - {file = "frozenlist-1.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c6a9d84ee6427b65a81fc24e6ef589cb794009f5ca4150151251c062773e7ed2"}, - {file = "frozenlist-1.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:f5f3b2942c3b8b9bfe76b408bbaba3d3bb305ee3693e8b1d631fe0a0d4f93673"}, - {file = "frozenlist-1.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c98d3c04701773ad60d9545cd96df94d955329efc7743fdb96422c4b669c633b"}, - {file = "frozenlist-1.2.0-cp310-cp310-win32.whl", hash = "sha256:72cfbeab7a920ea9e74b19aa0afe3b4ad9c89471e3badc985d08756efa9b813b"}, - {file = "frozenlist-1.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:11ff401951b5ac8c0701a804f503d72c048173208490c54ebb8d7bb7c07a6d00"}, - {file = "frozenlist-1.2.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b46f997d5ed6d222a863b02cdc9c299101ee27974d9bbb2fd1b3c8441311c408"}, - {file = "frozenlist-1.2.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:351686ca020d1bcd238596b1fa5c8efcbc21bffda9d0efe237aaa60348421e2a"}, - {file = "frozenlist-1.2.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfbaa08cf1452acad9cb1c1d7b89394a41e712f88df522cea1a0f296b57782a0"}, - {file = "frozenlist-1.2.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2ae2f5e9fa10805fb1c9adbfefaaecedd9e31849434be462c3960a0139ed729"}, - {file = 
"frozenlist-1.2.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6790b8d96bbb74b7a6f4594b6f131bd23056c25f2aa5d816bd177d95245a30e3"}, - {file = "frozenlist-1.2.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:41f62468af1bd4e4b42b5508a3fe8cc46a693f0cdd0ca2f443f51f207893d837"}, - {file = "frozenlist-1.2.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:ec6cf345771cdb00791d271af9a0a6fbfc2b6dd44cb753f1eeaa256e21622adb"}, - {file = "frozenlist-1.2.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:14a5cef795ae3e28fb504b73e797c1800e9249f950e1c964bb6bdc8d77871161"}, - {file = "frozenlist-1.2.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:8b54cdd2fda15467b9b0bfa78cee2ddf6dbb4585ef23a16e14926f4b076dfae4"}, - {file = "frozenlist-1.2.0-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:f025f1d6825725b09c0038775acab9ae94264453a696cc797ce20c0769a7b367"}, - {file = "frozenlist-1.2.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:84e97f59211b5b9083a2e7a45abf91cfb441369e8bb6d1f5287382c1c526def3"}, - {file = "frozenlist-1.2.0-cp36-cp36m-win32.whl", hash = "sha256:c5328ed53fdb0a73c8a50105306a3bc013e5ca36cca714ec4f7bd31d38d8a97f"}, - {file = "frozenlist-1.2.0-cp36-cp36m-win_amd64.whl", hash = "sha256:9ade70aea559ca98f4b1b1e5650c45678052e76a8ab2f76d90f2ac64180215a2"}, - {file = "frozenlist-1.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0d3ffa8772464441b52489b985d46001e2853a3b082c655ec5fad9fb6a3d618"}, - {file = "frozenlist-1.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3457f8cf86deb6ce1ba67e120f1b0128fcba1332a180722756597253c465fc1d"}, - {file = "frozenlist-1.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a72eecf37eface331636951249d878750db84034927c997d47f7f78a573b72b"}, - {file = "frozenlist-1.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:acc4614e8d1feb9f46dd829a8e771b8f5c4b1051365d02efb27a3229048ade8a"}, - {file = "frozenlist-1.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:87521e32e18a2223311afc2492ef2d99946337da0779ddcda77b82ee7319df59"}, - {file = "frozenlist-1.2.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8b4c7665a17c3a5430edb663e4ad4e1ad457614d1b2f2b7f87052e2ef4fa45ca"}, - {file = "frozenlist-1.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ed58803563a8c87cf4c0771366cf0ad1aa265b6b0ae54cbbb53013480c7ad74d"}, - {file = "frozenlist-1.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:aa44c4740b4e23fcfa259e9dd52315d2b1770064cde9507457e4c4a65a04c397"}, - {file = "frozenlist-1.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:2de5b931701257d50771a032bba4e448ff958076380b049fd36ed8738fdb375b"}, - {file = "frozenlist-1.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:6e105013fa84623c057a4381dc8ea0361f4d682c11f3816cc80f49a1f3bc17c6"}, - {file = "frozenlist-1.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:705c184b77565955a99dc360f359e8249580c6b7eaa4dc0227caa861ef46b27a"}, - {file = "frozenlist-1.2.0-cp37-cp37m-win32.whl", hash = "sha256:a37594ad6356e50073fe4f60aa4187b97d15329f2138124d252a5a19c8553ea4"}, - {file = "frozenlist-1.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:25b358aaa7dba5891b05968dd539f5856d69f522b6de0bf34e61f133e077c1a4"}, - {file = "frozenlist-1.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af2a51c8a381d76eabb76f228f565ed4c3701441ecec101dd18be70ebd483cfd"}, - {file = "frozenlist-1.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:82d22f6e6f2916e837c91c860140ef9947e31194c82aaeda843d6551cec92f19"}, - {file = "frozenlist-1.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1cfe6fef507f8bac40f009c85c7eddfed88c1c0d38c75e72fe10476cef94e10f"}, - {file = 
"frozenlist-1.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f602e380a5132880fa245c92030abb0fc6ff34e0c5500600366cedc6adb06a"}, - {file = "frozenlist-1.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ad065b2ebd09f32511ff2be35c5dfafee6192978b5a1e9d279a5c6e121e3b03"}, - {file = "frozenlist-1.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bc93f5f62df3bdc1f677066327fc81f92b83644852a31c6aa9b32c2dde86ea7d"}, - {file = "frozenlist-1.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:89fdfc84c6bf0bff2ff3170bb34ecba8a6911b260d318d377171429c4be18c73"}, - {file = "frozenlist-1.2.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:47b2848e464883d0bbdcd9493c67443e5e695a84694efff0476f9059b4cb6257"}, - {file = "frozenlist-1.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4f52d0732e56906f8ddea4bd856192984650282424049c956857fed43697ea43"}, - {file = "frozenlist-1.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:16ef7dd5b7d17495404a2e7a49bac1bc13d6d20c16d11f4133c757dd94c4144c"}, - {file = "frozenlist-1.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1cf63243bc5f5c19762943b0aa9e0d3fb3723d0c514d820a18a9b9a5ef864315"}, - {file = "frozenlist-1.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:54a1e09ab7a69f843cd28fefd2bcaf23edb9e3a8d7680032c8968b8ac934587d"}, - {file = "frozenlist-1.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:954b154a4533ef28bd3e83ffdf4eadf39deeda9e38fb8feaf066d6069885e034"}, - {file = "frozenlist-1.2.0-cp38-cp38-win32.whl", hash = "sha256:cb3957c39668d10e2b486acc85f94153520a23263b6401e8f59422ef65b9520d"}, - {file = "frozenlist-1.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:0a7c7cce70e41bc13d7d50f0e5dd175f14a4f1837a8549b0936ed0cbe6170bf9"}, - {file = "frozenlist-1.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:4c457220468d734e3077580a3642b7f682f5fd9507f17ddf1029452450912cdc"}, - {file = "frozenlist-1.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e74f8b4d8677ebb4015ac01fcaf05f34e8a1f22775db1f304f497f2f88fdc697"}, - {file = "frozenlist-1.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fbd4844ff111449f3bbe20ba24fbb906b5b1c2384d0f3287c9f7da2354ce6d23"}, - {file = "frozenlist-1.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0081a623c886197ff8de9e635528fd7e6a387dccef432149e25c13946cb0cd0"}, - {file = "frozenlist-1.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9b6e21e5770df2dea06cb7b6323fbc008b13c4a4e3b52cb54685276479ee7676"}, - {file = "frozenlist-1.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:406aeb340613b4b559db78d86864485f68919b7141dec82aba24d1477fd2976f"}, - {file = "frozenlist-1.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:878ebe074839d649a1cdb03a61077d05760624f36d196884a5cafb12290e187b"}, - {file = "frozenlist-1.2.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1fef737fd1388f9b93bba8808c5f63058113c10f4e3c0763ced68431773f72f9"}, - {file = "frozenlist-1.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4a495c3d513573b0b3f935bfa887a85d9ae09f0627cf47cad17d0cc9b9ba5c38"}, - {file = "frozenlist-1.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e7d0dd3e727c70c2680f5f09a0775525229809f1a35d8552b92ff10b2b14f2c2"}, - {file = "frozenlist-1.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:66a518731a21a55b7d3e087b430f1956a36793acc15912e2878431c7aec54210"}, - {file = "frozenlist-1.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:94728f97ddf603d23c8c3dd5cae2644fa12d33116e69f49b1644a71bb77b89ae"}, - {file = "frozenlist-1.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:c1e8e9033d34c2c9e186e58279879d78c94dd365068a3607af33f2bc99357a53"}, - {file = "frozenlist-1.2.0-cp39-cp39-win32.whl", hash = "sha256:83334e84a290a158c0c4cc4d22e8c7cfe0bba5b76d37f1c2509dabd22acafe15"}, - {file = "frozenlist-1.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:735f386ec522e384f511614c01d2ef9cf799f051353876b4c6fb93ef67a6d1ee"}, - {file = "frozenlist-1.2.0.tar.gz", hash = "sha256:68201be60ac56aff972dc18085800b6ee07973c49103a8aba669dee3d71079de"}, -] fsspec = [ - {file = "fsspec-2021.10.1-py3-none-any.whl", hash = "sha256:7164a488f3f5bf6a0fb39674978b756dda84e011a5db411a79791b7c38a36ff7"}, - {file = "fsspec-2021.10.1.tar.gz", hash = "sha256:c245626e3cb8de5cd91485840b215a385fa6f2b0f6ab87978305e99e2d842753"}, + {file = "fsspec-2021.11.1-py3-none-any.whl", hash = "sha256:bcb136caa37e1470dd8314a7d3917cb9b25dd9da44c10d36df556ab4ef038185"}, + {file = "fsspec-2021.11.1.tar.gz", hash = "sha256:03683e606651d5e4bd9180525d57477bd5430e5dc68d2e459835dc14cecc3dd4"}, ] idna = [ {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, ] -idna-ssl = [ - {file = "idna-ssl-1.1.0.tar.gz", hash = "sha256:a933e3bb13da54383f9e8f35dc4f9cb9eb9b3b78c6b36f311254d6d0d92c6c7c"}, -] imagesize = [ - {file = "imagesize-1.2.0-py2.py3-none-any.whl", hash = "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1"}, - {file = "imagesize-1.2.0.tar.gz", hash = "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"}, + {file = "imagesize-1.3.0-py2.py3-none-any.whl", hash = "sha256:1db2f82529e53c3e929e8926a1fa9235aa82d0bd0c580359c67ec31b2fddaa8c"}, + {file = "imagesize-1.3.0.tar.gz", hash = "sha256:cd1750d452385ca327479d45b64d9c7729ecf0b3969a58148298c77092261f9d"}, ] immutables = [ {file = "immutables-0.16-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:acbfa79d44228d96296279068441f980dc63dbed52522d9227ff9f4d96c6627e"}, @@ -2745,24 +2450,24 @@ ipykernel = [ {file = "ipykernel-5.5.6.tar.gz", hash = "sha256:4ea44b90ae1f7c38987ad58ea0809562a17c2695a0499644326f334aecd369ec"}, ] ipython = [ - {file = "ipython-7.16.1-py3-none-any.whl", hash = "sha256:2dbcc8c27ca7d3cfe4fcdff7f45b27f9a8d3edfa70ff8024a71c7a8eb5f09d64"}, - {file = "ipython-7.16.1.tar.gz", hash = "sha256:9f4fcb31d3b2c533333893b9172264e4821c1ac91839500f31bd43f2c59b3ccf"}, + {file = "ipython-7.16.2-py3-none-any.whl", hash = "sha256:2f644313be4fdc5c8c2a17467f2949c29423c9e283a159d1fc9bf450a1a300af"}, + {file = "ipython-7.16.2.tar.gz", hash = "sha256:613085f8acb0f35f759e32bea35fba62c651a4a2e409a0da11414618f5eec0c4"}, ] ipython-genutils = [ {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, ] isort = [ - {file = "isort-5.10.0-py3-none-any.whl", hash = "sha256:1a18ccace2ed8910bd9458b74a3ecbafd7b2f581301b0ab65cfdd4338272d76f"}, - {file = "isort-5.10.0.tar.gz", hash = "sha256:e52ff6d38012b131628cf0f26c51e7bd3a7c81592eefe3ac71411e692f1b9345"}, + {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, + {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, ] jedi = [ - {file = "jedi-0.18.0-py2.py3-none-any.whl", hash = "sha256:18456d83f65f400ab0c2d3319e48520420ef43b23a086fdc05dff34132f0fb93"}, - {file = "jedi-0.18.0.tar.gz", hash = "sha256:92550a404bad8afed881a137ec9a461fed49eca661414be45059329614ed0707"}, + {file = "jedi-0.17.2-py2.py3-none-any.whl", hash = "sha256:98cc583fa0f2f8304968199b01b6b4b94f469a1f4a74c1560506ca2a211378b5"}, + {file = "jedi-0.17.2.tar.gz", hash = 
"sha256:86ed7d9b750603e4ba582ea8edc678657fb4007894a12bcf6f4bb97892f31d20"}, ] jinja2 = [ - {file = "Jinja2-3.0.2-py3-none-any.whl", hash = "sha256:8569982d3f0889eed11dd620c706d39b60c36d6d25843961f33f77fb6bc6b20c"}, - {file = "Jinja2-3.0.2.tar.gz", hash = "sha256:827a0e32839ab1600d4eb1c4c33ec5a8edfbc5cb42dafa13b81f182f97784b45"}, + {file = "Jinja2-3.0.3-py3-none-any.whl", hash = "sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"}, + {file = "Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"}, ] jmespath = [ {file = "jmespath-0.10.0-py2.py3-none-any.whl", hash = "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"}, @@ -2782,20 +2487,20 @@ jsonschema = [ {file = "jsonschema-4.0.0.tar.gz", hash = "sha256:bc51325b929171791c42ebc1c70b9713eb134d3bb8ebd5474c8b659b15be6d86"}, ] jupyter-client = [ - {file = "jupyter_client-7.0.6-py3-none-any.whl", hash = "sha256:074bdeb1ffaef4a3095468ee16313938cfdc48fc65ca95cc18980b956c2e5d79"}, - {file = "jupyter_client-7.0.6.tar.gz", hash = "sha256:8b6e06000eb9399775e0a55c52df6c1be4766666209c22f90c2691ded0e338dc"}, + {file = "jupyter_client-7.1.0-py3-none-any.whl", hash = "sha256:64d93752d8cbfba0c1030c3335c3f0d9797cd1efac012652a14aac1653db11a3"}, + {file = "jupyter_client-7.1.0.tar.gz", hash = "sha256:a5f995a73cffb314ed262713ae6dfce53c6b8216cea9f332071b8ff44a6e1654"}, ] jupyter-core = [ {file = "jupyter_core-4.9.1-py3-none-any.whl", hash = "sha256:1c091f3bbefd6f2a8782f2c1db662ca8478ac240e962ae2c66f0b87c818154ea"}, {file = "jupyter_core-4.9.1.tar.gz", hash = "sha256:dce8a7499da5a53ae3afd5a9f4b02e5df1d57250cf48f3ad79da23b4778cd6fa"}, ] jupyter-server = [ - {file = "jupyter_server-1.11.2-py3-none-any.whl", hash = "sha256:eb247b555f5bdfb4a219d78e86bc8769456a1a712d8e30a4dbe06e3fe7e8a278"}, - {file = "jupyter_server-1.11.2.tar.gz", hash = "sha256:c1f32e0c1807ab2de37bf70af97a36b4436db0bc8af3124632b1f4441038bf95"}, + {file = 
"jupyter_server-1.12.1-py3-none-any.whl", hash = "sha256:93a84d06c35613ecf3bc5de8ff2d92a410a3a5f57a3a23444ca75e4b2b390209"}, + {file = "jupyter_server-1.12.1.tar.gz", hash = "sha256:f71e10ebaa6704a1e0fe76ec70a16a0804ab5a9d268f0c512e8c69086a8e86d1"}, ] jupyterlab = [ - {file = "jupyterlab-3.2.2-py3-none-any.whl", hash = "sha256:c970ed2e703831e02171d7bacae35b1e42a227b154bb3684a88ddf64167278bc"}, - {file = "jupyterlab-3.2.2.tar.gz", hash = "sha256:215dcbc2674bf1c74eca16b30eac49b882d41503c522ed337fb0053c89565ec8"}, + {file = "jupyterlab-3.2.4-py3-none-any.whl", hash = "sha256:b2375626001ab48af85e5da542a56a163ac8b490828642757e4e0e5e8c5af59d"}, + {file = "jupyterlab-3.2.4.tar.gz", hash = "sha256:f692e0d95338d60f72dde660f16f3955a087775c59ec541ddb25952e3f97e9b1"}, ] jupyterlab-pygments = [ {file = "jupyterlab_pygments-0.1.2-py2.py3-none-any.whl", hash = "sha256:abfb880fd1561987efaefcb2d2ac75145d2a5d0139b1876d5be806e32f630008"}, @@ -2892,22 +2597,12 @@ lxml = [ {file = "lxml-4.6.4.tar.gz", hash = "sha256:daf9bd1fee31f1c7a5928b3e1059e09a8d683ea58fb3ffc773b6c88cb8d1399c"}, ] markupsafe = [ - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, - {file = 
"MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, {file = 
"MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, @@ -2916,21 +2611,14 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = 
"sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, @@ -2940,9 +2628,6 @@ markupsafe = [ {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, - {file = 
"MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, @@ -2955,87 +2640,9 @@ mistune = [ {file = "mistune-0.8.4-py2.py3-none-any.whl", hash = "sha256:88a1051873018da288eee8538d476dffe1262495144b33ecb586c4ab266bb8d4"}, {file = "mistune-0.8.4.tar.gz", hash = "sha256:59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e"}, ] -more-itertools = [ - {file = "more-itertools-8.10.0.tar.gz", hash = "sha256:1debcabeb1df793814859d64a81ad7cb10504c24349368ccf214c664c474f41f"}, - {file = "more_itertools-8.10.0-py3-none-any.whl", hash = "sha256:56ddac45541718ba332db05f464bebfb0768110111affd27f66e0051f276fa43"}, -] moto = [ - {file = "moto-2.2.12-py2.py3-none-any.whl", hash = "sha256:bc6d77f7ff51af7cdecb28975d7a795faac3d04decb99bacfecc603b58a5ce81"}, - {file = "moto-2.2.12.tar.gz", hash = "sha256:e83ff38cbbf901a11b21c344c101f6e18810868145e2e2f8ff34857025f06a5f"}, -] -multidict = [ - {file = "multidict-5.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3822c5894c72e3b35aae9909bef66ec83e44522faf767c0ad39e0e2de11d3b55"}, - {file = 
"multidict-5.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:28e6d883acd8674887d7edc896b91751dc2d8e87fbdca8359591a13872799e4e"}, - {file = "multidict-5.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b61f85101ef08cbbc37846ac0e43f027f7844f3fade9b7f6dd087178caedeee7"}, - {file = "multidict-5.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9b668c065968c5979fe6b6fa6760bb6ab9aeb94b75b73c0a9c1acf6393ac3bf"}, - {file = "multidict-5.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:517d75522b7b18a3385726b54a081afd425d4f41144a5399e5abd97ccafdf36b"}, - {file = "multidict-5.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1b4ac3ba7a97b35a5ccf34f41b5a8642a01d1e55454b699e5e8e7a99b5a3acf5"}, - {file = "multidict-5.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:df23c83398715b26ab09574217ca21e14694917a0c857e356fd39e1c64f8283f"}, - {file = "multidict-5.2.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e58a9b5cc96e014ddf93c2227cbdeca94b56a7eb77300205d6e4001805391747"}, - {file = "multidict-5.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f76440e480c3b2ca7f843ff8a48dc82446b86ed4930552d736c0bac507498a52"}, - {file = "multidict-5.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cfde464ca4af42a629648c0b0d79b8f295cf5b695412451716531d6916461628"}, - {file = "multidict-5.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0fed465af2e0eb6357ba95795d003ac0bdb546305cc2366b1fc8f0ad67cc3fda"}, - {file = "multidict-5.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:b70913cbf2e14275013be98a06ef4b412329fe7b4f83d64eb70dce8269ed1e1a"}, - {file = "multidict-5.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5635bcf1b75f0f6ef3c8a1ad07b500104a971e38d3683167b9454cb6465ac86"}, - {file = 
"multidict-5.2.0-cp310-cp310-win32.whl", hash = "sha256:77f0fb7200cc7dedda7a60912f2059086e29ff67cefbc58d2506638c1a9132d7"}, - {file = "multidict-5.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:9416cf11bcd73c861267e88aea71e9fcc35302b3943e45e1dbb4317f91a4b34f"}, - {file = "multidict-5.2.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fd77c8f3cba815aa69cb97ee2b2ef385c7c12ada9c734b0f3b32e26bb88bbf1d"}, - {file = "multidict-5.2.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98ec9aea6223adf46999f22e2c0ab6cf33f5914be604a404f658386a8f1fba37"}, - {file = "multidict-5.2.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e5283c0a00f48e8cafcecadebfa0ed1dac8b39e295c7248c44c665c16dc1138b"}, - {file = "multidict-5.2.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5f79c19c6420962eb17c7e48878a03053b7ccd7b69f389d5831c0a4a7f1ac0a1"}, - {file = "multidict-5.2.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e4a67f1080123de76e4e97a18d10350df6a7182e243312426d508712e99988d4"}, - {file = "multidict-5.2.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:94b117e27efd8e08b4046c57461d5a114d26b40824995a2eb58372b94f9fca02"}, - {file = "multidict-5.2.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2e77282fd1d677c313ffcaddfec236bf23f273c4fba7cdf198108f5940ae10f5"}, - {file = "multidict-5.2.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:116347c63ba049c1ea56e157fa8aa6edaf5e92925c9b64f3da7769bdfa012858"}, - {file = "multidict-5.2.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:dc3a866cf6c13d59a01878cd806f219340f3e82eed514485e094321f24900677"}, - {file = "multidict-5.2.0-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ac42181292099d91217a82e3fa3ce0e0ddf3a74fd891b7c2b347a7f5aa0edded"}, - {file = "multidict-5.2.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = 
"sha256:f0bb0973f42ffcb5e3537548e0767079420aefd94ba990b61cf7bb8d47f4916d"}, - {file = "multidict-5.2.0-cp36-cp36m-win32.whl", hash = "sha256:ea21d4d5104b4f840b91d9dc8cbc832aba9612121eaba503e54eaab1ad140eb9"}, - {file = "multidict-5.2.0-cp36-cp36m-win_amd64.whl", hash = "sha256:e6453f3cbeb78440747096f239d282cc57a2997a16b5197c9bc839099e1633d0"}, - {file = "multidict-5.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d3def943bfd5f1c47d51fd324df1e806d8da1f8e105cc7f1c76a1daf0f7e17b0"}, - {file = "multidict-5.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35591729668a303a02b06e8dba0eb8140c4a1bfd4c4b3209a436a02a5ac1de11"}, - {file = "multidict-5.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8cacda0b679ebc25624d5de66c705bc53dcc7c6f02a7fb0f3ca5e227d80422"}, - {file = "multidict-5.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:baf1856fab8212bf35230c019cde7c641887e3fc08cadd39d32a421a30151ea3"}, - {file = "multidict-5.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a43616aec0f0d53c411582c451f5d3e1123a68cc7b3475d6f7d97a626f8ff90d"}, - {file = "multidict-5.2.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:25cbd39a9029b409167aa0a20d8a17f502d43f2efebfe9e3ac019fe6796c59ac"}, - {file = "multidict-5.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a2cbcfbea6dc776782a444db819c8b78afe4db597211298dd8b2222f73e9cd0"}, - {file = "multidict-5.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3d2d7d1fff8e09d99354c04c3fd5b560fb04639fd45926b34e27cfdec678a704"}, - {file = "multidict-5.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a37e9a68349f6abe24130846e2f1d2e38f7ddab30b81b754e5a1fde32f782b23"}, - {file = "multidict-5.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = 
"sha256:637c1896497ff19e1ee27c1c2c2ddaa9f2d134bbb5e0c52254361ea20486418d"}, - {file = "multidict-5.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9815765f9dcda04921ba467957be543423e5ec6a1136135d84f2ae092c50d87b"}, - {file = "multidict-5.2.0-cp37-cp37m-win32.whl", hash = "sha256:8b911d74acdc1fe2941e59b4f1a278a330e9c34c6c8ca1ee21264c51ec9b67ef"}, - {file = "multidict-5.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:380b868f55f63d048a25931a1632818f90e4be71d2081c2338fcf656d299949a"}, - {file = "multidict-5.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e7d81ce5744757d2f05fc41896e3b2ae0458464b14b5a2c1e87a6a9d69aefaa8"}, - {file = "multidict-5.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d1d55cdf706ddc62822d394d1df53573d32a7a07d4f099470d3cb9323b721b6"}, - {file = "multidict-5.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4771d0d0ac9d9fe9e24e33bed482a13dfc1256d008d101485fe460359476065"}, - {file = "multidict-5.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da7d57ea65744d249427793c042094c4016789eb2562576fb831870f9c878d9e"}, - {file = "multidict-5.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdd68778f96216596218b4e8882944d24a634d984ee1a5a049b300377878fa7c"}, - {file = "multidict-5.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ecc99bce8ee42dcad15848c7885197d26841cb24fa2ee6e89d23b8993c871c64"}, - {file = "multidict-5.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:067150fad08e6f2dd91a650c7a49ba65085303fcc3decbd64a57dc13a2733031"}, - {file = "multidict-5.2.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:78c106b2b506b4d895ddc801ff509f941119394b89c9115580014127414e6c2d"}, - {file = "multidict-5.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e6c4fa1ec16e01e292315ba76eb1d012c025b99d22896bd14a66628b245e3e01"}, - 
{file = "multidict-5.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b227345e4186809d31f22087d0265655114af7cda442ecaf72246275865bebe4"}, - {file = "multidict-5.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:06560fbdcf22c9387100979e65b26fba0816c162b888cb65b845d3def7a54c9b"}, - {file = "multidict-5.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7878b61c867fb2df7a95e44b316f88d5a3742390c99dfba6c557a21b30180cac"}, - {file = "multidict-5.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:246145bff76cc4b19310f0ad28bd0769b940c2a49fc601b86bfd150cbd72bb22"}, - {file = "multidict-5.2.0-cp38-cp38-win32.whl", hash = "sha256:c30ac9f562106cd9e8071c23949a067b10211917fdcb75b4718cf5775356a940"}, - {file = "multidict-5.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:f19001e790013ed580abfde2a4465388950728861b52f0da73e8e8a9418533c0"}, - {file = "multidict-5.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c1ff762e2ee126e6f1258650ac641e2b8e1f3d927a925aafcfde943b77a36d24"}, - {file = "multidict-5.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bd6c9c50bf2ad3f0448edaa1a3b55b2e6866ef8feca5d8dbec10ec7c94371d21"}, - {file = "multidict-5.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc66d4016f6e50ed36fb39cd287a3878ffcebfa90008535c62e0e90a7ab713ae"}, - {file = "multidict-5.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9acb76d5f3dd9421874923da2ed1e76041cb51b9337fd7f507edde1d86535d6"}, - {file = "multidict-5.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dfc924a7e946dd3c6360e50e8f750d51e3ef5395c95dc054bc9eab0f70df4f9c"}, - {file = "multidict-5.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32fdba7333eb2351fee2596b756d730d62b5827d5e1ab2f84e6cbb287cc67fe0"}, - {file = "multidict-5.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b9aad49466b8d828b96b9e3630006234879c8d3e2b0a9d99219b3121bc5cdb17"}, 
- {file = "multidict-5.2.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:93de39267c4c676c9ebb2057e98a8138bade0d806aad4d864322eee0803140a0"}, - {file = "multidict-5.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f9bef5cff994ca3026fcc90680e326d1a19df9841c5e3d224076407cc21471a1"}, - {file = "multidict-5.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:5f841c4f14331fd1e36cbf3336ed7be2cb2a8f110ce40ea253e5573387db7621"}, - {file = "multidict-5.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:38ba256ee9b310da6a1a0f013ef4e422fca30a685bcbec86a969bd520504e341"}, - {file = "multidict-5.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3bc3b1621b979621cee9f7b09f024ec76ec03cc365e638126a056317470bde1b"}, - {file = "multidict-5.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6ee908c070020d682e9b42c8f621e8bb10c767d04416e2ebe44e37d0f44d9ad5"}, - {file = "multidict-5.2.0-cp39-cp39-win32.whl", hash = "sha256:1c7976cd1c157fa7ba5456ae5d31ccdf1479680dc9b8d8aa28afabc370df42b8"}, - {file = "multidict-5.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:c9631c642e08b9fff1c6255487e62971d8b8e821808ddd013d8ac058087591ac"}, - {file = "multidict-5.2.0.tar.gz", hash = "sha256:0dd1c93edb444b33ba2274b66f63def8a327d607c6c790772f448a53b6ea59ce"}, + {file = "moto-2.2.17-py2.py3-none-any.whl", hash = "sha256:73aa14a650cb3bf02ca720b343618a57dda4c2c1d1166708a4c5c98ea9013b29"}, + {file = "moto-2.2.17.tar.gz", hash = "sha256:221ebd16b41b3ae157554ca5e540a8c1b4b1c93443cbf854c1f04751194c51b6"}, ] mypy = [ {file = "mypy-0.910-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:a155d80ea6cee511a3694b108c4494a39f42de11ee4e61e72bc424c490e46457"}, @@ -3071,8 +2678,8 @@ nbclassic = [ {file = "nbclassic-0.3.4.tar.gz", hash = "sha256:f00b07ef4908fc38fd332d2676ccd3ceea5076528feaf21bd27e809ef20f5578"}, ] nbclient = [ - {file = "nbclient-0.5.4-py3-none-any.whl", hash = 
"sha256:95a300c6fbe73721736cf13972a46d8d666f78794b832866ed7197a504269e11"}, - {file = "nbclient-0.5.4.tar.gz", hash = "sha256:6c8ad36a28edad4562580847f9f1636fe5316a51a323ed85a24a4ad37d4aefce"}, + {file = "nbclient-0.5.9-py3-none-any.whl", hash = "sha256:8a307be4129cce5f70eb83a57c3edbe45656623c31de54e38bb6fdfbadc428b3"}, + {file = "nbclient-0.5.9.tar.gz", hash = "sha256:99e46ddafacd0b861293bf246fed8540a184adfa3aa7d641f89031ec070701e0"}, ] nbconvert = [ {file = "nbconvert-6.0.7-py3-none-any.whl", hash = "sha256:39e9f977920b203baea0be67eea59f7b37a761caa542abe80f5897ce3cf6311d"}, @@ -3091,12 +2698,12 @@ nbsphinx-link = [ {file = "nbsphinx_link-1.3.0-py2.py3-none-any.whl", hash = "sha256:67c24fc6508765203afb4b6939c0d9127e17a5d8d9355bfe8458192cf7105eb9"}, ] nest-asyncio = [ - {file = "nest_asyncio-1.5.1-py3-none-any.whl", hash = "sha256:76d6e972265063fe92a90b9cc4fb82616e07d586b346ed9d2c89a4187acea39c"}, - {file = "nest_asyncio-1.5.1.tar.gz", hash = "sha256:afc5a1c515210a23c461932765691ad39e8eba6551c055ac8d5546e69250d0aa"}, + {file = "nest_asyncio-1.5.4-py3-none-any.whl", hash = "sha256:3fdd0d6061a2bb16f21fe8a9c6a7945be83521d81a0d15cff52e9edee50101d6"}, + {file = "nest_asyncio-1.5.4.tar.gz", hash = "sha256:f969f6013a16fadb4adcf09d11a68a4f617c6049d7af7ac2c676110169a63abd"}, ] notebook = [ - {file = "notebook-6.4.5-py3-none-any.whl", hash = "sha256:f7b4362698fed34f44038de0517b2e5136c1e7c379797198c1736121d3d597bd"}, - {file = "notebook-6.4.5.tar.gz", hash = "sha256:872e20da9ae518bbcac3e4e0092d5bd35454e847dedb8cb9739e9f3b68406be0"}, + {file = "notebook-6.4.6-py3-none-any.whl", hash = "sha256:5cad068fa82cd4fb98d341c052100ed50cd69fbfb4118cb9b8ab5a346ef27551"}, + {file = "notebook-6.4.6.tar.gz", hash = "sha256:7bcdf79bd1cda534735bd9830d2cbedab4ee34d8fe1df6e7b946b3aab0902ba3"}, ] numpy = [ {file = "numpy-1.19.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cc6bd4fd593cb261332568485e20a0712883cf631f6f5e8e86a52caa8b2b50ff"}, @@ -3143,8 +2750,8 @@ opensearch-py = [ {file = 
"opensearch_py-1.0.0-py2.py3-none-any.whl", hash = "sha256:17afebc25dc890b96c4e9ec8692dcfdb6842c028ce8c2d252e8f55c587960177"}, ] packaging = [ - {file = "packaging-21.2-py3-none-any.whl", hash = "sha256:14317396d1e8cdb122989b916fa2c7e9ca8e2be9e8060a6eff75b6b7b4d8a7e0"}, - {file = "packaging-21.2.tar.gz", hash = "sha256:096d689d78ca690e4cd8a89568ba06d07ca097e3306a4381635073ca91479966"}, + {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] pandas = [ {file = "pandas-1.1.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:bf23a3b54d128b50f4f9d4675b3c1857a688cc6731a32f931837d72effb2698d"}, @@ -3219,24 +2826,24 @@ pandocfilters = [ {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, ] parso = [ - {file = "parso-0.8.2-py2.py3-none-any.whl", hash = "sha256:a8c4922db71e4fdb90e0d0bc6e50f9b273d3397925e5e60a717e719201778d22"}, - {file = "parso-0.8.2.tar.gz", hash = "sha256:12b83492c6239ce32ff5eed6d3639d6a536170723c6f3f1506869f1ace413398"}, + {file = "parso-0.7.1-py2.py3-none-any.whl", hash = "sha256:97218d9159b2520ff45eb78028ba8b50d2bc61dcc062a9682666f2dc4bd331ea"}, + {file = "parso-0.7.1.tar.gz", hash = "sha256:caba44724b994a8a5e086460bb212abc5a8bc46951bf4a9a1210745953622eb9"}, ] pathspec = [ {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, ] pbr = [ - {file = "pbr-5.7.0-py2.py3-none-any.whl", hash = "sha256:60002958e459b195e8dbe61bf22bcf344eedf1b4e03a321a5414feb15566100c"}, - {file = "pbr-5.7.0.tar.gz", hash = "sha256:4651ca1445e80f2781827305de3d76b3ce53195f2227762684eb08f17bc473b7"}, + {file = 
"pbr-5.8.0-py2.py3-none-any.whl", hash = "sha256:176e8560eaf61e127817ef93d8a844803abb27a4d4637f0ff3bb783129be2e0a"}, + {file = "pbr-5.8.0.tar.gz", hash = "sha256:672d8ebee84921862110f23fcec2acea191ef58543d34dfe9ef3d9f13c31cddf"}, ] pexpect = [ {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, ] pg8000 = [ - {file = "pg8000-1.22.0-py3-none-any.whl", hash = "sha256:a0e82542f4a56b2139c41ff09c1aeff294c10b6500bb6c57890c0c1c551cbc03"}, - {file = "pg8000-1.22.0.tar.gz", hash = "sha256:c5172252fc92142ec104cd5e7231be4580a1a0a814403707bafbf7bb8383a29a"}, + {file = "pg8000-1.22.1-py3-none-any.whl", hash = "sha256:fa95b112ba940e227b6de81bda1a16a653d4fdafa7d92baf5116236210417989"}, + {file = "pg8000-1.22.1.tar.gz", hash = "sha256:fd69ad5af2781e59608cb3f4d966b234d961c62f093edd3194ac45df65b8d1d4"}, ] pickleshare = [ {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, @@ -3263,8 +2870,8 @@ prometheus-client = [ {file = "prometheus_client-0.12.0.tar.gz", hash = "sha256:1b12ba48cee33b9b0b9de64a1047cbd3c5f2d0ab6ebcead7ddda613a750ec3c5"}, ] prompt-toolkit = [ - {file = "prompt_toolkit-3.0.22-py3-none-any.whl", hash = "sha256:48d85cdca8b6c4f16480c7ce03fd193666b62b0a21667ca56b4bb5ad679d1170"}, - {file = "prompt_toolkit-3.0.22.tar.gz", hash = "sha256:449f333dd120bd01f5d296a8ce1452114ba3a71fae7288d2f0ae2c918764fa72"}, + {file = "prompt_toolkit-3.0.23-py3-none-any.whl", hash = "sha256:5f29d62cb7a0ecacfa3d8ceea05a63cd22500543472d64298fc06ddda906b25d"}, + {file = "prompt_toolkit-3.0.23.tar.gz", hash = "sha256:7053aba00895473cb357819358ef33f11aa97e4ac83d38efb123e5649ceeecaf"}, ] ptyprocess = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, 
@@ -3275,50 +2882,50 @@ py = [ {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] pyarrow = [ - {file = "pyarrow-6.0.0-cp310-cp310-macosx_10_13_universal2.whl", hash = "sha256:c7a6e7e0bf8779e9c3428ced85507541f3da9a0675e2f4781d4eb2c7042cbf81"}, - {file = "pyarrow-6.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:7a683f71b848eb6310b4ec48c0def55dac839e9994c1ac874c9b2d3d5625def1"}, - {file = "pyarrow-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5144bd9db2920c7cb566c96462d62443cc239104f94771d110f74393f2fb42a2"}, - {file = "pyarrow-6.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed0be080cf595ea15ff1c9ff4097bbf1fcc4b50847d98c0a3c0412fbc6ede7e9"}, - {file = "pyarrow-6.0.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:072c1a0fca4509eefd7d018b78542fb7e5c63aaf5698f1c0a6e45628ae17ba44"}, - {file = "pyarrow-6.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5bed4f948c032c40597302e9bdfa65f62295240306976ecbe43a54924c6f94f"}, - {file = "pyarrow-6.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:465f87fa0be0b2928b2beeba22b5813a0203fb05d90fd8563eea48e08ecc030e"}, - {file = "pyarrow-6.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:ddf2e6e3b321adaaf716f2d5af8e92d205a9671e0cb7c0779710a567fd1dd580"}, - {file = "pyarrow-6.0.0-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:0204e80777ab8f4e9abd3a765a8ec07ed1e3c4630bacda50d2ce212ef0f3826f"}, - {file = "pyarrow-6.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:82fe80309e01acf29e3943a1f6d3c98ec109fe1d356bc1ac37d639bcaadcf684"}, - {file = "pyarrow-6.0.0-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:281ce5fa03621d786a9beb514abb09846db7f0221b50eabf543caa24037eaacd"}, - {file = "pyarrow-6.0.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5408fa8d623e66a0445f3fb0e4027fd219bf99bfb57422d543d7b7876e2c5b55"}, - {file = "pyarrow-6.0.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a19e58dfb04e451cd8b7bdec3ac8848373b95dfc53492c9a69789aa9074a3c1b"}, - {file = "pyarrow-6.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:b86d175262db1eb46afdceb36d459409eb6f8e532d3dec162f8bf572c7f57623"}, - {file = "pyarrow-6.0.0-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:2d2c681659396c745e4f1988d5dd41dcc3ad557bb8d4a8c2e44030edafc08a91"}, - {file = "pyarrow-6.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c666bc6a1cebf01206e2dc1ab05f25f39f35d3a499e0ef5cd635225e07306ca"}, - {file = "pyarrow-6.0.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8d41dfb09ba9236cca6245f33088eb42f3c54023da281139241e0f9f3b4b754e"}, - {file = "pyarrow-6.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:477c746ef42c039348a288584800e299456c80c5691401bb9b19aa9c02a427b7"}, - {file = "pyarrow-6.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c38263ea438a1666b13372e7565450cfeec32dbcd1c2595749476a58465eaec"}, - {file = "pyarrow-6.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e81508239a71943759cee272ce625ae208092dd36ef2c6713fccee30bbcf52bb"}, - {file = "pyarrow-6.0.0-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:a50d2f77b86af38ceabf45617208b9105d20e7a5eebc584e7c8c0acededd82ce"}, - {file = "pyarrow-6.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fbda7595f24a639bcef3419ecfac17216efacb09f7b0f1b4c4c97f900d65ca0e"}, - {file = "pyarrow-6.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bf3400780c4d3c9cb43b1e8a1aaf2e1b7199a0572d0a645529d2784e4d0d8497"}, - {file = "pyarrow-6.0.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:15dc0d673d3f865ca63c877bd7a2eced70b0a08969fb733a28247134b8a1f18b"}, - {file = "pyarrow-6.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6a1d9a2f4ee812ed0bd4182cabef99ea914ac297274f0de086f2488093d284ef"}, - {file = "pyarrow-6.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d046dc78a9337baa6415be915c5a16222505233e238a1017f368243c89817eea"}, - {file = "pyarrow-6.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:ea64a48a85c631eb2a0ea13ccdec5143c85b5897836b16331ee4289d27a57247"}, - {file = "pyarrow-6.0.0-cp39-cp39-macosx_10_13_universal2.whl", hash = "sha256:cc1d4a70efd583befe92d4ea6f74ed2e0aa31ccdde767cd5cae8e77c65a1c2d4"}, - {file = "pyarrow-6.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:004185e0babc6f3c3fba6ba4f106e406a0113d0f82bb9ad9a8571a1978c45d04"}, - {file = "pyarrow-6.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8c23f8cdecd3d9e49f9b0f9a651ae5549d1d32fd4901fb1bdc2d327edfba844f"}, - {file = "pyarrow-6.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fb701ec4a94b92102606d4e88f0b8eba34f09a5ad8e014eaa4af76f42b7f62ae"}, - {file = "pyarrow-6.0.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:da7860688c33ca88ac05f1a487d32d96d9caa091412496c35f3d1d832145675a"}, - {file = "pyarrow-6.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac941a147d14993987cc8b605b721735a34b3e54d167302501fb4db1ad7382c7"}, - {file = "pyarrow-6.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6163d82cca7541774b00503c295fe86a1722820eddb958b57f091bb6f5b0a6db"}, - {file = "pyarrow-6.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:376c4b5f248ae63df21fe15c194e9013753164be2d38f4b3fb8bde63ac5a1958"}, - {file = "pyarrow-6.0.0.tar.gz", hash = "sha256:5be62679201c441356d3f2a739895dcc8d4d299f2a6eabcd2163bfb6a898abba"}, + {file = "pyarrow-6.0.1-cp310-cp310-macosx_10_13_universal2.whl", hash = "sha256:c80d2436294a07f9cc54852aa1cef034b6f9c97d29235c4bd53bbf52e24f1ebf"}, + {file = "pyarrow-6.0.1-cp310-cp310-macosx_10_13_x86_64.whl", hash = 
"sha256:f150b4f222d0ba397388908725692232345adaa8e58ad543ca00f03c7234ae7b"}, + {file = "pyarrow-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c3a727642c1283dcb44728f0d0a00f8864b171e31c835f4b8def07e3fa8f5c73"}, + {file = "pyarrow-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d29605727865177918e806d855fd8404b6242bf1e56ade0a0023cd4fe5f7f841"}, + {file = "pyarrow-6.0.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b63b54dd0bada05fff76c15b233f9322de0e6947071b7871ec45024e16045aeb"}, + {file = "pyarrow-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e90e75cb11e61ffeffb374f1db7c4788f1df0cb269596bf86c473155294958d"}, + {file = "pyarrow-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f4f3db1da51db4cfbafab3066a01b01578884206dced9f505da950d9ed4402d"}, + {file = "pyarrow-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:2523f87bd36877123fc8c4813f60d298722143ead73e907690a87e8557114693"}, + {file = "pyarrow-6.0.1-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:8f7d34efb9d667f9204b40ce91a77613c46691c24cd098e3b6986bd7401b8f06"}, + {file = "pyarrow-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e3c9184335da8faf08c0df95668ce9d778df3795ce4eec959f44908742900e10"}, + {file = "pyarrow-6.0.1-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:02baee816456a6e64486e587caaae2bf9f084fa3a891354ff18c3e945a1cb72f"}, + {file = "pyarrow-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604782b1c744b24a55df80125991a7154fbdef60991eb3d02bfaed06d22f055e"}, + {file = "pyarrow-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fab8132193ae095c43b1e8d6d7f393451ac198de5aaf011c6b576b1442966fec"}, + {file = "pyarrow-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:31038366484e538608f43920a5e2957b8862a43aa49438814619b527f50ec127"}, + {file = 
"pyarrow-6.0.1-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:632bea00c2fbe2da5d29ff1698fec312ed3aabfb548f06100144e1907e22093a"}, + {file = "pyarrow-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc03c875e5d68b0d0143f94c438add3ab3c2411ade2748423a9c24608fea571e"}, + {file = "pyarrow-6.0.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1cd4de317df01679e538004123d6d7bc325d73bad5c6bbc3d5f8aa2280408869"}, + {file = "pyarrow-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e77b1f7c6c08ec319b7882c1a7c7304731530923532b3243060e6e64c456cf34"}, + {file = "pyarrow-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a424fd9a3253d0322d53be7bbb20b5b01511706a61efadcf37f416da325e3d48"}, + {file = "pyarrow-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:c958cf3a4a9eee09e1063c02b89e882d19c61b3a2ce6cbd55191a6f45ed5004b"}, + {file = "pyarrow-6.0.1-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:0e0ef24b316c544f4bb56f5c376129097df3739e665feca0eb567f716d45c55a"}, + {file = "pyarrow-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c13ec3b26b3b069d673c5fa3a0c70c38f0d5c94686ac5dbc9d7e7d24040f812"}, + {file = "pyarrow-6.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:71891049dc58039a9523e1cb0d921be001dacb2b327fa7b62a35b96a3aad9f0d"}, + {file = "pyarrow-6.0.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:943141dd8cca6c5722552a0b11a3c2e791cdf85f1768dea8170b0a8a7e824ff9"}, + {file = "pyarrow-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fd077c06061b8fa8fdf91591a4270e368f63cf73c6ab56924d3b64efa96a873"}, + {file = "pyarrow-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5308f4bb770b48e07c8cff36cf6a4452862e8ce9492428ad5581d846420b3884"}, + {file = "pyarrow-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:cde4f711cd9476d4da18128c3a40cb529b6b7d2679aee6e0576212547530fef1"}, + {file = 
"pyarrow-6.0.1-cp39-cp39-macosx_10_13_universal2.whl", hash = "sha256:b8628269bd9289cae0ea668f5900451043252fe3666667f614e140084dd31aac"}, + {file = "pyarrow-6.0.1-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:981ccdf4f2696550733e18da882469893d2f33f55f3cbeb6a90f81741cbf67aa"}, + {file = "pyarrow-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:954326b426eec6e31ff55209f8840b54d788420e96c4005aaa7beed1fe60b42d"}, + {file = "pyarrow-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6b6483bf6b61fe9a046235e4ad4d9286b707607878d7dbdc2eb85a6ec4090baf"}, + {file = "pyarrow-6.0.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7ecad40a1d4e0104cd87757a403f36850261e7a989cf9e4cb3e30420bbbd1092"}, + {file = "pyarrow-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04c752fb41921d0064568a15a87dbb0222cfbe9040d4b2c1b306fe6e0a453530"}, + {file = "pyarrow-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:725d3fe49dfe392ff14a8ae6a75b230a60e8985f2b621b18cfa912fe02b65f1a"}, + {file = "pyarrow-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:2403c8af207262ce8e2bc1a9d19313941fd2e424f1cb3c4b749c17efe1fd699a"}, + {file = "pyarrow-6.0.1.tar.gz", hash = "sha256:423990d56cd8f12283b67367d48e142739b789085185018eb03d05087c3c8d43"}, ] pycodestyle = [ {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, ] pycparser = [ - {file = "pycparser-2.20-py2.py3-none-any.whl", hash = "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"}, - {file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"}, + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = 
"pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] pydocstyle = [ {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, @@ -3337,8 +2944,8 @@ pygments = [ {file = "Pygments-2.10.0.tar.gz", hash = "sha256:f398865f7eb6874156579fdf36bc840a03cab64d1cde9e93d68f46a425ec52c6"}, ] pylint = [ - {file = "pylint-2.11.1-py3-none-any.whl", hash = "sha256:0f358e221c45cbd4dad2a1e4b883e75d28acdcccd29d40c76eb72b307269b126"}, - {file = "pylint-2.11.1.tar.gz", hash = "sha256:2c9843fff1a88ca0ad98a256806c82c5a8f86086e7ccbdb93297d86c3f90c436"}, + {file = "pylint-2.12.1-py3-none-any.whl", hash = "sha256:b4b5a7b6d04e914a11c198c816042af1fb2d3cda29bb0c98a9c637010da2a5c5"}, + {file = "pylint-2.12.1.tar.gz", hash = "sha256:4f4a52b132c05b49094b28e109febcec6bfb7bc6961c7485a5ad0a0f961df289"}, ] pymysql = [ {file = "PyMySQL-1.0.2-py3-none-any.whl", hash = "sha256:41fc3a0c5013d5f039639442321185532e3e2c8924687abe6537de157d403641"}, @@ -3360,8 +2967,8 @@ pyodbc = [ {file = "pyodbc-4.0.32.tar.gz", hash = "sha256:9be5f0c3590655e1968488410fe3528bb8023d527e7ccec1f663d64245071a6b"}, ] pyparsing = [ - {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, - {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, + {file = "pyparsing-3.0.6-py3-none-any.whl", hash = "sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4"}, + {file = "pyparsing-3.0.6.tar.gz", hash = "sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81"}, ] pyrsistent = [ {file = "pyrsistent-0.18.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f4c8cabb46ff8e5d61f56a037974228e978f26bfefce4f61a4b1ac0ba7a2ab72"}, @@ -3438,12 +3045,12 @@ pywin32 = [ {file = "pywin32-302-cp39-cp39-win_amd64.whl", hash = 
"sha256:af5aea18167a31efcacc9f98a2ca932c6b6a6d91ebe31f007509e293dea12580"}, ] pywinpty = [ - {file = "pywinpty-1.1.5-cp310-none-win_amd64.whl", hash = "sha256:59e38276f732121b7b708b488055132c695ab7f8790b6ebee9b5b277e30c40e1"}, - {file = "pywinpty-1.1.5-cp36-none-win_amd64.whl", hash = "sha256:0f73bea7f4ecc4711d3706bb0adea0b426c384ff38b619e169d58e20bc307eb0"}, - {file = "pywinpty-1.1.5-cp37-none-win_amd64.whl", hash = "sha256:4cefeef61ab82e9e2bfe228d83a49117e33899931766dd18d576ea5c9187c1e0"}, - {file = "pywinpty-1.1.5-cp38-none-win_amd64.whl", hash = "sha256:44c78a9a74f1b6bff957f8b0acad0525f48f716ac61fd9d39e1eb6f87f1a46a0"}, - {file = "pywinpty-1.1.5-cp39-none-win_amd64.whl", hash = "sha256:ad12ddf276446e0440a760b7c0ba128d39602bc8e6641e0ef8447f1a466a8346"}, - {file = "pywinpty-1.1.5.tar.gz", hash = "sha256:92125f0f8e4e64bb5f3bf270a182c9206dc1765542c59bc07441908a9db17504"}, + {file = "pywinpty-1.1.6-cp310-none-win_amd64.whl", hash = "sha256:5f526f21b569b5610a61e3b6126259c76da979399598e5154498582df3736ade"}, + {file = "pywinpty-1.1.6-cp36-none-win_amd64.whl", hash = "sha256:7576e14f42b31fa98b62d24ded79754d2ea4625570c016b38eb347ce158a30f2"}, + {file = "pywinpty-1.1.6-cp37-none-win_amd64.whl", hash = "sha256:979ffdb9bdbe23db3f46fc7285fd6dbb86b80c12325a50582b211b3894072354"}, + {file = "pywinpty-1.1.6-cp38-none-win_amd64.whl", hash = "sha256:2308b1fc77545427610a705799d4ead5e7f00874af3fb148a03e202437456a7e"}, + {file = "pywinpty-1.1.6-cp39-none-win_amd64.whl", hash = "sha256:c703bf569a98ab7844b9daf37e88ab86f31862754ef6910a8b3824993a525c72"}, + {file = "pywinpty-1.1.6.tar.gz", hash = "sha256:8808f07350c709119cc4464144d6e749637f98e15acc1e5d3c37db1953d2eebc"}, ] pyzmq = [ {file = "pyzmq-22.3.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:6b217b8f9dfb6628f74b94bdaf9f7408708cb02167d644edca33f38746ca12dd"}, @@ -3485,58 +3092,83 @@ pyzmq = [ {file = "pyzmq-22.3.0.tar.gz", hash = "sha256:8eddc033e716f8c91c6a2112f0a8ebc5e00532b4a6ae1eb0ccc48e027f9c671c"}, ] 
redshift-connector = [ - {file = "redshift_connector-2.0.889-py3-none-any.whl", hash = "sha256:9f58781f8229c6684aa748a3832c11b8e638a5c9e74df4322c056d95e3785dbc"}, + {file = "redshift_connector-2.0.900-py3-none-any.whl", hash = "sha256:881c1b693b430b7637e94cb75a8bcdf0dfe708b427e61a938cf1e8ed3ff5fe6b"}, ] regex = [ - {file = "regex-2021.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:897c539f0f3b2c3a715be651322bef2167de1cdc276b3f370ae81a3bda62df71"}, - {file = "regex-2021.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:886f459db10c0f9d17c87d6594e77be915f18d343ee138e68d259eb385f044a8"}, - {file = "regex-2021.11.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:075b0fdbaea81afcac5a39a0d1bb91de887dd0d93bf692a5dd69c430e7fc58cb"}, - {file = "regex-2021.11.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6238d30dcff141de076344cf7f52468de61729c2f70d776fce12f55fe8df790"}, - {file = "regex-2021.11.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7fab29411d75c2eb48070020a40f80255936d7c31357b086e5931c107d48306e"}, - {file = "regex-2021.11.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0148988af0182a0a4e5020e7c168014f2c55a16d11179610f7883dd48ac0ebe"}, - {file = "regex-2021.11.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be30cd315db0168063a1755fa20a31119da91afa51da2907553493516e165640"}, - {file = "regex-2021.11.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e9cec3a62d146e8e122d159ab93ac32c988e2ec0dcb1e18e9e53ff2da4fbd30c"}, - {file = "regex-2021.11.2-cp310-cp310-win32.whl", hash = "sha256:41c66bd6750237a8ed23028a6c9173dc0c92dc24c473e771d3bfb9ee817700c3"}, - {file = "regex-2021.11.2-cp310-cp310-win_amd64.whl", hash = "sha256:0075fe4e2c2720a685fef0f863edd67740ff78c342cf20b2a79bc19388edf5db"}, - {file = 
"regex-2021.11.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0ed3465acf8c7c10aa2e0f3d9671da410ead63b38a77283ef464cbb64275df58"}, - {file = "regex-2021.11.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab1fea8832976ad0bebb11f652b692c328043057d35e9ebc78ab0a7a30cf9a70"}, - {file = "regex-2021.11.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cb1e44d860345ab5d4f533b6c37565a22f403277f44c4d2d5e06c325da959883"}, - {file = "regex-2021.11.2-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9486ebda015913909bc28763c6b92fcc3b5e5a67dee4674bceed112109f5dfb8"}, - {file = "regex-2021.11.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20605bfad484e1341b2cbfea0708e4b211d233716604846baa54b94821f487cb"}, - {file = "regex-2021.11.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f20f9f430c33597887ba9bd76635476928e76cad2981643ca8be277b8e97aa96"}, - {file = "regex-2021.11.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1d85ca137756d62c8138c971453cafe64741adad1f6a7e63a22a5a8abdbd19fa"}, - {file = "regex-2021.11.2-cp36-cp36m-win32.whl", hash = "sha256:af23b9ca9a874ef0ec20e44467b8edd556c37b0f46f93abfa93752ea7c0e8d1e"}, - {file = "regex-2021.11.2-cp36-cp36m-win_amd64.whl", hash = "sha256:070336382ca92c16c45b4066c4ba9fa83fb0bd13d5553a82e07d344df8d58a84"}, - {file = "regex-2021.11.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ef4e53e2fdc997d91f5b682f81f7dc9661db9a437acce28745d765d251902d85"}, - {file = "regex-2021.11.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35ed5714467fc606551db26f80ee5d6aa1f01185586a7bccd96f179c4b974a11"}, - {file = "regex-2021.11.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee36d5113b6506b97f45f2e8447cb9af146e60e3f527d93013d19f6d0405f3b"}, - {file = 
"regex-2021.11.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4fba661a4966adbd2c3c08d3caad6822ecb6878f5456588e2475ae23a6e47929"}, - {file = "regex-2021.11.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77f9d16f7970791f17ecce7e7f101548314ed1ee2583d4268601f30af3170856"}, - {file = "regex-2021.11.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6a28e87ba69f3a4f30d775b179aac55be1ce59f55799328a0d9b6df8f16b39d"}, - {file = "regex-2021.11.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9267e4fba27e6dd1008c4f2983cc548c98b4be4444e3e342db11296c0f45512f"}, - {file = "regex-2021.11.2-cp37-cp37m-win32.whl", hash = "sha256:d4bfe3bc3976ccaeb4ae32f51e631964e2f0e85b2b752721b7a02de5ce3b7f27"}, - {file = "regex-2021.11.2-cp37-cp37m-win_amd64.whl", hash = "sha256:2bb7cae741de1aa03e3dd3a7d98c304871eb155921ca1f0d7cc11f5aade913fd"}, - {file = "regex-2021.11.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:23f93e74409c210de4de270d4bf88fb8ab736a7400f74210df63a93728cf70d6"}, - {file = "regex-2021.11.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d8ee91e1c295beb5c132ebd78616814de26fedba6aa8687ea460c7f5eb289b72"}, - {file = "regex-2021.11.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e3ff69ab203b54ce5c480c3ccbe959394ea5beef6bd5ad1785457df7acea92e"}, - {file = "regex-2021.11.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e3c00cb5c71da655e1e5161481455479b613d500dd1bd252aa01df4f037c641f"}, - {file = "regex-2021.11.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4abf35e16f4b639daaf05a2602c1b1d47370e01babf9821306aa138924e3fe92"}, - {file = "regex-2021.11.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb11c982a849dc22782210b01d0c1b98eb3696ce655d58a54180774e4880ac66"}, - {file = 
"regex-2021.11.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e3755e0f070bc31567dfe447a02011bfa8444239b3e9e5cca6773a22133839"}, - {file = "regex-2021.11.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0621c90f28d17260b41838b22c81a79ff436141b322960eb49c7b3f91d1cbab6"}, - {file = "regex-2021.11.2-cp38-cp38-win32.whl", hash = "sha256:8fbe1768feafd3d0156556677b8ff234c7bf94a8110e906b2d73506f577a3269"}, - {file = "regex-2021.11.2-cp38-cp38-win_amd64.whl", hash = "sha256:f9ee98d658a146cb6507be720a0ce1b44f2abef8fb43c2859791d91aace17cd5"}, - {file = "regex-2021.11.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b3794cea825f101fe0df9af8a00f9fad8e119c91e39a28636b95ee2b45b6c2e5"}, - {file = "regex-2021.11.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3576e173e7b4f88f683b4de7db0c2af1b209bb48b2bf1c827a6f3564fad59a97"}, - {file = "regex-2021.11.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48b4f4810117a9072a5aa70f7fea5f86fa9efbe9a798312e0a05044bd707cc33"}, - {file = "regex-2021.11.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f5930d334c2f607711d54761956aedf8137f83f1b764b9640be21d25a976f3a4"}, - {file = "regex-2021.11.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:956187ff49db7014ceb31e88fcacf4cf63371e6e44d209cf8816cd4a2d61e11a"}, - {file = "regex-2021.11.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17e095f7f96a4b9f24b93c2c915f31a5201a6316618d919b0593afb070a5270e"}, - {file = "regex-2021.11.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a56735c35a3704603d9d7b243ee06139f0837bcac2171d9ba1d638ce1df0742a"}, - {file = "regex-2021.11.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:adf35d88d9cffc202e6046e4c32e1e11a1d0238b2fcf095c94f109e510ececea"}, - {file = "regex-2021.11.2-cp39-cp39-win32.whl", hash = "sha256:30fe317332de0e50195665bc61a27d46e903d682f94042c36b3f88cb84bd7958"}, - {file = "regex-2021.11.2-cp39-cp39-win_amd64.whl", hash = "sha256:85289c25f658e3260b00178757c87f033f3d4b3e40aa4abdd4dc875ff11a94fb"}, - {file = "regex-2021.11.2.tar.gz", hash = "sha256:5e85dcfc5d0f374955015ae12c08365b565c6f1eaf36dd182476a4d8e5a1cdb7"}, + {file = "regex-2021.11.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9345b6f7ee578bad8e475129ed40123d265464c4cfead6c261fd60fc9de00bcf"}, + {file = "regex-2021.11.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:416c5f1a188c91e3eb41e9c8787288e707f7d2ebe66e0a6563af280d9b68478f"}, + {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0538c43565ee6e703d3a7c3bdfe4037a5209250e8502c98f20fea6f5fdf2965"}, + {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee1227cf08b6716c85504aebc49ac827eb88fcc6e51564f010f11a406c0a667"}, + {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6650f16365f1924d6014d2ea770bde8555b4a39dc9576abb95e3cd1ff0263b36"}, + {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30ab804ea73972049b7a2a5c62d97687d69b5a60a67adca07eb73a0ddbc9e29f"}, + {file = "regex-2021.11.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68a067c11463de2a37157930d8b153005085e42bcb7ad9ca562d77ba7d1404e0"}, + {file = "regex-2021.11.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:162abfd74e88001d20cb73ceaffbfe601469923e875caf9118333b1a4aaafdc4"}, + {file = "regex-2021.11.10-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:b9ed0b1e5e0759d6b7f8e2f143894b2a7f3edd313f38cf44e1e15d360e11749b"}, + {file = "regex-2021.11.10-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:473e67837f786404570eae33c3b64a4b9635ae9f00145250851a1292f484c063"}, + {file = "regex-2021.11.10-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2fee3ed82a011184807d2127f1733b4f6b2ff6ec7151d83ef3477f3b96a13d03"}, + {file = "regex-2021.11.10-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d5fd67df77bab0d3f4ea1d7afca9ef15c2ee35dfb348c7b57ffb9782a6e4db6e"}, + {file = "regex-2021.11.10-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5d408a642a5484b9b4d11dea15a489ea0928c7e410c7525cd892f4d04f2f617b"}, + {file = "regex-2021.11.10-cp310-cp310-win32.whl", hash = "sha256:98ba568e8ae26beb726aeea2273053c717641933836568c2a0278a84987b2a1a"}, + {file = "regex-2021.11.10-cp310-cp310-win_amd64.whl", hash = "sha256:780b48456a0f0ba4d390e8b5f7c661fdd218934388cde1a974010a965e200e12"}, + {file = "regex-2021.11.10-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:dba70f30fd81f8ce6d32ddeef37d91c8948e5d5a4c63242d16a2b2df8143aafc"}, + {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1f54b9b4b6c53369f40028d2dd07a8c374583417ee6ec0ea304e710a20f80a0"}, + {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fbb9dc00e39f3e6c0ef48edee202f9520dafb233e8b51b06b8428cfcb92abd30"}, + {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666abff54e474d28ff42756d94544cdfd42e2ee97065857413b72e8a2d6a6345"}, + {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5537f71b6d646f7f5f340562ec4c77b6e1c915f8baae822ea0b7e46c1f09b733"}, + {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2e07c6a26ed4bea91b897ee2b0835c21716d9a469a96c3e878dc5f8c55bb23"}, + {file = 
"regex-2021.11.10-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ca5f18a75e1256ce07494e245cdb146f5a9267d3c702ebf9b65c7f8bd843431e"}, + {file = "regex-2021.11.10-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:74cbeac0451f27d4f50e6e8a8f3a52ca074b5e2da9f7b505c4201a57a8ed6286"}, + {file = "regex-2021.11.10-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:3598893bde43091ee5ca0a6ad20f08a0435e93a69255eeb5f81b85e81e329264"}, + {file = "regex-2021.11.10-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:50a7ddf3d131dc5633dccdb51417e2d1910d25cbcf842115a3a5893509140a3a"}, + {file = "regex-2021.11.10-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:61600a7ca4bcf78a96a68a27c2ae9389763b5b94b63943d5158f2a377e09d29a"}, + {file = "regex-2021.11.10-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:563d5f9354e15e048465061509403f68424fef37d5add3064038c2511c8f5e00"}, + {file = "regex-2021.11.10-cp36-cp36m-win32.whl", hash = "sha256:93a5051fcf5fad72de73b96f07d30bc29665697fb8ecdfbc474f3452c78adcf4"}, + {file = "regex-2021.11.10-cp36-cp36m-win_amd64.whl", hash = "sha256:b483c9d00a565633c87abd0aaf27eb5016de23fed952e054ecc19ce32f6a9e7e"}, + {file = "regex-2021.11.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fff55f3ce50a3ff63ec8e2a8d3dd924f1941b250b0aac3d3d42b687eeff07a8e"}, + {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e32d2a2b02ccbef10145df9135751abea1f9f076e67a4e261b05f24b94219e36"}, + {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53db2c6be8a2710b359bfd3d3aa17ba38f8aa72a82309a12ae99d3c0c3dcd74d"}, + {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2207ae4f64ad3af399e2d30dde66f0b36ae5c3129b52885f1bffc2f05ec505c8"}, + {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d5ca078bb666c4a9d1287a379fe617a6dccd18c3e8a7e6c7e1eb8974330c626a"}, + {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd33eb9bdcfbabab3459c9ee651d94c842bc8a05fabc95edf4ee0c15a072495e"}, + {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05b7d6d7e64efe309972adab77fc2af8907bb93217ec60aa9fe12a0dad35874f"}, + {file = "regex-2021.11.10-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:42b50fa6666b0d50c30a990527127334d6b96dd969011e843e726a64011485da"}, + {file = "regex-2021.11.10-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6e1d2cc79e8dae442b3fa4a26c5794428b98f81389af90623ffcc650ce9f6732"}, + {file = "regex-2021.11.10-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:0416f7399e918c4b0e074a0f66e5191077ee2ca32a0f99d4c187a62beb47aa05"}, + {file = "regex-2021.11.10-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:ce298e3d0c65bd03fa65ffcc6db0e2b578e8f626d468db64fdf8457731052942"}, + {file = "regex-2021.11.10-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dc07f021ee80510f3cd3af2cad5b6a3b3a10b057521d9e6aaeb621730d320c5a"}, + {file = "regex-2021.11.10-cp37-cp37m-win32.whl", hash = "sha256:e71255ba42567d34a13c03968736c5d39bb4a97ce98188fafb27ce981115beec"}, + {file = "regex-2021.11.10-cp37-cp37m-win_amd64.whl", hash = "sha256:07856afef5ffcc052e7eccf3213317fbb94e4a5cd8177a2caa69c980657b3cb4"}, + {file = "regex-2021.11.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba05430e819e58544e840a68b03b28b6d328aff2e41579037e8bab7653b37d83"}, + {file = "regex-2021.11.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7f301b11b9d214f83ddaf689181051e7f48905568b0c7017c04c06dfd065e244"}, + {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aaa4e0705ef2b73dd8e36eeb4c868f80f8393f5f4d855e94025ce7ad8525f50"}, + {file = 
"regex-2021.11.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:788aef3549f1924d5c38263104dae7395bf020a42776d5ec5ea2b0d3d85d6646"}, + {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f8af619e3be812a2059b212064ea7a640aff0568d972cd1b9e920837469eb3cb"}, + {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85bfa6a5413be0ee6c5c4a663668a2cad2cbecdee367630d097d7823041bdeec"}, + {file = "regex-2021.11.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f23222527b307970e383433daec128d769ff778d9b29343fb3496472dc20dabe"}, + {file = "regex-2021.11.10-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:da1a90c1ddb7531b1d5ff1e171b4ee61f6345119be7351104b67ff413843fe94"}, + {file = "regex-2021.11.10-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f5be7805e53dafe94d295399cfbe5227f39995a997f4fd8539bf3cbdc8f47ca8"}, + {file = "regex-2021.11.10-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a955b747d620a50408b7fdf948e04359d6e762ff8a85f5775d907ceced715129"}, + {file = "regex-2021.11.10-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:139a23d1f5d30db2cc6c7fd9c6d6497872a672db22c4ae1910be22d4f4b2068a"}, + {file = "regex-2021.11.10-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ca49e1ab99593438b204e00f3970e7a5f70d045267051dfa6b5f4304fcfa1dbf"}, + {file = "regex-2021.11.10-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:96fc32c16ea6d60d3ca7f63397bff5c75c5a562f7db6dec7d412f7c4d2e78ec0"}, + {file = "regex-2021.11.10-cp38-cp38-win32.whl", hash = "sha256:0617383e2fe465732af4509e61648b77cbe3aee68b6ac8c0b6fe934db90be5cc"}, + {file = "regex-2021.11.10-cp38-cp38-win_amd64.whl", hash = "sha256:a3feefd5e95871872673b08636f96b61ebef62971eab044f5124fb4dea39919d"}, + {file = "regex-2021.11.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:f7f325be2804246a75a4f45c72d4ce80d2443ab815063cdf70ee8fb2ca59ee1b"}, + {file = "regex-2021.11.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:537ca6a3586931b16a85ac38c08cc48f10fc870a5b25e51794c74df843e9966d"}, + {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eef2afb0fd1747f33f1ee3e209bce1ed582d1896b240ccc5e2697e3275f037c7"}, + {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:432bd15d40ed835a51617521d60d0125867f7b88acf653e4ed994a1f8e4995dc"}, + {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b43c2b8a330a490daaef5a47ab114935002b13b3f9dc5da56d5322ff218eeadb"}, + {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:962b9a917dd7ceacbe5cd424556914cb0d636001e393b43dc886ba31d2a1e449"}, + {file = "regex-2021.11.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa8c626d6441e2d04b6ee703ef2d1e17608ad44c7cb75258c09dd42bacdfc64b"}, + {file = "regex-2021.11.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3c5fb32cc6077abad3bbf0323067636d93307c9fa93e072771cf9a64d1c0f3ef"}, + {file = "regex-2021.11.10-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cd410a1cbb2d297c67d8521759ab2ee3f1d66206d2e4328502a487589a2cb21b"}, + {file = "regex-2021.11.10-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e6096b0688e6e14af6a1b10eaad86b4ff17935c49aa774eac7c95a57a4e8c296"}, + {file = "regex-2021.11.10-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:529801a0d58809b60b3531ee804d3e3be4b412c94b5d267daa3de7fadef00f49"}, + {file = "regex-2021.11.10-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0f594b96fe2e0821d026365f72ac7b4f0b487487fb3d4aaf10dd9d97d88a9737"}, + {file = "regex-2021.11.10-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:2409b5c9cef7054dde93a9803156b411b677affc84fca69e908b1cb2c540025d"}, + {file = "regex-2021.11.10-cp39-cp39-win32.whl", hash = "sha256:3b5df18db1fccd66de15aa59c41e4f853b5df7550723d26aa6cb7f40e5d9da5a"}, + {file = "regex-2021.11.10-cp39-cp39-win_amd64.whl", hash = "sha256:83ee89483672b11f8952b158640d0c0ff02dc43d9cb1b70c1564b49abe92ce29"}, + {file = "regex-2021.11.10.tar.gz", hash = "sha256:f341ee2df0999bfdf7a95e448075effe0db212a59387de1a70690e4acb03d4c6"}, ] requests = [ {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, @@ -3547,19 +3179,19 @@ requests-aws4auth = [ {file = "requests_aws4auth-1.1.1-py2.py3-none-any.whl", hash = "sha256:dfd9f930ffde48a756b72b55698a8522875ea6358dcffbcc44a66700ace31783"}, ] responses = [ - {file = "responses-0.15.0-py2.py3-none-any.whl", hash = "sha256:5955ad3468fe8eb5fb736cdab4943457b7768f8670fa3624b4e26ff52dfe20c0"}, - {file = "responses-0.15.0.tar.gz", hash = "sha256:866757987d1962aa908d9c8b3185739faefd72a359e95459de0c2e4e5369c9b2"}, + {file = "responses-0.16.0-py2.py3-none-any.whl", hash = "sha256:f358ef75e8bf431b0aa203cc62625c3a1c80a600dbe9de91b944bf4e9c600b92"}, + {file = "responses-0.16.0.tar.gz", hash = "sha256:a2e3aca2a8277e61257cd3b1c154b1dd0d782b1ae3d38b7fa37cbe3feb531791"}, ] restructuredtext-lint = [ {file = "restructuredtext_lint-1.3.2.tar.gz", hash = "sha256:d3b10a1fe2ecac537e51ae6d151b223b78de9fafdd50e5eb6b08c243df173c80"}, ] s3fs = [ - {file = "s3fs-2021.10.1-py3-none-any.whl", hash = "sha256:3ae3fc7e51f6899a90adf0e35459c5ead993bea1f7d2ba703086c03e5523ea40"}, - {file = "s3fs-2021.10.1.tar.gz", hash = "sha256:493ae25053e5262552a247a9f1c3a2c8fbcd20f5907fce63a749126ba58fe05e"}, + {file = "s3fs-0.4.2-py3-none-any.whl", hash = "sha256:91c1dfb45e5217bd441a7a560946fe865ced6225ff7eb0fb459fe6e601a95ed3"}, + {file = "s3fs-0.4.2.tar.gz", hash = "sha256:2ca5de8dc18ad7ad350c0bd01aef0406aa5d0fff78a561f0f710f9d9858abdd0"}, ] s3transfer = [ - {file = 
"s3transfer-0.4.2-py2.py3-none-any.whl", hash = "sha256:9b3752887a2880690ce628bc263d6d13a3864083aeacff4890c1c9839a5eb0bc"}, - {file = "s3transfer-0.4.2.tar.gz", hash = "sha256:cb022f4b16551edebbb31a377d3f09600dbada7363d8c5db7976e7f47732e1b2"}, + {file = "s3transfer-0.5.0-py3-none-any.whl", hash = "sha256:9c1dc369814391a6bda20ebbf4b70a0f34630592c9aa520856bf384916af2803"}, + {file = "s3transfer-0.5.0.tar.gz", hash = "sha256:50ed823e1dc5868ad40c8dc92072f757aa0e653a192845c94a3b676f4a62da4c"}, ] scramp = [ {file = "scramp-1.4.1-py3-none-any.whl", hash = "sha256:93c9cc2ffe54a451e02981c07a5a23cbd830701102789939cfb4ff91efd6ca8c"}, @@ -3578,16 +3210,16 @@ sniffio = [ {file = "sniffio-1.2.0.tar.gz", hash = "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"}, ] snowballstemmer = [ - {file = "snowballstemmer-2.1.0-py2.py3-none-any.whl", hash = "sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2"}, - {file = "snowballstemmer-2.1.0.tar.gz", hash = "sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914"}, + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] soupsieve = [ - {file = "soupsieve-2.3-py3-none-any.whl", hash = "sha256:617ffc4d0dfd39c66f4d1413a6e165663a34eca86be9b54f97b91756300ff6df"}, - {file = "soupsieve-2.3.tar.gz", hash = "sha256:e4860f889dfa88774c07da0b276b70c073b6470fa1a4a8350800bb7bce3dcc76"}, + {file = "soupsieve-2.3.1-py3-none-any.whl", hash = "sha256:1a3cca2617c6b38c0343ed661b1fa5de5637f257d4fe22bd9f1338010a1efefb"}, + {file = "soupsieve-2.3.1.tar.gz", hash = "sha256:b8d49b1cd4f037c7082a9683dfa1801aa2597fb11c3a1155b7a5b94829b4f1f9"}, ] sphinx = [ - {file = "Sphinx-4.2.0-py3-none-any.whl", hash = "sha256:98a535c62a4fcfcc362528592f69b26f7caec587d32cd55688db580be0287ae0"}, - {file = 
"Sphinx-4.2.0.tar.gz", hash = "sha256:94078db9184491e15bce0a56d9186e0aec95f16ac20b12d00e06d4e36f1058a6"}, + {file = "Sphinx-4.3.1-py3-none-any.whl", hash = "sha256:048dac56039a5713f47a554589dc98a442b39226a2b9ed7f82797fcb2fe9253f"}, + {file = "Sphinx-4.3.1.tar.gz", hash = "sha256:32a5b3e9a1b176cc25ed048557d4d3d01af635e6b76c5bc7a43b0a34447fbd45"}, ] sphinx-bootstrap-theme = [ {file = "sphinx-bootstrap-theme-0.8.0.tar.gz", hash = "sha256:038ee7e89478e064b5dd7e614de6f3f4cec81d9f9efbebb06e105693d6a50924"}, @@ -3721,9 +3353,8 @@ typed-ast = [ {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, ] typing-extensions = [ - {file = "typing_extensions-3.10.0.2-py2-none-any.whl", hash = "sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7"}, - {file = "typing_extensions-3.10.0.2-py3-none-any.whl", hash = "sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34"}, - {file = "typing_extensions-3.10.0.2.tar.gz", hash = "sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e"}, + {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, + {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, ] urllib3 = [ {file = "urllib3-1.26.7-py2.py3-none-any.whl", hash = "sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"}, @@ -3814,80 +3445,6 @@ xmltodict = [ {file = "xmltodict-0.12.0-py2.py3-none-any.whl", hash = "sha256:8bbcb45cc982f48b2ca8fe7e7827c5d792f217ecf1792626f808bf41c3b86051"}, {file = "xmltodict-0.12.0.tar.gz", hash = "sha256:50d8c638ed7ecb88d90561beedbf720c9b4e851a9fa6c47ebd64e99d166d8a21"}, ] -yarl = [ - {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2a8508f7350512434e41065684076f640ecce176d262a7d54f0da41d99c5a95"}, - {file = 
"yarl-1.7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da6df107b9ccfe52d3a48165e48d72db0eca3e3029b5b8cb4fe6ee3cb870ba8b"}, - {file = "yarl-1.7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1d0894f238763717bdcfea74558c94e3bc34aeacd3351d769460c1a586a8b05"}, - {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe4b95b7e00c6635a72e2d00b478e8a28bfb122dc76349a06e20792eb53a523"}, - {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c145ab54702334c42237a6c6c4cc08703b6aa9b94e2f227ceb3d477d20c36c63"}, - {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ca56f002eaf7998b5fcf73b2421790da9d2586331805f38acd9997743114e98"}, - {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1d3d5ad8ea96bd6d643d80c7b8d5977b4e2fb1bab6c9da7322616fd26203d125"}, - {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:167ab7f64e409e9bdd99333fe8c67b5574a1f0495dcfd905bc7454e766729b9e"}, - {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:95a1873b6c0dd1c437fb3bb4a4aaa699a48c218ac7ca1e74b0bee0ab16c7d60d"}, - {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6152224d0a1eb254f97df3997d79dadd8bb2c1a02ef283dbb34b97d4f8492d23"}, - {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bb7d54b8f61ba6eee541fba4b83d22b8a046b4ef4d8eb7f15a7e35db2e1e245"}, - {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:9c1f083e7e71b2dd01f7cd7434a5f88c15213194df38bc29b388ccdf1492b739"}, - {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f44477ae29025d8ea87ec308539f95963ffdc31a82f42ca9deecf2d505242e72"}, - {file = "yarl-1.7.2-cp310-cp310-win32.whl", hash = 
"sha256:cff3ba513db55cc6a35076f32c4cdc27032bd075c9faef31fec749e64b45d26c"}, - {file = "yarl-1.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:c9c6d927e098c2d360695f2e9d38870b2e92e0919be07dbe339aefa32a090265"}, - {file = "yarl-1.7.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9b4c77d92d56a4c5027572752aa35082e40c561eec776048330d2907aead891d"}, - {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c01a89a44bb672c38f42b49cdb0ad667b116d731b3f4c896f72302ff77d71656"}, - {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c19324a1c5399b602f3b6e7db9478e5b1adf5cf58901996fc973fe4fccd73eed"}, - {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3abddf0b8e41445426d29f955b24aeecc83fa1072be1be4e0d194134a7d9baee"}, - {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6a1a9fe17621af43e9b9fcea8bd088ba682c8192d744b386ee3c47b56eaabb2c"}, - {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8b0915ee85150963a9504c10de4e4729ae700af11df0dc5550e6587ed7891e92"}, - {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:29e0656d5497733dcddc21797da5a2ab990c0cb9719f1f969e58a4abac66234d"}, - {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:bf19725fec28452474d9887a128e98dd67eee7b7d52e932e6949c532d820dc3b"}, - {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d6f3d62e16c10e88d2168ba2d065aa374e3c538998ed04996cd373ff2036d64c"}, - {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ac10bbac36cd89eac19f4e51c032ba6b412b3892b685076f4acd2de18ca990aa"}, - {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aa32aaa97d8b2ed4e54dc65d241a0da1c627454950f7d7b1f95b13985afd6c5d"}, - {file = 
"yarl-1.7.2-cp36-cp36m-win32.whl", hash = "sha256:87f6e082bce21464857ba58b569370e7b547d239ca22248be68ea5d6b51464a1"}, - {file = "yarl-1.7.2-cp36-cp36m-win_amd64.whl", hash = "sha256:ac35ccde589ab6a1870a484ed136d49a26bcd06b6a1c6397b1967ca13ceb3913"}, - {file = "yarl-1.7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a467a431a0817a292121c13cbe637348b546e6ef47ca14a790aa2fa8cc93df63"}, - {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ab0c3274d0a846840bf6c27d2c60ba771a12e4d7586bf550eefc2df0b56b3b4"}, - {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d260d4dc495c05d6600264a197d9d6f7fc9347f21d2594926202fd08cf89a8ba"}, - {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc4dd8b01a8112809e6b636b00f487846956402834a7fd59d46d4f4267181c41"}, - {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c1164a2eac148d85bbdd23e07dfcc930f2e633220f3eb3c3e2a25f6148c2819e"}, - {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:67e94028817defe5e705079b10a8438b8cb56e7115fa01640e9c0bb3edf67332"}, - {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:89ccbf58e6a0ab89d487c92a490cb5660d06c3a47ca08872859672f9c511fc52"}, - {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8cce6f9fa3df25f55521fbb5c7e4a736683148bcc0c75b21863789e5185f9185"}, - {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:211fcd65c58bf250fb994b53bc45a442ddc9f441f6fec53e65de8cba48ded986"}, - {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c10ea1e80a697cf7d80d1ed414b5cb8f1eec07d618f54637067ae3c0334133c4"}, - {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:52690eb521d690ab041c3919666bea13ab9fbff80d615ec16fa81a297131276b"}, - {file = "yarl-1.7.2-cp37-cp37m-win32.whl", hash = "sha256:695ba021a9e04418507fa930d5f0704edbce47076bdcfeeaba1c83683e5649d1"}, - {file = "yarl-1.7.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c17965ff3706beedafd458c452bf15bac693ecd146a60a06a214614dc097a271"}, - {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fce78593346c014d0d986b7ebc80d782b7f5e19843ca798ed62f8e3ba8728576"}, - {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c2a1ac41a6aa980db03d098a5531f13985edcb451bcd9d00670b03129922cd0d"}, - {file = "yarl-1.7.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:39d5493c5ecd75c8093fa7700a2fb5c94fe28c839c8e40144b7ab7ccba6938c8"}, - {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eb6480ef366d75b54c68164094a6a560c247370a68c02dddb11f20c4c6d3c9d"}, - {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ba63585a89c9885f18331a55d25fe81dc2d82b71311ff8bd378fc8004202ff6"}, - {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e39378894ee6ae9f555ae2de332d513a5763276a9265f8e7cbaeb1b1ee74623a"}, - {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c0910c6b6c31359d2f6184828888c983d54d09d581a4a23547a35f1d0b9484b1"}, - {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6feca8b6bfb9eef6ee057628e71e1734caf520a907b6ec0d62839e8293e945c0"}, - {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8300401dc88cad23f5b4e4c1226f44a5aa696436a4026e456fe0e5d2f7f486e6"}, - {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:788713c2896f426a4e166b11f4ec538b5736294ebf7d5f654ae445fd44270832"}, - {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash 
= "sha256:fd547ec596d90c8676e369dd8a581a21227fe9b4ad37d0dc7feb4ccf544c2d59"}, - {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:737e401cd0c493f7e3dd4db72aca11cfe069531c9761b8ea474926936b3c57c8"}, - {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf81561f2972fb895e7844882898bda1eef4b07b5b385bcd308d2098f1a767b"}, - {file = "yarl-1.7.2-cp38-cp38-win32.whl", hash = "sha256:ede3b46cdb719c794427dcce9d8beb4abe8b9aa1e97526cc20de9bd6583ad1ef"}, - {file = "yarl-1.7.2-cp38-cp38-win_amd64.whl", hash = "sha256:cc8b7a7254c0fc3187d43d6cb54b5032d2365efd1df0cd1749c0c4df5f0ad45f"}, - {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:580c1f15500e137a8c37053e4cbf6058944d4c114701fa59944607505c2fe3a0"}, - {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ec1d9a0d7780416e657f1e405ba35ec1ba453a4f1511eb8b9fbab81cb8b3ce1"}, - {file = "yarl-1.7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3bf8cfe8856708ede6a73907bf0501f2dc4e104085e070a41f5d88e7faf237f3"}, - {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1be4bbb3d27a4e9aa5f3df2ab61e3701ce8fcbd3e9846dbce7c033a7e8136746"}, - {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:534b047277a9a19d858cde163aba93f3e1677d5acd92f7d10ace419d478540de"}, - {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6ddcd80d79c96eb19c354d9dca95291589c5954099836b7c8d29278a7ec0bda"}, - {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9bfcd43c65fbb339dc7086b5315750efa42a34eefad0256ba114cd8ad3896f4b"}, - {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f64394bd7ceef1237cc604b5a89bf748c95982a84bcd3c4bbeb40f685c810794"}, - {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash 
= "sha256:044daf3012e43d4b3538562da94a88fb12a6490652dbc29fb19adfa02cf72eac"}, - {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:368bcf400247318382cc150aaa632582d0780b28ee6053cd80268c7e72796dec"}, - {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:bab827163113177aee910adb1f48ff7af31ee0289f434f7e22d10baf624a6dfe"}, - {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0cba38120db72123db7c58322fa69e3c0efa933040ffb586c3a87c063ec7cae8"}, - {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:59218fef177296451b23214c91ea3aba7858b4ae3306dde120224cfe0f7a6ee8"}, - {file = "yarl-1.7.2-cp39-cp39-win32.whl", hash = "sha256:1edc172dcca3f11b38a9d5c7505c83c1913c0addc99cd28e993efeaafdfaa18d"}, - {file = "yarl-1.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:797c2c412b04403d2da075fb93c123df35239cd7b4cc4e0cd9e5839b73f52c58"}, - {file = "yarl-1.7.2.tar.gz", hash = "sha256:45399b46d60c253327a460e99856752009fcee5f5d3c80b2f7c0cae1c38d56dd"}, -] zipp = [ {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"}, diff --git a/pyproject.toml b/pyproject.toml index d08de9a9f..1b67e7423 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,8 +28,8 @@ classifiers = [ [tool.poetry.dependencies] python = ">=3.6.2, <=3.10.0" -boto3 = "^1.16.8" -botocore = "^1.19.8" +boto3 = "^1.20.17" +botocore = "^1.23.17" # python_full_version instead of just python is needed until the changes # from https://github.com/python-poetry/poetry-core/pull/180 are released pandas = [ @@ -76,7 +76,7 @@ nbsphinx-link = "^1.3.0" IPython = "^7.16.0" moto = "^2.2.12" jupyterlab = "^3.1.4" -s3fs = "^2021.10.0" +s3fs = "0.4.2" python-Levenshtein = "^0.12.2" bump2version = "^1.0.1" diff --git a/test_infra/app.py b/test_infra/app.py index 8c3395e22..3356f457b 100644 --- 
a/test_infra/app.py +++ b/test_infra/app.py @@ -2,11 +2,13 @@ from aws_cdk import core as cdk from stacks.base_stack import BaseStack from stacks.databases_stack import DatabasesStack +from stacks.lakeformation_stack import LakeFormationStack from stacks.opensearch_stack import OpenSearchStack app = cdk.App() base = BaseStack(app, "aws-data-wrangler-base") + DatabasesStack( app, "aws-data-wrangler-databases", @@ -15,6 +17,8 @@ base.get_key, ) +LakeFormationStack(app, "aws-data-wrangler-lakeformation") + OpenSearchStack( app, "aws-data-wrangler-opensearch", diff --git a/test_infra/poetry.lock b/test_infra/poetry.lock index aa17ff35f..164515b8b 100644 --- a/test_infra/poetry.lock +++ b/test_infra/poetry.lock @@ -14,625 +14,639 @@ tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (> [[package]] name = "aws-cdk.assets" -version = "1.124.0" +version = "1.130.0" description = "This module is deprecated. All types are now available under the core module" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.core" = "1.124.0" -"aws-cdk.cx-api" = "1.124.0" +"aws-cdk.core" = "1.130.0" +"aws-cdk.cx-api" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-applicationautoscaling" -version = "1.124.0" +version = "1.130.0" description = "The CDK Construct Library for AWS::ApplicationAutoScaling" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-autoscaling-common" = "1.124.0" -"aws-cdk.aws-cloudwatch" = "1.124.0" -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.core" = "1.124.0" +"aws-cdk.aws-autoscaling-common" = "1.130.0" +"aws-cdk.aws-cloudwatch" = "1.130.0" +"aws-cdk.aws-iam" = "1.130.0" +"aws-cdk.core" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-autoscaling-common" 
-version = "1.124.0" +version = "1.130.0" description = "Common implementation package for @aws-cdk/aws-autoscaling and @aws-cdk/aws-applicationautoscaling" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.core" = "1.124.0" +"aws-cdk.aws-iam" = "1.130.0" +"aws-cdk.core" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-certificatemanager" -version = "1.124.0" +version = "1.130.0" description = "The CDK Construct Library for AWS::CertificateManager" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-cloudwatch" = "1.124.0" -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.aws-lambda" = "1.124.0" -"aws-cdk.aws-route53" = "1.124.0" -"aws-cdk.core" = "1.124.0" +"aws-cdk.aws-cloudwatch" = "1.130.0" +"aws-cdk.aws-iam" = "1.130.0" +"aws-cdk.aws-lambda" = "1.130.0" +"aws-cdk.aws-route53" = "1.130.0" +"aws-cdk.core" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-cloudformation" -version = "1.124.0" +version = "1.130.0" description = "The CDK Construct Library for AWS::CloudFormation" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.aws-lambda" = "1.124.0" -"aws-cdk.aws-s3" = "1.124.0" -"aws-cdk.aws-sns" = "1.124.0" -"aws-cdk.core" = "1.124.0" -"aws-cdk.cx-api" = "1.124.0" +"aws-cdk.aws-iam" = "1.130.0" +"aws-cdk.aws-lambda" = "1.130.0" +"aws-cdk.aws-s3" = "1.130.0" +"aws-cdk.aws-sns" = "1.130.0" +"aws-cdk.core" = "1.130.0" +"aws-cdk.cx-api" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-cloudwatch" -version = "1.124.0" +version = "1.130.0" description = "The CDK Construct Library for 
AWS::CloudWatch" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.core" = "1.124.0" +"aws-cdk.aws-iam" = "1.130.0" +"aws-cdk.core" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-codeguruprofiler" -version = "1.124.0" +version = "1.130.0" description = "The CDK Construct Library for AWS::CodeGuruProfiler" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.core" = "1.124.0" +"aws-cdk.aws-iam" = "1.130.0" +"aws-cdk.core" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-codestarnotifications" -version = "1.124.0" +version = "1.130.0" description = "The CDK Construct Library for AWS::CodeStarNotifications" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.core" = "1.124.0" +"aws-cdk.core" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-ec2" -version = "1.124.0" +version = "1.130.0" description = "The CDK Construct Library for AWS::EC2" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-cloudwatch" = "1.124.0" -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.aws-kms" = "1.124.0" -"aws-cdk.aws-logs" = "1.124.0" -"aws-cdk.aws-s3" = "1.124.0" -"aws-cdk.aws-s3-assets" = "1.124.0" -"aws-cdk.aws-ssm" = "1.124.0" -"aws-cdk.cloud-assembly-schema" = "1.124.0" -"aws-cdk.core" = "1.124.0" -"aws-cdk.cx-api" = "1.124.0" -"aws-cdk.region-info" = "1.124.0" +"aws-cdk.aws-cloudwatch" = "1.130.0" +"aws-cdk.aws-iam" = "1.130.0" +"aws-cdk.aws-kms" = "1.130.0" +"aws-cdk.aws-logs" = "1.130.0" +"aws-cdk.aws-s3" = "1.130.0" +"aws-cdk.aws-s3-assets" = "1.130.0" 
+"aws-cdk.aws-ssm" = "1.130.0" +"aws-cdk.cloud-assembly-schema" = "1.130.0" +"aws-cdk.core" = "1.130.0" +"aws-cdk.cx-api" = "1.130.0" +"aws-cdk.region-info" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-ecr" -version = "1.124.0" +version = "1.130.0" description = "The CDK Construct Library for AWS::ECR" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-events" = "1.124.0" -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.core" = "1.124.0" +"aws-cdk.aws-events" = "1.130.0" +"aws-cdk.aws-iam" = "1.130.0" +"aws-cdk.core" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-ecr-assets" -version = "1.124.0" +version = "1.130.0" description = "Docker image assets deployed to ECR" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.assets" = "1.124.0" -"aws-cdk.aws-ecr" = "1.124.0" -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.aws-s3" = "1.124.0" -"aws-cdk.core" = "1.124.0" -"aws-cdk.cx-api" = "1.124.0" +"aws-cdk.assets" = "1.130.0" +"aws-cdk.aws-ecr" = "1.130.0" +"aws-cdk.aws-iam" = "1.130.0" +"aws-cdk.aws-s3" = "1.130.0" +"aws-cdk.core" = "1.130.0" +"aws-cdk.cx-api" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-efs" -version = "1.124.0" +version = "1.130.0" description = "The CDK Construct Library for AWS::EFS" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-ec2" = "1.124.0" -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.aws-kms" = "1.124.0" -"aws-cdk.cloud-assembly-schema" = "1.124.0" -"aws-cdk.core" = "1.124.0" -"aws-cdk.cx-api" = "1.124.0" +"aws-cdk.aws-ec2" = "1.130.0" +"aws-cdk.aws-iam" = "1.130.0" +"aws-cdk.aws-kms" = "1.130.0" 
+"aws-cdk.cloud-assembly-schema" = "1.130.0" +"aws-cdk.core" = "1.130.0" +"aws-cdk.cx-api" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-events" -version = "1.124.0" +version = "1.130.0" description = "Amazon EventBridge Construct Library" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.core" = "1.124.0" +"aws-cdk.aws-iam" = "1.130.0" +"aws-cdk.core" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-glue" -version = "1.124.0" +version = "1.130.0" description = "The CDK Construct Library for AWS::Glue" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.assets" = "1.124.0" -"aws-cdk.aws-cloudwatch" = "1.124.0" -"aws-cdk.aws-ec2" = "1.124.0" -"aws-cdk.aws-events" = "1.124.0" -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.aws-kms" = "1.124.0" -"aws-cdk.aws-logs" = "1.124.0" -"aws-cdk.aws-s3" = "1.124.0" -"aws-cdk.aws-s3-assets" = "1.124.0" -"aws-cdk.core" = "1.124.0" +"aws-cdk.assets" = "1.130.0" +"aws-cdk.aws-cloudwatch" = "1.130.0" +"aws-cdk.aws-ec2" = "1.130.0" +"aws-cdk.aws-events" = "1.130.0" +"aws-cdk.aws-iam" = "1.130.0" +"aws-cdk.aws-kms" = "1.130.0" +"aws-cdk.aws-logs" = "1.130.0" +"aws-cdk.aws-s3" = "1.130.0" +"aws-cdk.aws-s3-assets" = "1.130.0" +"aws-cdk.core" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-iam" -version = "1.124.0" +version = "1.130.0" description = "CDK routines for easily assigning correct and minimal IAM permissions" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.core" = "1.124.0" -"aws-cdk.region-info" = "1.124.0" +"aws-cdk.core" = "1.130.0" +"aws-cdk.region-info" = "1.130.0" 
constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-kms" -version = "1.124.0" +version = "1.130.0" description = "The CDK Construct Library for AWS::KMS" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.cloud-assembly-schema" = "1.124.0" -"aws-cdk.core" = "1.124.0" -"aws-cdk.cx-api" = "1.124.0" +"aws-cdk.aws-iam" = "1.130.0" +"aws-cdk.cloud-assembly-schema" = "1.130.0" +"aws-cdk.core" = "1.130.0" +"aws-cdk.cx-api" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" +publication = ">=0.0.3" + +[[package]] +name = "aws-cdk.aws-lakeformation" +version = "1.130.0" +description = "The CDK Construct Library for AWS::LakeFormation" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +"aws-cdk.core" = "1.130.0" +constructs = ">=3.3.69,<4.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-lambda" -version = "1.124.0" +version = "1.130.0" description = "The CDK Construct Library for AWS::Lambda" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-applicationautoscaling" = "1.124.0" -"aws-cdk.aws-cloudwatch" = "1.124.0" -"aws-cdk.aws-codeguruprofiler" = "1.124.0" -"aws-cdk.aws-ec2" = "1.124.0" -"aws-cdk.aws-ecr" = "1.124.0" -"aws-cdk.aws-ecr-assets" = "1.124.0" -"aws-cdk.aws-efs" = "1.124.0" -"aws-cdk.aws-events" = "1.124.0" -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.aws-kms" = "1.124.0" -"aws-cdk.aws-logs" = "1.124.0" -"aws-cdk.aws-s3" = "1.124.0" -"aws-cdk.aws-s3-assets" = "1.124.0" -"aws-cdk.aws-signer" = "1.124.0" -"aws-cdk.aws-sqs" = "1.124.0" -"aws-cdk.core" = "1.124.0" -"aws-cdk.cx-api" = "1.124.0" -"aws-cdk.region-info" = "1.124.0" +"aws-cdk.aws-applicationautoscaling" = "1.130.0" +"aws-cdk.aws-cloudwatch" = "1.130.0" +"aws-cdk.aws-codeguruprofiler" 
= "1.130.0" +"aws-cdk.aws-ec2" = "1.130.0" +"aws-cdk.aws-ecr" = "1.130.0" +"aws-cdk.aws-ecr-assets" = "1.130.0" +"aws-cdk.aws-efs" = "1.130.0" +"aws-cdk.aws-events" = "1.130.0" +"aws-cdk.aws-iam" = "1.130.0" +"aws-cdk.aws-kms" = "1.130.0" +"aws-cdk.aws-logs" = "1.130.0" +"aws-cdk.aws-s3" = "1.130.0" +"aws-cdk.aws-s3-assets" = "1.130.0" +"aws-cdk.aws-signer" = "1.130.0" +"aws-cdk.aws-sqs" = "1.130.0" +"aws-cdk.core" = "1.130.0" +"aws-cdk.cx-api" = "1.130.0" +"aws-cdk.region-info" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-logs" -version = "1.124.0" +version = "1.130.0" description = "The CDK Construct Library for AWS::Logs" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-cloudwatch" = "1.124.0" -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.aws-kms" = "1.124.0" -"aws-cdk.aws-s3-assets" = "1.124.0" -"aws-cdk.core" = "1.124.0" +"aws-cdk.aws-cloudwatch" = "1.130.0" +"aws-cdk.aws-iam" = "1.130.0" +"aws-cdk.aws-kms" = "1.130.0" +"aws-cdk.aws-s3-assets" = "1.130.0" +"aws-cdk.core" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-opensearchservice" -version = "1.124.0" +version = "1.130.0" description = "The CDK Construct Library for AWS::OpenSearchService" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-certificatemanager" = "1.124.0" -"aws-cdk.aws-cloudwatch" = "1.124.0" -"aws-cdk.aws-ec2" = "1.124.0" -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.aws-kms" = "1.124.0" -"aws-cdk.aws-logs" = "1.124.0" -"aws-cdk.aws-route53" = "1.124.0" -"aws-cdk.aws-secretsmanager" = "1.124.0" -"aws-cdk.core" = "1.124.0" -"aws-cdk.custom-resources" = "1.124.0" +"aws-cdk.aws-certificatemanager" = "1.130.0" +"aws-cdk.aws-cloudwatch" = "1.130.0" +"aws-cdk.aws-ec2" = "1.130.0" +"aws-cdk.aws-iam" = 
"1.130.0" +"aws-cdk.aws-kms" = "1.130.0" +"aws-cdk.aws-logs" = "1.130.0" +"aws-cdk.aws-route53" = "1.130.0" +"aws-cdk.aws-secretsmanager" = "1.130.0" +"aws-cdk.core" = "1.130.0" +"aws-cdk.custom-resources" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-rds" -version = "1.124.0" +version = "1.130.0" description = "The CDK Construct Library for AWS::RDS" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-cloudwatch" = "1.124.0" -"aws-cdk.aws-ec2" = "1.124.0" -"aws-cdk.aws-events" = "1.124.0" -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.aws-kms" = "1.124.0" -"aws-cdk.aws-logs" = "1.124.0" -"aws-cdk.aws-s3" = "1.124.0" -"aws-cdk.aws-secretsmanager" = "1.124.0" -"aws-cdk.core" = "1.124.0" -"aws-cdk.cx-api" = "1.124.0" +"aws-cdk.aws-cloudwatch" = "1.130.0" +"aws-cdk.aws-ec2" = "1.130.0" +"aws-cdk.aws-events" = "1.130.0" +"aws-cdk.aws-iam" = "1.130.0" +"aws-cdk.aws-kms" = "1.130.0" +"aws-cdk.aws-logs" = "1.130.0" +"aws-cdk.aws-s3" = "1.130.0" +"aws-cdk.aws-secretsmanager" = "1.130.0" +"aws-cdk.core" = "1.130.0" +"aws-cdk.cx-api" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-redshift" -version = "1.124.0" +version = "1.130.0" description = "The CDK Construct Library for AWS::Redshift" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-ec2" = "1.124.0" -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.aws-kms" = "1.124.0" -"aws-cdk.aws-lambda" = "1.124.0" -"aws-cdk.aws-s3" = "1.124.0" -"aws-cdk.aws-secretsmanager" = "1.124.0" -"aws-cdk.core" = "1.124.0" -"aws-cdk.custom-resources" = "1.124.0" +"aws-cdk.aws-ec2" = "1.130.0" +"aws-cdk.aws-iam" = "1.130.0" +"aws-cdk.aws-kms" = "1.130.0" +"aws-cdk.aws-lambda" = "1.130.0" +"aws-cdk.aws-s3" = "1.130.0" +"aws-cdk.aws-secretsmanager" = "1.130.0" 
+"aws-cdk.core" = "1.130.0" +"aws-cdk.custom-resources" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-route53" -version = "1.124.0" +version = "1.130.0" description = "The CDK Construct Library for AWS::Route53" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-ec2" = "1.124.0" -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.aws-logs" = "1.124.0" -"aws-cdk.cloud-assembly-schema" = "1.124.0" -"aws-cdk.core" = "1.124.0" -"aws-cdk.custom-resources" = "1.124.0" +"aws-cdk.aws-ec2" = "1.130.0" +"aws-cdk.aws-iam" = "1.130.0" +"aws-cdk.aws-logs" = "1.130.0" +"aws-cdk.cloud-assembly-schema" = "1.130.0" +"aws-cdk.core" = "1.130.0" +"aws-cdk.custom-resources" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-s3" -version = "1.124.0" +version = "1.130.0" description = "The CDK Construct Library for AWS::S3" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-events" = "1.124.0" -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.aws-kms" = "1.124.0" -"aws-cdk.core" = "1.124.0" -"aws-cdk.cx-api" = "1.124.0" +"aws-cdk.aws-events" = "1.130.0" +"aws-cdk.aws-iam" = "1.130.0" +"aws-cdk.aws-kms" = "1.130.0" +"aws-cdk.core" = "1.130.0" +"aws-cdk.cx-api" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-s3-assets" -version = "1.124.0" +version = "1.130.0" description = "Deploy local files and directories to S3" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.assets" = "1.124.0" -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.aws-kms" = "1.124.0" -"aws-cdk.aws-s3" = "1.124.0" -"aws-cdk.core" = "1.124.0" -"aws-cdk.cx-api" = "1.124.0" +"aws-cdk.assets" = "1.130.0" 
+"aws-cdk.aws-iam" = "1.130.0" +"aws-cdk.aws-kms" = "1.130.0" +"aws-cdk.aws-s3" = "1.130.0" +"aws-cdk.core" = "1.130.0" +"aws-cdk.cx-api" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-sam" -version = "1.124.0" +version = "1.130.0" description = "The CDK Construct Library for the AWS Serverless Application Model (SAM) resources" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.core" = "1.124.0" +"aws-cdk.core" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-secretsmanager" -version = "1.124.0" +version = "1.130.0" description = "The CDK Construct Library for AWS::SecretsManager" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-ec2" = "1.124.0" -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.aws-kms" = "1.124.0" -"aws-cdk.aws-lambda" = "1.124.0" -"aws-cdk.aws-sam" = "1.124.0" -"aws-cdk.core" = "1.124.0" -"aws-cdk.cx-api" = "1.124.0" +"aws-cdk.aws-ec2" = "1.130.0" +"aws-cdk.aws-iam" = "1.130.0" +"aws-cdk.aws-kms" = "1.130.0" +"aws-cdk.aws-lambda" = "1.130.0" +"aws-cdk.aws-sam" = "1.130.0" +"aws-cdk.core" = "1.130.0" +"aws-cdk.cx-api" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-signer" -version = "1.124.0" +version = "1.130.0" description = "The CDK Construct Library for AWS::Signer" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.core" = "1.124.0" +"aws-cdk.core" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-sns" -version = "1.124.0" +version = "1.130.0" description = "The CDK Construct Library for AWS::SNS" category = "main" 
optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-cloudwatch" = "1.124.0" -"aws-cdk.aws-codestarnotifications" = "1.124.0" -"aws-cdk.aws-events" = "1.124.0" -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.aws-kms" = "1.124.0" -"aws-cdk.aws-sqs" = "1.124.0" -"aws-cdk.core" = "1.124.0" +"aws-cdk.aws-cloudwatch" = "1.130.0" +"aws-cdk.aws-codestarnotifications" = "1.130.0" +"aws-cdk.aws-events" = "1.130.0" +"aws-cdk.aws-iam" = "1.130.0" +"aws-cdk.aws-kms" = "1.130.0" +"aws-cdk.aws-sqs" = "1.130.0" +"aws-cdk.core" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-sqs" -version = "1.124.0" +version = "1.130.0" description = "The CDK Construct Library for AWS::SQS" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-cloudwatch" = "1.124.0" -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.aws-kms" = "1.124.0" -"aws-cdk.core" = "1.124.0" +"aws-cdk.aws-cloudwatch" = "1.130.0" +"aws-cdk.aws-iam" = "1.130.0" +"aws-cdk.aws-kms" = "1.130.0" +"aws-cdk.core" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.aws-ssm" -version = "1.124.0" +version = "1.130.0" description = "The CDK Construct Library for AWS::SSM" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.aws-kms" = "1.124.0" -"aws-cdk.cloud-assembly-schema" = "1.124.0" -"aws-cdk.core" = "1.124.0" +"aws-cdk.aws-iam" = "1.130.0" +"aws-cdk.aws-kms" = "1.130.0" +"aws-cdk.cloud-assembly-schema" = "1.130.0" +"aws-cdk.core" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.cloud-assembly-schema" -version = "1.124.0" +version = "1.130.0" description = "Cloud Assembly Schema" category = "main" optional = false 
python-versions = ">=3.6" [package.dependencies] -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.core" -version = "1.124.0" +version = "1.130.0" description = "AWS Cloud Development Kit Core Library" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.cloud-assembly-schema" = "1.124.0" -"aws-cdk.cx-api" = "1.124.0" -"aws-cdk.region-info" = "1.124.0" +"aws-cdk.cloud-assembly-schema" = "1.130.0" +"aws-cdk.cx-api" = "1.130.0" +"aws-cdk.region-info" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.custom-resources" -version = "1.124.0" +version = "1.130.0" description = "Constructs for implementing CDK custom resources" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.aws-cloudformation" = "1.124.0" -"aws-cdk.aws-ec2" = "1.124.0" -"aws-cdk.aws-iam" = "1.124.0" -"aws-cdk.aws-lambda" = "1.124.0" -"aws-cdk.aws-logs" = "1.124.0" -"aws-cdk.aws-sns" = "1.124.0" -"aws-cdk.core" = "1.124.0" +"aws-cdk.aws-cloudformation" = "1.130.0" +"aws-cdk.aws-ec2" = "1.130.0" +"aws-cdk.aws-iam" = "1.130.0" +"aws-cdk.aws-lambda" = "1.130.0" +"aws-cdk.aws-logs" = "1.130.0" +"aws-cdk.aws-sns" = "1.130.0" +"aws-cdk.core" = "1.130.0" constructs = ">=3.3.69,<4.0.0" -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.cx-api" -version = "1.124.0" +version = "1.130.0" description = "Cloud executable protocol" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -"aws-cdk.cloud-assembly-schema" = "1.124.0" -jsii = ">=1.34.0,<2.0.0" +"aws-cdk.cloud-assembly-schema" = "1.130.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "aws-cdk.region-info" -version = "1.124.0" +version = "1.130.0" description = "AWS region information, such as service principal names" 
category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -jsii = ">=1.34.0,<2.0.0" +jsii = ">=1.41.0,<2.0.0" publication = ">=0.0.3" [[package]] @@ -662,19 +676,19 @@ attrs = ">=20" [[package]] name = "constructs" -version = "3.3.101" +version = "3.3.161" description = "A programming model for composable configuration" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -jsii = ">=1.32.0,<2.0.0" +jsii = ">=1.37.0,<2.0.0" publication = ">=0.0.3" [[package]] name = "importlib-resources" -version = "5.2.0" +version = "5.4.0" description = "Read resources from Python packages" category = "main" optional = false @@ -685,11 +699,11 @@ zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"] [[package]] name = "jsii" -version = "1.34.0" +version = "1.42.0" description = "Python client for jsii runtime" category = "main" optional = false @@ -734,7 +748,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "typing-extensions" -version = "3.10.0.0" +version = "3.10.0.2" description = "Backported and Experimental Type Hints for Python 3.5+" category = "main" optional = false @@ -742,7 +756,7 @@ python-versions = "*" [[package]] name = "zipp" -version = "3.5.0" +version = "3.6.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false @@ -755,7 +769,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytes [metadata] lock-version = "1.1" python-versions = ">=3.6.2, <3.10" -content-hash = 
"6d95fccb052c85375178aa3ade72de9e4ee87c009d7e067dd7d4120c23ded9f5" +content-hash = "6d22ad86171a44206a94d9e9d051c12bb4caf0215a7af535ae5e7d371011afc1" [metadata.files] attrs = [ @@ -763,144 +777,148 @@ attrs = [ {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, ] "aws-cdk.assets" = [ - {file = "aws-cdk.assets-1.124.0.tar.gz", hash = "sha256:8097177806b29824a69bbdb5df9ec74f7b360708b51ed860613d38e30414054a"}, - {file = "aws_cdk.assets-1.124.0-py3-none-any.whl", hash = "sha256:c94b63e36c094111c6a9abb2a9d6c694f3e123034cf5dc23e5293fdc32c44fb3"}, + {file = "aws-cdk.assets-1.130.0.tar.gz", hash = "sha256:89628550ecfd4f2b3713cc515c5937ee766cc68cd39fc65dc15095a4fc92140f"}, + {file = "aws_cdk.assets-1.130.0-py3-none-any.whl", hash = "sha256:88ee75118c7b34506acac8a3390e0f6360227f95764749ecf0cb8160532fef8d"}, ] "aws-cdk.aws-applicationautoscaling" = [ - {file = "aws-cdk.aws-applicationautoscaling-1.124.0.tar.gz", hash = "sha256:c3bc89c2754b7ce029c667be9ab1633884bf574d33773a1dc07a3cff1b698670"}, - {file = "aws_cdk.aws_applicationautoscaling-1.124.0-py3-none-any.whl", hash = "sha256:d0dcc91b3de13ad46b874813877af3746adec3ad9f7380b2408a14cdd848b65c"}, + {file = "aws-cdk.aws-applicationautoscaling-1.130.0.tar.gz", hash = "sha256:c60000e6a2b86392efcfb32066207cff19adbdbe0b68e1ee4281cf5b52255b29"}, + {file = "aws_cdk.aws_applicationautoscaling-1.130.0-py3-none-any.whl", hash = "sha256:0bed99bbc03ae733450e03bd0c6b075fadc13ae0d9363fffa047e6de0d68be60"}, ] "aws-cdk.aws-autoscaling-common" = [ - {file = "aws-cdk.aws-autoscaling-common-1.124.0.tar.gz", hash = "sha256:03f57fcd34d9e370c0929de63c674bdbf2a8fbe2efed40942e0e2bff1ed1d436"}, - {file = "aws_cdk.aws_autoscaling_common-1.124.0-py3-none-any.whl", hash = "sha256:1969320c12bf4107346233b3310464c1e752b65a6577c865abb809711cec2c1f"}, + {file = "aws-cdk.aws-autoscaling-common-1.130.0.tar.gz", hash = "sha256:bdc5eee7f30163daf0a40b78e888d356da9815057153791daa6bc2b3d1288541"}, + 
{file = "aws_cdk.aws_autoscaling_common-1.130.0-py3-none-any.whl", hash = "sha256:46bd7dffa2ff4bcb2c3ee86e4881d7994924f23f76b59fb346cbc48a7e5b90e4"}, ] "aws-cdk.aws-certificatemanager" = [ - {file = "aws-cdk.aws-certificatemanager-1.124.0.tar.gz", hash = "sha256:291e7c29aa406619276dc141a3827b0af15c9a997b6e7dc1a8c59bbfb3aa7df7"}, - {file = "aws_cdk.aws_certificatemanager-1.124.0-py3-none-any.whl", hash = "sha256:23071000fe931dd817638b059991872fe93a91a1c1d33750f080c536e9aaf302"}, + {file = "aws-cdk.aws-certificatemanager-1.130.0.tar.gz", hash = "sha256:e95cb1c48e5b37afa10ff0bdac0c0793d276f0c00d26140c6707a1fb0db74dc8"}, + {file = "aws_cdk.aws_certificatemanager-1.130.0-py3-none-any.whl", hash = "sha256:23ceb0486f5e17ed230a651401ce9807faf3efffe793b0e1cef7e224f6ed25c9"}, ] "aws-cdk.aws-cloudformation" = [ - {file = "aws-cdk.aws-cloudformation-1.124.0.tar.gz", hash = "sha256:c38efe614113c3bdcb964f6c20742994154392bc78e82c34a299d0f1b26a7c65"}, - {file = "aws_cdk.aws_cloudformation-1.124.0-py3-none-any.whl", hash = "sha256:9b530359f567555b83dfbb99f7112fdb2ad893176032ff542ce09f7454ce5107"}, + {file = "aws-cdk.aws-cloudformation-1.130.0.tar.gz", hash = "sha256:c2176461dfd6bf46ad3143f9ca270e209e74d6a1f8d52e2260f4893b4b9ae228"}, + {file = "aws_cdk.aws_cloudformation-1.130.0-py3-none-any.whl", hash = "sha256:0509fc5b6b6a6bae3752fe04a4b3f24776254e28bc5688238602b904852bd2ec"}, ] "aws-cdk.aws-cloudwatch" = [ - {file = "aws-cdk.aws-cloudwatch-1.124.0.tar.gz", hash = "sha256:221734f8b6f940068714fe00fd68a8a32d767c713b2adb874365482836248f7f"}, - {file = "aws_cdk.aws_cloudwatch-1.124.0-py3-none-any.whl", hash = "sha256:a9a4abf58e31cb53872601296b41cf8e8d5106807a5775d19a6ac05fbe34bef0"}, + {file = "aws-cdk.aws-cloudwatch-1.130.0.tar.gz", hash = "sha256:1034ca75148e8292d014927911ba45cb18fad459371988ed32afd4b9de999449"}, + {file = "aws_cdk.aws_cloudwatch-1.130.0-py3-none-any.whl", hash = "sha256:10cbd1b7047267a6a1f566e7f1bfd1e85932a709bbca5419226d1b84b7e0a0ee"}, ] 
"aws-cdk.aws-codeguruprofiler" = [ - {file = "aws-cdk.aws-codeguruprofiler-1.124.0.tar.gz", hash = "sha256:e37cd801e5b7fa93a0dba84effc36cd94f090b83988c4f165815ba585f7ca866"}, - {file = "aws_cdk.aws_codeguruprofiler-1.124.0-py3-none-any.whl", hash = "sha256:4d4bd49ea2415d9daf7c3c57403060802e5f523bd476a276f1e00a3e3d73c15d"}, + {file = "aws-cdk.aws-codeguruprofiler-1.130.0.tar.gz", hash = "sha256:b9d9473a3e052e3164759c3f1ee694b7fc9d604c92b4a3df36c31a1a92306917"}, + {file = "aws_cdk.aws_codeguruprofiler-1.130.0-py3-none-any.whl", hash = "sha256:0462eb79554b407bed707eda3f840956ec81d442ddfad4a1e93da20c89152835"}, ] "aws-cdk.aws-codestarnotifications" = [ - {file = "aws-cdk.aws-codestarnotifications-1.124.0.tar.gz", hash = "sha256:478486be7e24e455c1fd8a54489de491005997b6ebdc06212a6231e89471414a"}, - {file = "aws_cdk.aws_codestarnotifications-1.124.0-py3-none-any.whl", hash = "sha256:de73fbcceba282ddf3caf5e74b188e4685108cec845f573986ea3fec1c98beba"}, + {file = "aws-cdk.aws-codestarnotifications-1.130.0.tar.gz", hash = "sha256:3c7f66d4c377e4f509b2719be4a2b1ac6efdbc4ab416eb56947a57ddd9290e27"}, + {file = "aws_cdk.aws_codestarnotifications-1.130.0-py3-none-any.whl", hash = "sha256:89b8a5374616e732475f374acb1f8b26de20721e0d939dda733f7135754848e3"}, ] "aws-cdk.aws-ec2" = [ - {file = "aws-cdk.aws-ec2-1.124.0.tar.gz", hash = "sha256:f7515734cac0ef8eeaa003bef85364c878fad4a90876de313d156cc863199811"}, - {file = "aws_cdk.aws_ec2-1.124.0-py3-none-any.whl", hash = "sha256:d000d22d87d887dfbc61b82be897234fc58f421b2fbbbc29f002b683b4fdac4f"}, + {file = "aws-cdk.aws-ec2-1.130.0.tar.gz", hash = "sha256:e0220bc03d44ad4e7f04c8efacd65c52c32faeac3d62a752d114e5606c47a6c2"}, + {file = "aws_cdk.aws_ec2-1.130.0-py3-none-any.whl", hash = "sha256:fde2b2252debcbdd309a74bf7f3c1b7aaa83a671511eb9f753105687e59cafc3"}, ] "aws-cdk.aws-ecr" = [ - {file = "aws-cdk.aws-ecr-1.124.0.tar.gz", hash = "sha256:cbf940fbb76eb189143df45f67115673faf10a4b8e7f571660822604c9016aad"}, - {file = 
"aws_cdk.aws_ecr-1.124.0-py3-none-any.whl", hash = "sha256:1661c6f8fd618ac75da7cdefd36adda747218e4fe27faa44b5df62ecabd0b3f3"}, + {file = "aws-cdk.aws-ecr-1.130.0.tar.gz", hash = "sha256:0c3aad603cc3f8e7cf2901d9a1365fe5110ff46f7d739b89333691219b186b92"}, + {file = "aws_cdk.aws_ecr-1.130.0-py3-none-any.whl", hash = "sha256:7a2f8720d2f23c3578979c53f486c66a8449e0fd8135c6e4f82d4bd653151ce9"}, ] "aws-cdk.aws-ecr-assets" = [ - {file = "aws-cdk.aws-ecr-assets-1.124.0.tar.gz", hash = "sha256:b2401b111474413436e664c1652d02d6e053ca946cbbe224a4f9c3c6220005df"}, - {file = "aws_cdk.aws_ecr_assets-1.124.0-py3-none-any.whl", hash = "sha256:7dc6b6f262baffa37df3ed898d8ae74ef2384793be822a91b91159cb512183ff"}, + {file = "aws-cdk.aws-ecr-assets-1.130.0.tar.gz", hash = "sha256:40ca779cde59bdc3fcd979385a2b87b8e5cb052e1a4ef76e43bd781458ea5ce3"}, + {file = "aws_cdk.aws_ecr_assets-1.130.0-py3-none-any.whl", hash = "sha256:ddf5078a87529b4e5c2216bb71579fc0489b4dcdab6e7d5246dd1e1d10263e29"}, ] "aws-cdk.aws-efs" = [ - {file = "aws-cdk.aws-efs-1.124.0.tar.gz", hash = "sha256:90aaccea5ff55ae4a3045540f78e007c048709e142d77947aa15ad655ed4c011"}, - {file = "aws_cdk.aws_efs-1.124.0-py3-none-any.whl", hash = "sha256:282db0bd269535fb19f0101d4fa6b9cb7cf7dcddf2eaf5d04d7f03fef156c9d0"}, + {file = "aws-cdk.aws-efs-1.130.0.tar.gz", hash = "sha256:8ed017fe4599bbfaa03dac74aa41cded39984813c8a6b14e280896aed0c8a39a"}, + {file = "aws_cdk.aws_efs-1.130.0-py3-none-any.whl", hash = "sha256:ccf15abb0711725620d478f7b53e58f2f6109b77f6c47c5878dc00d70e196827"}, ] "aws-cdk.aws-events" = [ - {file = "aws-cdk.aws-events-1.124.0.tar.gz", hash = "sha256:0b6b5ffca233c0b5d7abaf011072ca896463ce391242ffdf7bf4def28dec8213"}, - {file = "aws_cdk.aws_events-1.124.0-py3-none-any.whl", hash = "sha256:92ba680941365de0f90ad7881b8c2e787c50b85a69bc32e82b4578a3276f810f"}, + {file = "aws-cdk.aws-events-1.130.0.tar.gz", hash = "sha256:6ee24457c50eeda8c9c241596cfa6b123bb50ea2138787fef3e4bb54e4b47f13"}, + {file = 
"aws_cdk.aws_events-1.130.0-py3-none-any.whl", hash = "sha256:df91c72843d9734a49017040090b1615be41de020906a57e6c708d860e8a4139"}, ] "aws-cdk.aws-glue" = [ - {file = "aws-cdk.aws-glue-1.124.0.tar.gz", hash = "sha256:b43f747a2b8480ca848f7ab27b1dd0c7e352c9602fdb039cfc78f5013dbef450"}, - {file = "aws_cdk.aws_glue-1.124.0-py3-none-any.whl", hash = "sha256:d90bc85ae0d6b03536879d6fa72cdc49cfe1d58451b9e0065786b682dc2f9422"}, + {file = "aws-cdk.aws-glue-1.130.0.tar.gz", hash = "sha256:4ddda00ad580ffe207f2241a3cc66ab6c5a225580a9daa6adcd03c3299017d9a"}, + {file = "aws_cdk.aws_glue-1.130.0-py3-none-any.whl", hash = "sha256:93f136d74b866619bd3aec2086b5a2c2b930acfaad7cc23cfa2f0b2a2eb85f90"}, ] "aws-cdk.aws-iam" = [ - {file = "aws-cdk.aws-iam-1.124.0.tar.gz", hash = "sha256:9d779439048832c6f4d5722196a9490d80bb649e56bb4dadc554ea3ae940f797"}, - {file = "aws_cdk.aws_iam-1.124.0-py3-none-any.whl", hash = "sha256:249fc537532f73c3cd3f59dc635be58535d9e9f9418062214eb664e14b59a6be"}, + {file = "aws-cdk.aws-iam-1.130.0.tar.gz", hash = "sha256:d2bf02a2d3f2bd81c1b9598e7b4424b0dc0d4694b57338d7efac43a89fb6409c"}, + {file = "aws_cdk.aws_iam-1.130.0-py3-none-any.whl", hash = "sha256:3a3272745da9363177ebd8b138f42ce9407439f909ed9177c226e584022f4ff0"}, ] "aws-cdk.aws-kms" = [ - {file = "aws-cdk.aws-kms-1.124.0.tar.gz", hash = "sha256:205e79bc8f8e009bd1b5df236f0336e977eb141c70575a42080e36829358215f"}, - {file = "aws_cdk.aws_kms-1.124.0-py3-none-any.whl", hash = "sha256:91294f10f02000743eef712da5ba7ea2749b43e4a0ad7d4715c9c95b6a472c10"}, + {file = "aws-cdk.aws-kms-1.130.0.tar.gz", hash = "sha256:1ece4b6753b0271d9164b32c0c94919e2f2a587677b19c554c2a990b5b0803b7"}, + {file = "aws_cdk.aws_kms-1.130.0-py3-none-any.whl", hash = "sha256:de50127ab5f5f3838b6e4e549696ccfcd2cf18f7edd50616f82b1a0ddcd10075"}, +] +"aws-cdk.aws-lakeformation" = [ + {file = "aws-cdk.aws-lakeformation-1.130.0.tar.gz", hash = "sha256:bdf37b0047ed48c4fa70c5a9398b596f278a73abf4b912b6eb289fa8aeb96ca7"}, + {file = 
"aws_cdk.aws_lakeformation-1.130.0-py3-none-any.whl", hash = "sha256:5bcd04992577dc2b67d437e0d73b3367e3b57315859a5c9426f15501db049151"}, ] "aws-cdk.aws-lambda" = [ - {file = "aws-cdk.aws-lambda-1.124.0.tar.gz", hash = "sha256:801552637c408a693a7b13967da4ec4e8a623f22b90fb0fdfb845c23765e4e29"}, - {file = "aws_cdk.aws_lambda-1.124.0-py3-none-any.whl", hash = "sha256:50d774d026a8a0ca5089df5c8b2c7cc2ef74db2a4b20c5d049210b154d3af03d"}, + {file = "aws-cdk.aws-lambda-1.130.0.tar.gz", hash = "sha256:c3ee7c637f1a590ead83e75803865f58c0c18193ff841d94b0a0b51ea1e9d6fb"}, + {file = "aws_cdk.aws_lambda-1.130.0-py3-none-any.whl", hash = "sha256:6c8dec3aad5d3900888aab52b0a844d3c05e94f977ff04ec26083302cc76edc8"}, ] "aws-cdk.aws-logs" = [ - {file = "aws-cdk.aws-logs-1.124.0.tar.gz", hash = "sha256:2fba565fc4f12b397bd9df1cd9964c1b35ce1ca65cd618407b6b1777bc43d292"}, - {file = "aws_cdk.aws_logs-1.124.0-py3-none-any.whl", hash = "sha256:1f4b1ff436f2d0663e6c76264d7d6ee9dd0d90f3d9c09e5e93f1b0f31abbc379"}, + {file = "aws-cdk.aws-logs-1.130.0.tar.gz", hash = "sha256:d022ec78f953f1276d710e903ee75857fe86a05b1f44f1610ac4d52b8652ddfc"}, + {file = "aws_cdk.aws_logs-1.130.0-py3-none-any.whl", hash = "sha256:da8ff0e9ed334bb4bc34cac698ad46ae8e815c7e9018e3754c9a342b84f26bbb"}, ] "aws-cdk.aws-opensearchservice" = [ - {file = "aws-cdk.aws-opensearchservice-1.124.0.tar.gz", hash = "sha256:d1bd4ca9ac9cf38b7c04a5e1e63eefe30e6e5e40adc0134e61d468694c71c4b1"}, - {file = "aws_cdk.aws_opensearchservice-1.124.0-py3-none-any.whl", hash = "sha256:170417a55884ac8f26b0ae4cc59c085c8c2a0607b18ca906c1ee4d366b737d85"}, + {file = "aws-cdk.aws-opensearchservice-1.130.0.tar.gz", hash = "sha256:4194f91d28b50a4dc7b97d773871798a79bd93774146cfb8d2fe0ad30030328b"}, + {file = "aws_cdk.aws_opensearchservice-1.130.0-py3-none-any.whl", hash = "sha256:b4bb3b0a80f883aeeae79417ef45c5fc1f46abd05dfa9c46bd02476d5083af39"}, ] "aws-cdk.aws-rds" = [ - {file = "aws-cdk.aws-rds-1.124.0.tar.gz", hash = 
"sha256:20057fc95cda55fc504987dc0395062836dacc72efce2c86051677a1bb6d8d43"}, - {file = "aws_cdk.aws_rds-1.124.0-py3-none-any.whl", hash = "sha256:bd66c0f76548cee6fb1f100f0e36ab9d5933ef70121b072ae05b3dd26e408ff3"}, + {file = "aws-cdk.aws-rds-1.130.0.tar.gz", hash = "sha256:316abaa5786703bf1459f538d8d1bcc02f5b4c75df320fe2e9d62821f92fa7f4"}, + {file = "aws_cdk.aws_rds-1.130.0-py3-none-any.whl", hash = "sha256:a781ca1b945f655797f06106eb72142be4d1d6b9278e707a29a7e75d7e8dea73"}, ] "aws-cdk.aws-redshift" = [ - {file = "aws-cdk.aws-redshift-1.124.0.tar.gz", hash = "sha256:70cb4700cdfecad592524cd017a4a859b3d4ae407b3d2fcf329022c1d2faf863"}, - {file = "aws_cdk.aws_redshift-1.124.0-py3-none-any.whl", hash = "sha256:4df5c19f74194fb9bd7a56e5b89b9312c35b681a322b0c1b0e248874f628ddc4"}, + {file = "aws-cdk.aws-redshift-1.130.0.tar.gz", hash = "sha256:7447af727af2ff2014aad2d04a96ef70ffc6e65142d575dffb762cd147067e06"}, + {file = "aws_cdk.aws_redshift-1.130.0-py3-none-any.whl", hash = "sha256:e60832a9a042eaeeb646769a40753a82b807dc1154df58c20d524010e361c5b0"}, ] "aws-cdk.aws-route53" = [ - {file = "aws-cdk.aws-route53-1.124.0.tar.gz", hash = "sha256:c5137b3c5211632b931d7b79234aec6006f72701c68477086e70c213320639ef"}, - {file = "aws_cdk.aws_route53-1.124.0-py3-none-any.whl", hash = "sha256:97fe84e53c26c1a713a3b57341c2ecf488db56cc0b6127975656c53206ccd471"}, + {file = "aws-cdk.aws-route53-1.130.0.tar.gz", hash = "sha256:6d1a209505e794922718cbf2f8f432f8d51b305da63ad4f10008b8f1f535f526"}, + {file = "aws_cdk.aws_route53-1.130.0-py3-none-any.whl", hash = "sha256:270877be4a1469f84c3022300baba2b982cd1644b4ea01d65fb0522adcf9b822"}, ] "aws-cdk.aws-s3" = [ - {file = "aws-cdk.aws-s3-1.124.0.tar.gz", hash = "sha256:3047305a4e013cb796532027c14908003ffe7af95fe8e214e3470a32a11c09e6"}, - {file = "aws_cdk.aws_s3-1.124.0-py3-none-any.whl", hash = "sha256:0b08821e3b79c26110068f54aabdb938da55b562dcf2a28a7171d930334ce71a"}, + {file = "aws-cdk.aws-s3-1.130.0.tar.gz", hash = 
"sha256:940bcb081783937e774cf4f44f77ba7a8211ebe9440cca2d7225b310f4272f79"}, + {file = "aws_cdk.aws_s3-1.130.0-py3-none-any.whl", hash = "sha256:9fac2a150adf92700c05a02c603d0ff1185894235443980fafc874354c380f52"}, ] "aws-cdk.aws-s3-assets" = [ - {file = "aws-cdk.aws-s3-assets-1.124.0.tar.gz", hash = "sha256:568d4c598319e3bf1869536be0586b1004d3c43c2133ba94bf9cda4ad4ae5d5d"}, - {file = "aws_cdk.aws_s3_assets-1.124.0-py3-none-any.whl", hash = "sha256:125c5e3786f2c233512374080553b2a7592efa6a53203764979a1bb987c47338"}, + {file = "aws-cdk.aws-s3-assets-1.130.0.tar.gz", hash = "sha256:db33b348222895ad14cb9d52d5582b1e80d0e9ff008f8c10ea912499ab7c14f1"}, + {file = "aws_cdk.aws_s3_assets-1.130.0-py3-none-any.whl", hash = "sha256:01a5b0f2c759a88176929569c6f69d0efb8901452fe112cfd3b3f4782fec12ab"}, ] "aws-cdk.aws-sam" = [ - {file = "aws-cdk.aws-sam-1.124.0.tar.gz", hash = "sha256:39db01a4d88fd05c57dbc4f0c76c2471eab3e75753febc30f2847c546fa8292b"}, - {file = "aws_cdk.aws_sam-1.124.0-py3-none-any.whl", hash = "sha256:b1ca75d2fb13898ed66cd4ee364cfa0b4f0924ab4583994ec4a7200d10c8c71b"}, + {file = "aws-cdk.aws-sam-1.130.0.tar.gz", hash = "sha256:564877af10684b99a76d7ae83b888f9dfc1f7894caed81d5349a059f51430836"}, + {file = "aws_cdk.aws_sam-1.130.0-py3-none-any.whl", hash = "sha256:dbd38e5e52b5f94aff76bc18640e8ba11ae0d0b183867f747942c753935bf326"}, ] "aws-cdk.aws-secretsmanager" = [ - {file = "aws-cdk.aws-secretsmanager-1.124.0.tar.gz", hash = "sha256:76d3ded9f20d29520d4e54e15c335718cac4f938aacb4827a2a9f98af417576f"}, - {file = "aws_cdk.aws_secretsmanager-1.124.0-py3-none-any.whl", hash = "sha256:0b6ae44966600943eb66fc48a93a0ae2bac60c8d6a5ff9c687ad9675b9f2bc5f"}, + {file = "aws-cdk.aws-secretsmanager-1.130.0.tar.gz", hash = "sha256:96e52bd3e6523b22f1d60aadeb0b6f435a5276a1ec794e4cfe2294f8ac26259a"}, + {file = "aws_cdk.aws_secretsmanager-1.130.0-py3-none-any.whl", hash = "sha256:a929ef9fea760b37d5306a1ee9deeecbac2530ab2ea7ec1fc1085544e6af1ca0"}, ] "aws-cdk.aws-signer" = [ - {file = 
"aws-cdk.aws-signer-1.124.0.tar.gz", hash = "sha256:96dd4ae63b43c7c12fde59f7ebbbea1895964a5f08c6e2ca4a2a1062abcc2399"}, - {file = "aws_cdk.aws_signer-1.124.0-py3-none-any.whl", hash = "sha256:2fe614e6ce1ea6259d60f3adced41eaefdeace0cf77d961b5fcef815e1f82428"}, + {file = "aws-cdk.aws-signer-1.130.0.tar.gz", hash = "sha256:f453d608a491dd0ff7d97fa597f17480d3bf43a0eaedd975e0846bf03de0ab0d"}, + {file = "aws_cdk.aws_signer-1.130.0-py3-none-any.whl", hash = "sha256:10a5981156c83c8725f565931167b376db24c08d43b325a8ad0e4a10559b32df"}, ] "aws-cdk.aws-sns" = [ - {file = "aws-cdk.aws-sns-1.124.0.tar.gz", hash = "sha256:21e838c52cdd9bdcd98fc0fbe16ffad2bf10ba6bf31c5bfcdd9f49a8b3479d0c"}, - {file = "aws_cdk.aws_sns-1.124.0-py3-none-any.whl", hash = "sha256:cb3820fd79643d1c5fb0b69f2b4755900dd16756af0f4c36706d68220a845d8b"}, + {file = "aws-cdk.aws-sns-1.130.0.tar.gz", hash = "sha256:a2494dd42513b870ef94c0f013e734473fb8a02042b21da5864e3b8bd6609963"}, + {file = "aws_cdk.aws_sns-1.130.0-py3-none-any.whl", hash = "sha256:7b6dfc5c50cdc0005caac683731772502a9d26d6ef415256f21746bef0b7b444"}, ] "aws-cdk.aws-sqs" = [ - {file = "aws-cdk.aws-sqs-1.124.0.tar.gz", hash = "sha256:ffed4754784de29473f554e450c6ec1b96c7508a2706406fe8d6442f2a31c58c"}, - {file = "aws_cdk.aws_sqs-1.124.0-py3-none-any.whl", hash = "sha256:382721ca5d82dce9ec2625e5bae26132151748ee60e1269a0aa91cfd03227ee7"}, + {file = "aws-cdk.aws-sqs-1.130.0.tar.gz", hash = "sha256:baef9bfc74c33ad5e9ff65a4d48477f68fb503950d58d21e9cc657e8a9914c0f"}, + {file = "aws_cdk.aws_sqs-1.130.0-py3-none-any.whl", hash = "sha256:bd40f528012fd38398dd7cc6a8c91c62da634e2e620ecfa6530ae43a5d1890b5"}, ] "aws-cdk.aws-ssm" = [ - {file = "aws-cdk.aws-ssm-1.124.0.tar.gz", hash = "sha256:bcfc99a5cdf23849503c72d93b9e5734d11976453004f13ebca2a66aeb3df10c"}, - {file = "aws_cdk.aws_ssm-1.124.0-py3-none-any.whl", hash = "sha256:4d7335c2ce0200c1ed347422139c9d9b07c71297253ba911470114277996cc76"}, + {file = "aws-cdk.aws-ssm-1.130.0.tar.gz", hash = 
"sha256:2c0a2e400b82864233e76973020dc16e88afc35aa0ef4dd5250d0404e1236de0"}, + {file = "aws_cdk.aws_ssm-1.130.0-py3-none-any.whl", hash = "sha256:dd84d306f4794433b921f75081d3db41dfe6fdc6078bfa377a096a1457adc9a9"}, ] "aws-cdk.cloud-assembly-schema" = [ - {file = "aws-cdk.cloud-assembly-schema-1.124.0.tar.gz", hash = "sha256:d2989a6742ad988fa0f7085ab67fb7ced14f4c3b1a98cc0bf4a0ea1a9358667c"}, - {file = "aws_cdk.cloud_assembly_schema-1.124.0-py3-none-any.whl", hash = "sha256:77d3f63629b7213c639ffd4c46eb63ce9dd048e9a91a045afa72dcce9576ee6b"}, + {file = "aws-cdk.cloud-assembly-schema-1.130.0.tar.gz", hash = "sha256:31231d1fa14037f2af0a0a27657c7e603103c876464868bb8a5731698dba9d7f"}, + {file = "aws_cdk.cloud_assembly_schema-1.130.0-py3-none-any.whl", hash = "sha256:3eadde99a914ca53e101e66a403b554537435a29e1954cb13e94cdc9305da48a"}, ] "aws-cdk.core" = [ - {file = "aws-cdk.core-1.124.0.tar.gz", hash = "sha256:bbdc1cf5affc34d0caa549771dc6b41ce467744f8ca727b215f0d89b853f4f0c"}, - {file = "aws_cdk.core-1.124.0-py3-none-any.whl", hash = "sha256:56c4549161029c707aa527882e4741fca1ef4c46f63a6417e56e968710cfba7c"}, + {file = "aws-cdk.core-1.130.0.tar.gz", hash = "sha256:d07b98dad35b18481e46b92b6fde7061b76730ac9d1111849db321e519ebdc52"}, + {file = "aws_cdk.core-1.130.0-py3-none-any.whl", hash = "sha256:7b3f1d0e9f83263763694cfb814346c38984041226180fe298056670fa5a5bd9"}, ] "aws-cdk.custom-resources" = [ - {file = "aws-cdk.custom-resources-1.124.0.tar.gz", hash = "sha256:d2be1a1636b65e275521970b9c9accd02718f678ebb074a580b15b695e4b60d5"}, - {file = "aws_cdk.custom_resources-1.124.0-py3-none-any.whl", hash = "sha256:6c9abcc046a92dc6845c8a81e33ac727da95e0c0d95b3fba0d433de7dae10a61"}, + {file = "aws-cdk.custom-resources-1.130.0.tar.gz", hash = "sha256:c212447b64f79d3605db6e072d23acc6fa1135e5399162a8cd258bc1d22e03e2"}, + {file = "aws_cdk.custom_resources-1.130.0-py3-none-any.whl", hash = "sha256:07c8a6c99bfe53d251303a7cf50b109fa974ddfd2fdbd22f3e94534271a2f666"}, ] "aws-cdk.cx-api" = [ - {file 
= "aws-cdk.cx-api-1.124.0.tar.gz", hash = "sha256:b8ad4e1a2a5545dd256b50d36efb6d59b9b89b4b1034e7b7f9edfdaa476b181b"}, - {file = "aws_cdk.cx_api-1.124.0-py3-none-any.whl", hash = "sha256:64b6f3ba0313cdea9963f9d210932cf770366a9d860520e1f15e64a26e97c5d6"}, + {file = "aws-cdk.cx-api-1.130.0.tar.gz", hash = "sha256:3640cdc3c34566bbd0f32fd899fd5ea969d266d0efcd14f67784e557d2c7192c"}, + {file = "aws_cdk.cx_api-1.130.0-py3-none-any.whl", hash = "sha256:26b425e11e0718f531b6578e0621f141089ec1946ccfa124f929ae932f8340a6"}, ] "aws-cdk.region-info" = [ - {file = "aws-cdk.region-info-1.124.0.tar.gz", hash = "sha256:c28d31226f9000db1375044ea22ba496cc75e8c3db6aa1493a687ff0f89ccdae"}, - {file = "aws_cdk.region_info-1.124.0-py3-none-any.whl", hash = "sha256:594b5f275766b22864e6111f194cfe7a12713ffc61963d063ce06812fa484728"}, + {file = "aws-cdk.region-info-1.130.0.tar.gz", hash = "sha256:f5534c3c02cc25215cca2d74aee4dc70cd34b35d86550415a085db65851b135e"}, + {file = "aws_cdk.region_info-1.130.0-py3-none-any.whl", hash = "sha256:2d4110779dd87f405270bfb31c73f315898698af04ec23b8069cc444d0bd896e"}, ] cattrs = [ {file = "cattrs-1.0.0-py2.py3-none-any.whl", hash = "sha256:616972ae3dfa6e623a40ad3cb845420e64942989152774ab055e5c2b2f89f997"}, @@ -909,16 +927,16 @@ cattrs = [ {file = "cattrs-1.8.0.tar.gz", hash = "sha256:5c121ab06a7cac494813c228721a7feb5a6423b17316eeaebf13f5a03e5b0d53"}, ] constructs = [ - {file = "constructs-3.3.101-py3-none-any.whl", hash = "sha256:0605ea091dda433f0915ba5b3c74bf967d90fb0cf975a5c3b34a7150a3cf48d1"}, - {file = "constructs-3.3.101.tar.gz", hash = "sha256:993fea0b33556e7fa6ebe495493aba379e9f7aa781803df796c5bd08527dbc67"}, + {file = "constructs-3.3.161-py3-none-any.whl", hash = "sha256:3215f2a3628584ad8e6a5ebabf4e1cc0b125367f2347e6fa0d9ccfd735ac2bbb"}, + {file = "constructs-3.3.161.tar.gz", hash = "sha256:2b33c412ff0f1d21205d85f778e4594a35c9c98b65cb47fea7533fbe40de1730"}, ] importlib-resources = [ - {file = "importlib_resources-5.2.0-py3-none-any.whl", hash = 
"sha256:a0143290bef3cbc99de9e40176e4987780939a955b8632f02ce6c935f42e9bfc"}, - {file = "importlib_resources-5.2.0.tar.gz", hash = "sha256:22a2c42d8c6a1d30aa8a0e1f57293725bfd5c013d562585e46aff469e0ff78b3"}, + {file = "importlib_resources-5.4.0-py3-none-any.whl", hash = "sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"}, + {file = "importlib_resources-5.4.0.tar.gz", hash = "sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b"}, ] jsii = [ - {file = "jsii-1.34.0-py3-none-any.whl", hash = "sha256:d0a703d0d44bf78bb90529699599d2a58a68ca764f996808e97eafc68e2467de"}, - {file = "jsii-1.34.0.tar.gz", hash = "sha256:e72ba5fafabdd5b6a3a65bd2cf42302eb87f2fe7c6339bddb808226a91623654"}, + {file = "jsii-1.42.0-py3-none-any.whl", hash = "sha256:29a4c87c8e1ad7eb67b65b03775f37bdd2212088a1eb854e84f5b541b9eaceb4"}, + {file = "jsii-1.42.0.tar.gz", hash = "sha256:44a1874464c3c9b48417523d5a4790ee792dab6e6f522bc6e6e2c84e42417323"}, ] publication = [ {file = "publication-0.0.3-py2.py3-none-any.whl", hash = "sha256:0248885351febc11d8a1098d5c8e3ab2dabcf3e8c0c96db1e17ecd12b53afbe6"}, @@ -933,11 +951,11 @@ six = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] typing-extensions = [ - {file = "typing_extensions-3.10.0.0-py2-none-any.whl", hash = "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497"}, - {file = "typing_extensions-3.10.0.0-py3-none-any.whl", hash = "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"}, - {file = "typing_extensions-3.10.0.0.tar.gz", hash = "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342"}, + {file = "typing_extensions-3.10.0.2-py2-none-any.whl", hash = "sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7"}, + {file = "typing_extensions-3.10.0.2-py3-none-any.whl", hash = "sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34"}, + {file = 
"typing_extensions-3.10.0.2.tar.gz", hash = "sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e"}, ] zipp = [ - {file = "zipp-3.5.0-py3-none-any.whl", hash = "sha256:957cfda87797e389580cb8b9e3870841ca991e2125350677b2ca83a0e99390a3"}, - {file = "zipp-3.5.0.tar.gz", hash = "sha256:f5812b1e007e48cff63449a5e9f4e7ebea716b4111f9c4f9a645f91d579bf0c4"}, + {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, + {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"}, ] diff --git a/test_infra/pyproject.toml b/test_infra/pyproject.toml index 8cc331b1a..f8dc720fc 100644 --- a/test_infra/pyproject.toml +++ b/test_infra/pyproject.toml @@ -2,7 +2,7 @@ name = "awswrangler - test infrastructure" version = "2.12.1" description = "CDK test infrastructure for AWS" -authors = ["Igor Tavares"] +authors = ["Amazon Web Services"] license = "Apache License 2.0" [tool.poetry.dependencies] @@ -19,3 +19,4 @@ python = ">=3.6.2, <3.10" "aws-cdk.aws-secretsmanager" = "^1.124.0" "aws-cdk.aws-ssm" = "^1.124.0" "aws-cdk.aws-opensearchservice" = "^1.124.0" +"aws-cdk.aws-lakeformation" = "^1.124.0" diff --git a/test_infra/scripts/delete-base.sh b/test_infra/scripts/delete-base.sh deleted file mode 100755 index 1edd3dd27..000000000 --- a/test_infra/scripts/delete-base.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env bash -set -e - -pushd .. -cdk destroy aws-data-wrangler-base -popd \ No newline at end of file diff --git a/test_infra/scripts/delete-databases.sh b/test_infra/scripts/delete-databases.sh deleted file mode 100755 index 31d97451f..000000000 --- a/test_infra/scripts/delete-databases.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env bash -set -e - -pushd .. 
-cdk destroy aws-data-wrangler-databases -popd diff --git a/test_infra/scripts/delete-opensearch.sh b/test_infra/scripts/delete-opensearch.sh deleted file mode 100755 index 1c1c01ba2..000000000 --- a/test_infra/scripts/delete-opensearch.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env bash -set -e - -pushd .. -cdk destroy aws-data-wrangler-opensearch -popd diff --git a/test_infra/scripts/delete-stack.sh b/test_infra/scripts/delete-stack.sh new file mode 100755 index 000000000..7980db591 --- /dev/null +++ b/test_infra/scripts/delete-stack.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash +set -e +STACK=${1} + +pushd .. +cdk destroy aws-data-wrangler-${STACK} +popd \ No newline at end of file diff --git a/test_infra/scripts/deploy-base.sh b/test_infra/scripts/deploy-base.sh deleted file mode 100755 index 5dd7db64f..000000000 --- a/test_infra/scripts/deploy-base.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env bash -set -e - -pushd .. -cdk bootstrap -cdk deploy aws-data-wrangler-base -popd diff --git a/test_infra/scripts/deploy-databases.sh b/test_infra/scripts/deploy-databases.sh deleted file mode 100755 index b5e1f4208..000000000 --- a/test_infra/scripts/deploy-databases.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env bash -set -e - -pushd .. -cdk bootstrap -cdk deploy aws-data-wrangler-databases -popd diff --git a/test_infra/scripts/deploy-opensearch.sh b/test_infra/scripts/deploy-opensearch.sh deleted file mode 100755 index e94818af4..000000000 --- a/test_infra/scripts/deploy-opensearch.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env bash -set -e - -pushd .. -cdk bootstrap -cdk deploy aws-data-wrangler-opensearch -popd diff --git a/test_infra/scripts/deploy-stack.sh b/test_infra/scripts/deploy-stack.sh new file mode 100755 index 000000000..cdfbf59ca --- /dev/null +++ b/test_infra/scripts/deploy-stack.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash +set -e +STACK=${1} + +pushd .. 
+cdk bootstrap +cdk deploy aws-data-wrangler-${STACK} +popd \ No newline at end of file diff --git a/test_infra/stacks/base_stack.py b/test_infra/stacks/base_stack.py index cd784b569..0f30db19d 100644 --- a/test_infra/stacks/base_stack.py +++ b/test_infra/stacks/base_stack.py @@ -64,6 +64,7 @@ def __init__(self, scope: cdk.Construct, construct_id: str, **kwargs: str) -> No self, id="aws_data_wrangler_glue_database", database_name="aws_data_wrangler", + location_uri=f"s3://{self.bucket.bucket_name}", ) log_group = logs.LogGroup( self, diff --git a/test_infra/stacks/databases_stack.py b/test_infra/stacks/databases_stack.py index 6d75a6c08..61e1ab688 100644 --- a/test_infra/stacks/databases_stack.py +++ b/test_infra/stacks/databases_stack.py @@ -4,6 +4,7 @@ from aws_cdk import aws_glue as glue from aws_cdk import aws_iam as iam from aws_cdk import aws_kms as kms +from aws_cdk import aws_lakeformation as lf from aws_cdk import aws_rds as rds from aws_cdk import aws_redshift as redshift from aws_cdk import aws_s3 as s3 @@ -60,7 +61,7 @@ def _set_db_infra(self) -> None: self.db_security_group.add_ingress_rule(ec2.Peer.any_ipv4(), ec2.Port.all_traffic()) ssm.StringParameter( self, - "db-secruity-group-parameter", + "db-security-group-parameter", parameter_name="/Wrangler/EC2/DatabaseSecurityGroupId", string_value=self.db_security_group.security_group_id, ) @@ -161,7 +162,12 @@ def _setup_redshift(self) -> None: iam.PolicyStatement( effect=iam.Effect.ALLOW, actions=[ + "lakeformation:GetDataAccess", "lakeformation:GrantPermissions", + "lakeformation:GetWorkUnits", + "lakeformation:StartQueryPlanning", + "lakeformation:GetWorkUnitResults", + "lakeformation:GetQueryState", ], resources=["*"], ) @@ -196,6 +202,20 @@ def _setup_redshift(self) -> None: ), }, ) + lf.CfnPermissions( + self, + "CodeBuildTestRoleLFPermissions", + data_lake_principal=lf.CfnPermissions.DataLakePrincipalProperty( + data_lake_principal_identifier=redshift_role.role_arn + ), + 
resource=lf.CfnPermissions.ResourceProperty( + table_resource=lf.CfnPermissions.TableResourceProperty( + database_name="aws_data_wrangler", + table_wildcard={}, # type: ignore + ) + ), + permissions=["SELECT", "ALTER", "DESCRIBE", "DROP", "DELETE", "INSERT"], + ) redshift.ClusterSubnetGroup( self, "aws-data-wrangler-redshift-subnet-group", diff --git a/test_infra/stacks/lakeformation_stack.py b/test_infra/stacks/lakeformation_stack.py new file mode 100644 index 000000000..5bc65b4af --- /dev/null +++ b/test_infra/stacks/lakeformation_stack.py @@ -0,0 +1,93 @@ +from aws_cdk import aws_iam as iam +from aws_cdk import aws_lakeformation as lf +from aws_cdk import aws_s3 as s3 +from aws_cdk import core as cdk + + +class LakeFormationStack(cdk.Stack): # type: ignore + def __init__( + self, + scope: cdk.Construct, + construct_id: str, + **kwargs: str, + ) -> None: + """ + AWS Data Wrangler Development LakeFormation Infrastructure. + """ + super().__init__(scope, construct_id, **kwargs) + + self._set_lakeformation_infra() + + def _set_lakeformation_infra(self) -> None: + bucket = s3.Bucket.from_bucket_name( + self, "aws-data-wrangler-bucket", bucket_name=cdk.Fn.import_value("aws-data-wrangler-base-BucketName") + ) + + transaction_role = iam.Role( + self, + "aws-data-wrangler-lf-transaction-role", + assumed_by=iam.ServicePrincipal("lakeformation.amazonaws.com"), + inline_policies={ + "Root": iam.PolicyDocument( + statements=[ + iam.PolicyStatement( + effect=iam.Effect.ALLOW, + actions=[ + "s3:DeleteObject", + "s3:GetObject", + "s3:PutObject", + ], + resources=[ + f"{bucket.bucket_arn}/*", + ], + ), + iam.PolicyStatement( + effect=iam.Effect.ALLOW, + actions=[ + "s3:ListBucket", + ], + resources=[ + f"{bucket.bucket_arn}", + ], + ), + iam.PolicyStatement( + effect=iam.Effect.ALLOW, + actions=[ + "execute-api:Invoke", + ], + resources=[ + f"arn:{self.partition}:execute-api:*:*:*/*/POST/reportStatus", + ], + ), + iam.PolicyStatement( + effect=iam.Effect.ALLOW, + actions=[ + 
"lakeformation:CancelTransaction", + "lakeformation:CommitTransaction", + "lakeformation:GetTableObjects", + "lakeformation:StartTransaction", + "lakeformation:UpdateTableObjects", + ], + resources=["*"], + ), + iam.PolicyStatement( + effect=iam.Effect.ALLOW, + actions=[ + "glue:GetPartitions", + "glue:GetTable", + "glue:UpdateTable", + ], + resources=["*"], + ), + ] + ), + }, + ) + + lf.CfnResource( + self, + "aws-data-wrangler-bucket-lf-registration", + resource_arn=bucket.bucket_arn, + use_service_linked_role=False, + role_arn=transaction_role.role_arn, + ) diff --git a/tests/_utils.py b/tests/_utils.py index 5f74c4e83..dd348562f 100644 --- a/tests/_utils.py +++ b/tests/_utils.py @@ -17,7 +17,7 @@ CFN_VALID_STATUS = ["CREATE_COMPLETE", "ROLLBACK_COMPLETE", "UPDATE_COMPLETE", "UPDATE_ROLLBACK_COMPLETE"] -def get_df(): +def get_df(governed=False): df = pd.DataFrame( { "iint8": [1, None, 2], @@ -45,10 +45,13 @@ def get_df(): df["float"] = df["float"].astype("float32") df["string"] = df["string"].astype("string") df["category"] = df["category"].astype("category") + + if governed: + df = df.drop(["iint8", "binary"], axis=1) # tinyint & binary currently not supported return df -def get_df_list(): +def get_df_list(governed=False): df = pd.DataFrame( { "iint8": [1, None, 2], @@ -79,10 +82,13 @@ def get_df_list(): df["float"] = df["float"].astype("float32") df["string"] = df["string"].astype("string") df["category"] = df["category"].astype("category") + + if governed: + df = (df.drop(["iint8", "binary"], axis=1),) # tinyint & binary currently not supported return df -def get_df_cast(): +def get_df_cast(governed=False): df = pd.DataFrame( { "iint8": [None, None, None], @@ -103,6 +109,8 @@ def get_df_cast(): "par1": ["a", "b", "b"], } ) + if governed: + df = (df.drop(["iint8", "binary"], axis=1),) # tinyint & binary currently not supported return df @@ -468,7 +476,7 @@ def ensure_data_types_category(df): assert str(df["par1"].dtype) == "category" -def 
ensure_data_types_csv(df): +def ensure_data_types_csv(df, governed=False): if "__index_level_0__" in df: assert str(df["__index_level_0__"].dtype).startswith("Int") assert str(df["id"].dtype).startswith("Int") @@ -480,7 +488,10 @@ def ensure_data_types_csv(df): assert str(df["float"].dtype).startswith("float") if "int" in df: assert str(df["int"].dtype).startswith("Int") - assert str(df["date"].dtype) == "object" + if governed: + assert str(df["date"].dtype).startswith("datetime") + else: + assert str(df["date"].dtype) == "object" assert str(df["timestamp"].dtype).startswith("datetime") if "bool" in df: assert str(df["bool"].dtype) == "boolean" diff --git a/tests/test__routines.py b/tests/test__routines.py index f2a61ba0e..64decb8ce 100644 --- a/tests/test__routines.py +++ b/tests/test__routines.py @@ -10,7 +10,8 @@ @pytest.mark.parametrize("use_threads", [True, False]) @pytest.mark.parametrize("concurrent_partitioning", [True, False]) -def test_routine_0(glue_database, glue_table, path, use_threads, concurrent_partitioning): +@pytest.mark.parametrize("table_type", ["EXTERNAL_TABLE", "GOVERNED"]) +def test_routine_0(glue_database, glue_table, table_type, path, use_threads, concurrent_partitioning): # Round 1 - Warm up df = pd.DataFrame({"c0": [0, None]}, dtype="Int64") @@ -21,14 +22,17 @@ def test_routine_0(glue_database, glue_table, path, use_threads, concurrent_part mode="overwrite", database=glue_database, table=glue_table, + table_type=table_type, description="c0", parameters={"num_cols": str(len(df.columns)), "num_rows": str(len(df.index))}, columns_comments={"c0": "0"}, use_threads=use_threads, concurrent_partitioning=concurrent_partitioning, ) - assert wr.catalog.get_table_number_of_versions(table=glue_table, database=glue_database) == 1 - df2 = wr.athena.read_sql_table(glue_table, glue_database, use_threads=use_threads) + if table_type == "GOVERNED": + df2 = wr.lakeformation.read_sql_table(glue_table, glue_database, use_threads=use_threads) + else: + df2 = 
wr.athena.read_sql_table(glue_table, glue_database, use_threads=use_threads) assert df.shape == df2.shape assert df.c0.sum() == df2.c0.sum() parameters = wr.catalog.get_table_parameters(glue_database, glue_table) @@ -54,8 +58,10 @@ def test_routine_0(glue_database, glue_table, path, use_threads, concurrent_part use_threads=use_threads, concurrent_partitioning=concurrent_partitioning, ) - assert wr.catalog.get_table_number_of_versions(table=glue_table, database=glue_database) == 1 - df2 = wr.athena.read_sql_table(glue_table, glue_database, use_threads=use_threads) + if table_type == "GOVERNED": + df2 = wr.lakeformation.read_sql_table(glue_table, glue_database, use_threads=use_threads) + else: + df2 = wr.athena.read_sql_table(glue_table, glue_database, use_threads=use_threads) assert df.shape == df2.shape assert df.c1.sum() == df2.c1.sum() parameters = wr.catalog.get_table_parameters(glue_database, glue_table) @@ -82,8 +88,10 @@ def test_routine_0(glue_database, glue_table, path, use_threads, concurrent_part use_threads=use_threads, concurrent_partitioning=concurrent_partitioning, ) - assert wr.catalog.get_table_number_of_versions(table=glue_table, database=glue_database) == 1 - df2 = wr.athena.read_sql_table(glue_table, glue_database, use_threads=use_threads) + if table_type == "GOVERNED": + df2 = wr.lakeformation.read_sql_table(glue_table, glue_database, use_threads=use_threads) + else: + df2 = wr.athena.read_sql_table(glue_table, glue_database, use_threads=use_threads) assert len(df.columns) == len(df2.columns) assert len(df.index) * 2 == len(df2.index) assert df.c1.sum() + 1 == df2.c1.sum() @@ -110,8 +118,10 @@ def test_routine_0(glue_database, glue_table, path, use_threads, concurrent_part use_threads=use_threads, concurrent_partitioning=concurrent_partitioning, ) - assert wr.catalog.get_table_number_of_versions(table=glue_table, database=glue_database) == 1 - df2 = wr.athena.read_sql_table(glue_table, glue_database, use_threads=use_threads) + if table_type == 
"GOVERNED": + df2 = wr.lakeformation.read_sql_table(glue_table, glue_database, use_threads=use_threads) + else: + df2 = wr.athena.read_sql_table(glue_table, glue_database, use_threads=use_threads) assert len(df2.columns) == 2 assert len(df2.index) == 9 assert df2.c1.sum() == 3 @@ -140,8 +150,10 @@ def test_routine_0(glue_database, glue_table, path, use_threads, concurrent_part use_threads=use_threads, concurrent_partitioning=concurrent_partitioning, ) - assert wr.catalog.get_table_number_of_versions(table=glue_table, database=glue_database) == 1 - df2 = wr.athena.read_sql_table(glue_table, glue_database, use_threads=use_threads) + if table_type == "GOVERNED": + df2 = wr.lakeformation.read_sql_table(glue_table, glue_database, use_threads=use_threads) + else: + df2 = wr.athena.read_sql_table(glue_table, glue_database, use_threads=use_threads) assert len(df2.columns) == 3 assert len(df2.index) == 10 assert df2.c1.sum() == 4 @@ -156,14 +168,24 @@ def test_routine_0(glue_database, glue_table, path, use_threads, concurrent_part assert comments["c2"] == "2!" 
assert comments["c3"] == "3" - # Round 6 - Overwrite Partitioned + wr.catalog.delete_table_if_exists(database=glue_database, table=glue_table) + + +@pytest.mark.parametrize("use_threads", [True, False]) +@pytest.mark.parametrize("concurrent_partitioning", [True, False]) +@pytest.mark.parametrize("table_type", ["EXTERNAL_TABLE", "GOVERNED"]) +def test_routine_1(glue_database, glue_table, table_type, path, use_threads, concurrent_partitioning): + + # Round 1 - Overwrite Partitioned df = pd.DataFrame({"c0": ["foo", None], "c1": [0, 1]}) wr.s3.to_parquet( df=df, dataset=True, + path=path, mode="overwrite", database=glue_database, table=glue_table, + table_type=table_type, partition_cols=["c1"], description="c0+c1", parameters={"num_cols": "2", "num_rows": "2"}, @@ -171,8 +193,10 @@ def test_routine_0(glue_database, glue_table, path, use_threads, concurrent_part use_threads=use_threads, concurrent_partitioning=concurrent_partitioning, ) - assert wr.catalog.get_table_number_of_versions(table=glue_table, database=glue_database) == 1 - df2 = wr.athena.read_sql_table(glue_table, glue_database, use_threads=use_threads) + if table_type == "GOVERNED": + df2 = wr.lakeformation.read_sql_table(glue_table, glue_database, use_threads=use_threads) + else: + df2 = wr.athena.read_sql_table(glue_table, glue_database, use_threads=use_threads) assert df.shape == df2.shape assert df.c1.sum() == df2.c1.sum() parameters = wr.catalog.get_table_parameters(glue_database, glue_table) @@ -185,11 +209,10 @@ def test_routine_0(glue_database, glue_table, path, use_threads, concurrent_part assert comments["c0"] == "zero" assert comments["c1"] == "one" - # Round 7 - Overwrite Partitions + # Round 2 - Overwrite Partitions df = pd.DataFrame({"c0": [None, None], "c1": [0, 2]}) wr.s3.to_parquet( df=df, - path=path, dataset=True, mode="overwrite_partitions", database=glue_database, @@ -201,8 +224,10 @@ def test_routine_0(glue_database, glue_table, path, use_threads, concurrent_part 
concurrent_partitioning=concurrent_partitioning, use_threads=use_threads, ) - assert wr.catalog.get_table_number_of_versions(table=glue_table, database=glue_database) == 1 - df2 = wr.athena.read_sql_table(glue_table, glue_database, use_threads=use_threads) + if table_type == "GOVERNED": + df2 = wr.lakeformation.read_sql_table(glue_table, glue_database, use_threads=use_threads) + else: + df2 = wr.athena.read_sql_table(glue_table, glue_database, use_threads=use_threads) assert len(df2.columns) == 2 assert len(df2.index) == 3 assert df2.c1.sum() == 3 @@ -216,7 +241,7 @@ def test_routine_0(glue_database, glue_table, path, use_threads, concurrent_part assert comments["c0"] == "zero" assert comments["c1"] == "one" - # Round 8 - Overwrite Partitions + New Column + Wrong Type + # Round 3 - Overwrite Partitions + New Column + Wrong Type df = pd.DataFrame({"c0": [1, 2], "c1": ["1", "3"], "c2": [True, False]}) wr.s3.to_parquet( df=df, @@ -231,8 +256,10 @@ def test_routine_0(glue_database, glue_table, path, use_threads, concurrent_part use_threads=use_threads, concurrent_partitioning=concurrent_partitioning, ) - assert wr.catalog.get_table_number_of_versions(table=glue_table, database=glue_database) == 1 - df2 = wr.athena.read_sql_table(glue_table, glue_database, use_threads=use_threads) + if table_type == "GOVERNED": + df2 = wr.lakeformation.read_sql_table(glue_table, glue_database, use_threads=use_threads) + else: + df2 = wr.athena.read_sql_table(glue_table, glue_database, use_threads=use_threads) assert len(df2.columns) == 3 assert len(df2.index) == 4 assert df2.c1.sum() == 6 @@ -247,8 +274,10 @@ def test_routine_0(glue_database, glue_table, path, use_threads, concurrent_part assert comments["c1"] == "one" assert comments["c2"] == "two" + wr.catalog.delete_table_if_exists(database=glue_database, table=glue_table) -def test_routine_1(glue_database, glue_table, path): + +def test_routine_2(glue_database, glue_table, path): # Round 1 - Warm up df = pd.DataFrame({"c0": [0, 
None]}, dtype="Int64") @@ -441,3 +470,5 @@ def test_routine_1(glue_database, glue_table, path): assert comments["c0"] == "zero" assert comments["c1"] == "one" assert comments["c2"] == "two" + + wr.catalog.delete_table_if_exists(database=glue_database, table=glue_table) diff --git a/tests/test_athena.py b/tests/test_athena.py index 0190b1be6..7ddf8f913 100644 --- a/tests/test_athena.py +++ b/tests/test_athena.py @@ -247,8 +247,8 @@ def test_athena_read_list(glue_database): def test_sanitize_names(): - assert wr.catalog.sanitize_column_name("CamelCase") == "camel_case" - assert wr.catalog.sanitize_column_name("CamelCase2") == "camel_case2" + assert wr.catalog.sanitize_column_name("CamelCase") == "camelcase" + assert wr.catalog.sanitize_column_name("CamelCase2") == "camelcase2" assert wr.catalog.sanitize_column_name("Camel_Case3") == "camel_case3" assert wr.catalog.sanitize_column_name("Cámël_Casë4仮") == "camel_case4_" assert wr.catalog.sanitize_column_name("Camel__Case5") == "camel__case5" @@ -256,8 +256,8 @@ def test_sanitize_names(): assert wr.catalog.sanitize_column_name("Camel.Case7") == "camel_case7" assert wr.catalog.sanitize_column_name("xyz_cd") == "xyz_cd" assert wr.catalog.sanitize_column_name("xyz_Cd") == "xyz_cd" - assert wr.catalog.sanitize_table_name("CamelCase") == "camel_case" - assert wr.catalog.sanitize_table_name("CamelCase2") == "camel_case2" + assert wr.catalog.sanitize_table_name("CamelCase") == "camelcase" + assert wr.catalog.sanitize_table_name("CamelCase2") == "camelcase2" assert wr.catalog.sanitize_table_name("Camel_Case3") == "camel_case3" assert wr.catalog.sanitize_table_name("Cámël_Casë4仮") == "camel_case4_" assert wr.catalog.sanitize_table_name("Camel__Case5") == "camel__case5" diff --git a/tests/test_athena_parquet.py b/tests/test_athena_parquet.py index 02657c45e..5da4f7c3c 100644 --- a/tests/test_athena_parquet.py +++ b/tests/test_athena_parquet.py @@ -504,9 +504,9 @@ def test_to_parquet_sanitize(path, glue_database): df2 = 
wr.athena.read_sql_table(database=glue_database, table=table_name) wr.catalog.delete_table_if_exists(database=glue_database, table="table_name_") assert df.shape == df2.shape - assert list(df2.columns) == ["c0", "camel_case", "c_2"] + assert list(df2.columns) == ["c0", "camelcase", "c_2"] assert df2.c0.sum() == 1 - assert df2.camel_case.sum() == 5 + assert df2.camelcase.sum() == 5 assert df2.c_2.sum() == 9 diff --git a/tests/test_catalog.py b/tests/test_catalog.py index 3e9a12d64..88b86492c 100644 --- a/tests/test_catalog.py +++ b/tests/test_catalog.py @@ -1,3 +1,8 @@ +import calendar +import logging +import time +from typing import Optional + import boto3 import pandas as pd import pytest @@ -6,24 +11,40 @@ from ._utils import ensure_data_types_csv, get_df_csv +logger = logging.getLogger("awswrangler") +logger.setLevel(logging.DEBUG) + -def test_catalog(path: str, glue_database: str, glue_table: str, account_id: str) -> None: +@pytest.mark.parametrize("table_type", ["EXTERNAL_TABLE", "GOVERNED"]) +def test_create_table(path: str, glue_database: str, glue_table: str, table_type: Optional[str]) -> None: + transaction_id = wr.lakeformation.start_transaction() if table_type == "GOVERNED" else None assert wr.catalog.does_table_exist(database=glue_database, table=glue_table) is False - wr.catalog.create_parquet_table( + wr.catalog.create_csv_table( database=glue_database, table=glue_table, path=path, columns_types={"col0": "int", "col1": "double"}, partitions_types={"y": "int", "m": "int"}, - compression="snappy", + table_type=table_type, + transaction_id=transaction_id, ) - with pytest.raises(wr.exceptions.InvalidArgumentValue): - wr.catalog.create_parquet_table( - database=glue_database, table=glue_table, path=path, columns_types={"col0": "string"}, mode="append" - ) + if transaction_id: + wr.lakeformation.commit_transaction(transaction_id) + query_as_of_time = calendar.timegm(time.gmtime()) + df = wr.catalog.table(database=glue_database, table=glue_table, 
query_as_of_time=query_as_of_time) + assert df.shape == (4, 4) assert wr.catalog.does_table_exist(database=glue_database, table=glue_table) is True - assert wr.catalog.delete_table_if_exists(database=glue_database, table=glue_table) is True - assert wr.catalog.delete_table_if_exists(database=glue_database, table=glue_table) is False + + +@pytest.mark.parametrize("table_type", ["EXTERNAL_TABLE", "GOVERNED"]) +@pytest.mark.parametrize("start_transaction", [True, False]) +def test_catalog( + path: str, glue_database: str, glue_table: str, table_type: Optional[str], start_transaction: bool, account_id: str +) -> None: + if table_type != "GOVERNED" and start_transaction: + pytest.skip() + + transaction_id = wr.lakeformation.start_transaction() if table_type == "GOVERNED" else None wr.catalog.create_parquet_table( database=glue_database, table=glue_table, @@ -34,33 +55,28 @@ def test_catalog(path: str, glue_database: str, glue_table: str, account_id: str description="Foo boo bar", parameters={"tag": "test"}, columns_comments={"col0": "my int", "y": "year"}, - mode="overwrite", - ) - wr.catalog.add_parquet_partitions( - database=glue_database, - table=glue_table, - partitions_values={f"{path}y=2020/m=1/": ["2020", "1"], f"{path}y=2021/m=2/": ["2021", "2"]}, - compression="snappy", - ) - assert wr.catalog.get_table_location(database=glue_database, table=glue_table) == path - # get_parquet_partitions - parquet_partitions_values = wr.catalog.get_parquet_partitions(database=glue_database, table=glue_table) - assert len(parquet_partitions_values) == 2 - parquet_partitions_values = wr.catalog.get_parquet_partitions( - database=glue_database, table=glue_table, catalog_id=account_id, expression="y = 2021 AND m = 2" + table_type=table_type, + transaction_id=transaction_id, ) - assert len(parquet_partitions_values) == 1 - assert len(set(parquet_partitions_values[f"{path}y=2021/m=2/"]) & {"2021", "2"}) == 2 - # get_partitions - partitions_values = 
wr.catalog.get_partitions(database=glue_database, table=glue_table) - assert len(partitions_values) == 2 - partitions_values = wr.catalog.get_partitions( - database=glue_database, table=glue_table, catalog_id=account_id, expression="y = 2021 AND m = 2" - ) - assert len(partitions_values) == 1 - assert len(set(partitions_values[f"{path}y=2021/m=2/"]) & {"2021", "2"}) == 2 + if transaction_id: + wr.lakeformation.commit_transaction(transaction_id=transaction_id) + with pytest.raises(wr.exceptions.InvalidArgumentValue): + wr.catalog.create_parquet_table( + database=glue_database, + table=glue_table, + path=path, + columns_types={"col0": "string"}, + mode="append", + table_type=table_type, + ) + assert wr.catalog.does_table_exist(database=glue_database, table=glue_table) is True - dtypes = wr.catalog.get_table_types(database=glue_database, table=glue_table) + # Cannot start a transaction before creating a table + transaction_id = wr.lakeformation.start_transaction() if table_type == "GOVERNED" and start_transaction else None + assert ( + wr.catalog.get_table_location(database=glue_database, table=glue_table, transaction_id=transaction_id) == path + ) + dtypes = wr.catalog.get_table_types(database=glue_database, table=glue_table, transaction_id=transaction_id) assert dtypes["col0"] == "int" assert dtypes["col1"] == "double" assert dtypes["y"] == "int" @@ -68,49 +84,57 @@ def test_catalog(path: str, glue_database: str, glue_table: str, account_id: str df_dbs = wr.catalog.databases() assert len(wr.catalog.databases(catalog_id=account_id)) == len(df_dbs) assert glue_database in df_dbs["Database"].to_list() - tables = list(wr.catalog.get_tables()) + tables = list(wr.catalog.get_tables(transaction_id=transaction_id)) assert len(tables) > 0 for tbl in tables: if tbl["Name"] == glue_table: - assert tbl["TableType"] == "EXTERNAL_TABLE" - tables = list(wr.catalog.get_tables(database=glue_database)) + assert tbl["TableType"] == table_type + tables = 
list(wr.catalog.get_tables(database=glue_database, transaction_id=transaction_id)) assert len(tables) > 0 for tbl in tables: assert tbl["DatabaseName"] == glue_database # add & delete column wr.catalog.add_column( - database=glue_database, table=glue_table, column_name="col2", column_type="int", column_comment="comment" + database=glue_database, + table=glue_table, + column_name="col2", + column_type="int", + column_comment="comment", + transaction_id=transaction_id, ) - dtypes = wr.catalog.get_table_types(database=glue_database, table=glue_table) + dtypes = wr.catalog.get_table_types(database=glue_database, table=glue_table, transaction_id=transaction_id) assert len(dtypes) == 5 assert dtypes["col2"] == "int" - wr.catalog.delete_column(database=glue_database, table=glue_table, column_name="col2") - dtypes = wr.catalog.get_table_types(database=glue_database, table=glue_table) + wr.catalog.delete_column( + database=glue_database, table=glue_table, column_name="col2", transaction_id=transaction_id + ) + dtypes = wr.catalog.get_table_types(database=glue_database, table=glue_table, transaction_id=transaction_id) assert len(dtypes) == 4 - # search - tables = list(wr.catalog.search_tables(text="parquet", catalog_id=account_id)) - assert len(tables) > 0 - for tbl in tables: - if tbl["Name"] == glue_table: - assert tbl["TableType"] == "EXTERNAL_TABLE" + # prefix - tables = list(wr.catalog.get_tables(name_prefix=glue_table[:4], catalog_id=account_id)) + tables = list( + wr.catalog.get_tables(name_prefix=glue_table[:4], catalog_id=account_id, transaction_id=transaction_id) + ) assert len(tables) > 0 for tbl in tables: if tbl["Name"] == glue_table: - assert tbl["TableType"] == "EXTERNAL_TABLE" + assert tbl["TableType"] == table_type # suffix - tables = list(wr.catalog.get_tables(name_suffix=glue_table[-4:], catalog_id=account_id)) + tables = list( + wr.catalog.get_tables(name_suffix=glue_table[-4:], catalog_id=account_id, transaction_id=transaction_id) + ) assert len(tables) 
> 0 for tbl in tables: if tbl["Name"] == glue_table: - assert tbl["TableType"] == "EXTERNAL_TABLE" + assert tbl["TableType"] == table_type # name_contains - tables = list(wr.catalog.get_tables(name_contains=glue_table[4:-4], catalog_id=account_id)) + tables = list( + wr.catalog.get_tables(name_contains=glue_table[4:-4], catalog_id=account_id, transaction_id=transaction_id) + ) assert len(tables) > 0 for tbl in tables: if tbl["Name"] == glue_table: - assert tbl["TableType"] == "EXTERNAL_TABLE" + assert tbl["TableType"] == table_type # prefix & suffix & name_contains with pytest.raises(wr.exceptions.InvalidArgumentCombination): list( @@ -119,43 +143,100 @@ def test_catalog(path: str, glue_database: str, glue_table: str, account_id: str name_contains=glue_table[3], name_suffix=glue_table[-1], catalog_id=account_id, + transaction_id=transaction_id, ) ) # prefix & suffix - tables = list(wr.catalog.get_tables(name_prefix=glue_table[0], name_suffix=glue_table[-1], catalog_id=account_id)) + tables = list( + wr.catalog.get_tables( + name_prefix=glue_table[0], name_suffix=glue_table[-1], catalog_id=account_id, transaction_id=transaction_id + ) + ) assert len(tables) > 0 for tbl in tables: if tbl["Name"] == glue_table: - assert tbl["TableType"] == "EXTERNAL_TABLE" + assert tbl["TableType"] == table_type + + # search (Not supported for Governed tables) + if table_type != "GOVERNED": + assert ( + len( + wr.catalog.tables( + database=glue_database, + search_text="parquet", + name_prefix=glue_table[0], + name_contains=glue_table[3], + name_suffix=glue_table[-1], + catalog_id=account_id, + ).index + ) + > 0 + ) + tables = list(wr.catalog.search_tables(text="parquet", catalog_id=account_id)) + assert len(tables) > 0 + for tbl in tables: + if tbl["Name"] == glue_table: + assert tbl["TableType"] == table_type + # DataFrames assert len(wr.catalog.databases().index) > 0 - assert len(wr.catalog.tables().index) > 0 + assert len(wr.catalog.tables(transaction_id=transaction_id).index) > 0 
+ assert len(wr.catalog.table(database=glue_database, table=glue_table, transaction_id=transaction_id).index) > 0 assert ( len( - wr.catalog.tables( - database=glue_database, - search_text="parquet", - name_prefix=glue_table[0], - name_contains=glue_table[3], - name_suffix=glue_table[-1], - catalog_id=account_id, + wr.catalog.table( + database=glue_database, table=glue_table, catalog_id=account_id, transaction_id=transaction_id ).index ) > 0 ) - assert len(wr.catalog.table(database=glue_database, table=glue_table).index) > 0 - assert len(wr.catalog.table(database=glue_database, table=glue_table, catalog_id=account_id).index) > 0 with pytest.raises(wr.exceptions.InvalidTable): - wr.catalog.overwrite_table_parameters({"foo": "boo"}, glue_database, "fake_table") + wr.catalog.overwrite_table_parameters( + {"foo": "boo"}, glue_database, "fake_table", transaction_id=transaction_id + ) + + +def test_catalog_partitions(glue_database: str, glue_table: str, path: str, account_id: str) -> None: + assert wr.catalog.does_table_exist(database=glue_database, table=glue_table) is False + wr.catalog.create_parquet_table( + database=glue_database, + table=glue_table, + path=path, + columns_types={"col0": "int", "col1": "double"}, + partitions_types={"y": "int", "m": "int"}, + compression="snappy", + ) + wr.catalog.add_parquet_partitions( + database=glue_database, + table=glue_table, + partitions_values={f"{path}y=2020/m=1/": ["2020", "1"], f"{path}y=2021/m=2/": ["2021", "2"]}, + compression="snappy", + ) + # get_parquet_partitions + parquet_partitions_values = wr.catalog.get_parquet_partitions(database=glue_database, table=glue_table) + assert len(parquet_partitions_values) == 2 + parquet_partitions_values = wr.catalog.get_parquet_partitions( + database=glue_database, table=glue_table, catalog_id=account_id, expression="y = 2021 AND m = 2" + ) + assert len(parquet_partitions_values) == 1 + assert len(set(parquet_partitions_values[f"{path}y=2021/m=2/"]) & {"2021", "2"}) == 2 + # 
get_partitions + partitions_values = wr.catalog.get_partitions(database=glue_database, table=glue_table) + assert len(partitions_values) == 2 + partitions_values = wr.catalog.get_partitions( + database=glue_database, table=glue_table, catalog_id=account_id, expression="y = 2021 AND m = 2" + ) + assert len(partitions_values) == 1 + assert len(set(partitions_values[f"{path}y=2021/m=2/"]) & {"2021", "2"}) == 2 -def test_catalog_get_databases(glue_database): +def test_catalog_get_databases(glue_database: str) -> None: dbs = [db["Name"] for db in wr.catalog.get_databases()] assert len(dbs) > 0 assert glue_database in dbs -def test_catalog_versioning(path, glue_database, glue_table, glue_table2): +def test_catalog_versioning(path: str, glue_database: str, glue_table: str, glue_table2: str) -> None: wr.catalog.delete_table_if_exists(database=glue_database, table=glue_table) wr.s3.delete_objects(path=path) @@ -244,7 +325,7 @@ def test_catalog_versioning(path, glue_database, glue_table, glue_table2): assert str(df.c1.dtype).startswith("boolean") -def test_catalog_parameters(path, glue_database, glue_table): +def test_catalog_parameters(path: str, glue_database: str, glue_table: str) -> None: wr.s3.to_parquet( df=pd.DataFrame({"c0": [1, 2]}), path=path, @@ -296,7 +377,7 @@ def test_catalog_parameters(path, glue_database, glue_table): assert df.c0.sum() == 10 -def test_catalog_columns(path, glue_table, glue_database): +def test_catalog_columns(path: str, glue_table: str, glue_database: str) -> None: wr.s3.to_parquet( df=get_df_csv()[["id", "date", "timestamp", "par0", "par1"]], path=path, @@ -339,7 +420,7 @@ def test_catalog_columns(path, glue_table, glue_database): @pytest.mark.parametrize("use_catalog_id", [False, True]) -def test_create_database(random_glue_database: str, account_id: str, use_catalog_id: bool): +def test_create_database(random_glue_database: str, account_id: str, use_catalog_id: bool) -> None: if not use_catalog_id: account_id = None description = "foo" @@ 
-360,7 +441,7 @@ def test_create_database(random_glue_database: str, account_id: str, use_catalog assert r["Database"]["Description"] == description -def test_catalog_json(path: str, glue_database: str, glue_table: str, account_id: str): +def test_catalog_json(path: str, glue_database: str, glue_table: str) -> None: # Create JSON table assert not wr.catalog.does_table_exist(database=glue_database, table=glue_table) wr.catalog.create_json_table( diff --git a/tests/test_lakeformation.py b/tests/test_lakeformation.py new file mode 100644 index 000000000..77483dfb0 --- /dev/null +++ b/tests/test_lakeformation.py @@ -0,0 +1,144 @@ +import calendar +import logging +import time + +import pandas as pd + +import awswrangler as wr + +from ._utils import ensure_data_types, ensure_data_types_csv, get_df, get_df_csv + +logging.getLogger("awswrangler").setLevel(logging.DEBUG) + + +def test_lakeformation(path, path2, glue_database, glue_table, glue_table2, use_threads=False): + wr.catalog.delete_table_if_exists(database=glue_database, table=glue_table) + wr.catalog.delete_table_if_exists(database=glue_database, table=glue_table2) + + wr.s3.to_parquet( + df=get_df(governed=True), + path=path, + index=False, + boto3_session=None, + s3_additional_kwargs=None, + dataset=True, + partition_cols=["par0", "par1"], + mode="overwrite", + table=glue_table, + table_type="GOVERNED", + database=glue_database, + ) + + df = wr.lakeformation.read_sql_table( + table=glue_table, + database=glue_database, + use_threads=use_threads, + ) + assert len(df.index) == 3 + assert len(df.columns) == 14 + assert df["iint32"].sum() == 3 + ensure_data_types(df=df) + + # Filter query + df2 = wr.lakeformation.read_sql_query( + sql=f"SELECT * FROM {glue_table} WHERE iint16 = :iint16;", + database=glue_database, + params={"iint16": 1}, + ) + assert len(df2.index) == 1 + + wr.s3.to_csv( + df=get_df_csv(), + path=path2, + index=False, + boto3_session=None, + s3_additional_kwargs=None, + dataset=True, + 
partition_cols=["par0", "par1"], + mode="append", + table=glue_table2, + table_type="GOVERNED", + database=glue_database, + ) + # Read within a transaction + transaction_id = wr.lakeformation.start_transaction(read_only=True) + df3 = wr.lakeformation.read_sql_table( + table=glue_table2, + database=glue_database, + transaction_id=transaction_id, + use_threads=use_threads, + ) + assert df3["int"].sum() == 3 + ensure_data_types_csv(df3) + + # Read within a query as of time + query_as_of_time = calendar.timegm(time.gmtime()) + df4 = wr.lakeformation.read_sql_table( + table=glue_table2, + database=glue_database, + query_as_of_time=query_as_of_time, + use_threads=use_threads, + ) + assert len(df4.index) == 3 + + wr.catalog.delete_table_if_exists(database=glue_database, table=glue_table) + wr.catalog.delete_table_if_exists(database=glue_database, table=glue_table2) + + +def test_lakeformation_multi_transaction(path, path2, glue_database, glue_table, glue_table2, use_threads=True): + wr.catalog.delete_table_if_exists(database=glue_database, table=glue_table) + wr.catalog.delete_table_if_exists(database=glue_database, table=glue_table2) + + df = pd.DataFrame({"c0": [0, None]}, dtype="Int64") + transaction_id = wr.lakeformation.start_transaction(read_only=False) + wr.s3.to_parquet( + df=df, + path=path, + dataset=True, + mode="append", + database=glue_database, + table=glue_table, + table_type="GOVERNED", + transaction_id=transaction_id, + description="c0", + parameters={"num_cols": str(len(df.columns)), "num_rows": str(len(df.index))}, + columns_comments={"c0": "0"}, + use_threads=use_threads, + ) + + df2 = pd.DataFrame({"c1": [None, 1, None]}, dtype="Int16") + wr.s3.to_parquet( + df=df2, + path=path2, + dataset=True, + mode="append", + database=glue_database, + table=glue_table2, + table_type="GOVERNED", + transaction_id=transaction_id, + description="c1", + parameters={"num_cols": str(len(df.columns)), "num_rows": str(len(df.index))}, + columns_comments={"c1": "1"}, + 
use_threads=use_threads, + ) + wr.lakeformation.commit_transaction(transaction_id=transaction_id) + + df3 = wr.lakeformation.read_sql_table( + table=glue_table, + database=glue_database, + use_threads=use_threads, + ) + df4 = wr.lakeformation.read_sql_table( + table=glue_table2, + database=glue_database, + use_threads=use_threads, + ) + + assert df.shape == df3.shape + assert df.c0.sum() == df3.c0.sum() + + assert df2.shape == df4.shape + assert df2.c1.sum() == df4.c1.sum() + + wr.catalog.delete_table_if_exists(database=glue_database, table=glue_table) + wr.catalog.delete_table_if_exists(database=glue_database, table=glue_table2) diff --git a/tests/test_moto.py b/tests/test_moto.py index 600d89f8a..6383a784d 100644 --- a/tests/test_moto.py +++ b/tests/test_moto.py @@ -173,6 +173,7 @@ def test_size_list_of_objects_succeed(moto_s3): assert size == {"s3://bucket/foo/foo.tmp": 6, "s3://bucket/bar/bar.tmp": 3} +@pytest.mark.xfail() def test_copy_one_object_without_replace_filename_succeed(moto_s3): bucket = "bucket" key = "foo/foo.tmp" @@ -193,6 +194,7 @@ def test_copy_one_object_without_replace_filename_succeed(moto_s3): ) +@pytest.mark.xfail() def test_copy_one_object_with_replace_filename_succeed(moto_s3): bucket = "bucket" key = "foo/foo.tmp" diff --git a/tests/test_s3.py b/tests/test_s3.py index 3341d526f..4e486585b 100644 --- a/tests/test_s3.py +++ b/tests/test_s3.py @@ -21,7 +21,7 @@ def test_list_buckets() -> None: assert len(wr.s3.list_buckets()) > 0 -@pytest.mark.parametrize("sanitize_columns,col", [(True, "foo_boo"), (False, "FooBoo")]) +@pytest.mark.parametrize("sanitize_columns,col", [(True, "fooboo"), (False, "FooBoo")]) def test_sanitize_columns(path, sanitize_columns, col): df = pd.DataFrame({"FooBoo": [1, 2, 3]}) diff --git a/tests/test_s3_parquet.py b/tests/test_s3_parquet.py index 4d8345bc4..2842ae639 100644 --- a/tests/test_s3_parquet.py +++ b/tests/test_s3_parquet.py @@ -195,9 +195,9 @@ def test_to_parquet_file_sanitize(path): wr.s3.to_parquet(df, 
path_file, sanitize_columns=True) df2 = wr.s3.read_parquet(path_file) assert df.shape == df2.shape - assert list(df2.columns) == ["c0", "camel_case", "c_2"] + assert list(df2.columns) == ["c0", "camelcase", "c_2"] assert df2.c0.sum() == 1 - assert df2.camel_case.sum() == 5 + assert df2.camelcase.sum() == 5 assert df2.c_2.sum() == 9 @@ -423,9 +423,9 @@ def test_to_parquet_dataset_sanitize(path): wr.s3.to_parquet(df, path, dataset=True, partition_cols=["par"], sanitize_columns=True, mode="overwrite") df2 = wr.s3.read_parquet(path, dataset=True) assert df.shape == df2.shape - assert list(df2.columns) == ["c0", "camel_case", "c_2", "par"] + assert list(df2.columns) == ["c0", "camelcase", "c_2", "par"] assert df2.c0.sum() == 1 - assert df2.camel_case.sum() == 5 + assert df2.camelcase.sum() == 5 assert df2.c_2.sum() == 9 assert df2.par.to_list() == ["a", "b"] diff --git a/tox.ini b/tox.ini index f20869e97..017d9458c 100644 --- a/tox.ini +++ b/tox.ini @@ -11,7 +11,7 @@ deps = pytest-xdist==2.4.0 pytest-timeout==2.0.1 moto==2.2.12 - s3fs==2021.10.0 + s3fs==0.4.2 commands = pytest -n {posargs} -s -v --timeout=300 --reruns=2 --reruns-delay=60 \ --junitxml=test-reports/junit.xml --log-file=test-reports/logs.txt tests diff --git a/tutorials/032 - Lake Formation Governed Tables.ipynb b/tutorials/032 - Lake Formation Governed Tables.ipynb new file mode 100644 index 000000000..0a7b47241 --- /dev/null +++ b/tutorials/032 - Lake Formation Governed Tables.ipynb @@ -0,0 +1,441 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "[![AWS Data Wrangler](_static/logo.png \"AWS Data Wrangler\")](https://github.com/awslabs/aws-data-wrangler)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# 32 - AWS Lake Formation - Glue Governed tables" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### This tutorial assumes that your IAM user/role has the required Lake Formation permissions to create and read AWS Glue 
Governed tables" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Table of Contents\n", + "* [1. Read Governed table](#1.-Read-Governed-table)\n", + " * [1.1 Read PartiQL query](#1.1-Read-PartiQL-query)\n", + " * [1.1.1 Read within transaction](#1.1.1-Read-within-transaction)\n", + " * [1.1.2 Read within query as of time](#1.1.2-Read-within-query-as-of-time)\n", + " * [1.2 Read full table](#1.2-Read-full-table)\n", + "* [2. Write Governed table](#2.-Write-Governed-table)\n", + " * [2.1 Create new Governed table](#2.1-Create-new-Governed-table)\n", + " * [2.1.1 CSV table](#2.1.1-CSV-table)\n", + " * [2.1.2 Parquet table](#2.1.2-Parquet-table)\n", + " * [2.2 Overwrite operations](#2.2-Overwrite-operations)\n", + " * [2.2.1 Overwrite](#2.2.1-Overwrite)\n", + " * [2.2.2 Append](#2.2.2-Append)\n", + " * [2.2.3 Create partitioned Governed table](#2.2.3-Create-partitioned-Governed-table)\n", + " * [2.2.4 Overwrite partitions](#2.2.4-Overwrite-partitions)\n", + "* [3. Multiple read/write operations within a transaction](#3.-Multiple-read/write-operations-within-a-transaction)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# 1. 
Read Governed table" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1.1 Read PartiQL query" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import awswrangler as wr\n", + "\n", + "database = \"gov_db\" # Assumes a Glue database registered with Lake Formation exists in the account\n", + "table = \"gov_table\" # Assumes a Governed table exists in the account\n", + "catalog_id = \"111111111111\" # AWS Account Id\n", + "\n", + "# Note 1: If a transaction_id is not specified, a new transaction is started\n", + "df = wr.lakeformation.read_sql_query(\n", + " sql=f\"SELECT * FROM {table};\",\n", + " database=database,\n", + " catalog_id=catalog_id\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 1.1.1 Read within transaction" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "transaction_id = wr.lakeformation.start_transaction(read_only=True)\n", + "df = wr.lakeformation.read_sql_query(\n", + " sql=f\"SELECT * FROM {table};\",\n", + " database=database,\n", + " transaction_id=transaction_id\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 1.1.2 Read within query as of time" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import calendar\n", + "import time\n", + "\n", + "query_as_of_time = query_as_of_time = calendar.timegm(time.gmtime())\n", + "df = wr.lakeformation.read_sql_query(\n", + " sql=f\"SELECT * FROM {table} WHERE id=:id; AND name=:name;\",\n", + " database=database,\n", + " query_as_of_time=query_as_of_time,\n", + " params={\"id\": 1, \"name\": \"Ayoub\"}\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1.2 Read full table" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], 
+ "source": [ + "df = wr.lakeformation.read_sql_table(\n", + " table=table,\n", + " database=database\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# 2. Write Governed table" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2.1 Create a new Governed table" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Enter your bucket name:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import getpass\n", + "\n", + "bucket = getpass.getpass()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### If a governed table does not exist, it can be created by passing an S3 `path` argument. Make sure your IAM user/role has enough permissions in the Lake Formation database" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2.1.1 CSV table" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import pandas as pd\n", + "\n", + "table = \"gov_table_csv\"\n", + "\n", + "df=pd.DataFrame({\n", + " \"col\": [1, 2, 3],\n", + " \"col2\": [\"A\", \"A\", \"B\"],\n", + " \"col3\": [None, \"test\", None]\n", + "})\n", + "# Note 1: If a transaction_id is not specified, a new transaction is started\n", + "# Note 2: When creating a new Governed table, `table_type=\"GOVERNED\"` must be specified. 
Otherwise the default is to create an EXTERNAL_TABLE\n", + "wr.s3.to_csv(\n", + " df=df,\n", + " path=f\"s3://{bucket}/{database}/{table}/\", # S3 path\n", + " dataset=True,\n", + " database=database,\n", + " table=table,\n", + " table_type=\"GOVERNED\"\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2.1.2 Parquet table" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "table = \"gov_table_parquet\"\n", + "\n", + "df = pd.DataFrame({\"c0\": [0, None]}, dtype=\"Int64\")\n", + "wr.s3.to_parquet(\n", + " df=df,\n", + " path=f\"s3://{bucket}/{database}/{table}/\",\n", + " dataset=True,\n", + " database=database,\n", + " table=table,\n", + " table_type=\"GOVERNED\",\n", + " description=\"c0\",\n", + " parameters={\"num_cols\": str(len(df.columns)), \"num_rows\": str(len(df.index))},\n", + " columns_comments={\"c0\": \"0\"}\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2.2 Overwrite operations" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2.2.1 Overwrite" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "df = pd.DataFrame({\"c1\": [None, 1, None]}, dtype=\"Int16\")\n", + "wr.s3.to_parquet(\n", + " df=df,\n", + " dataset=True,\n", + " mode=\"overwrite\",\n", + " database=database,\n", + " table=table,\n", + " description=\"c1\",\n", + " parameters={\"num_cols\": str(len(df.columns)), \"num_rows\": str(len(df.index))},\n", + " columns_comments={\"c1\": \"1\"}\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2.2.2 Append" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "df = pd.DataFrame({\"c1\": [None, 2, None]}, dtype=\"Int8\")\n", + "wr.s3.to_parquet(\n", + " df=df,\n", + " dataset=True,\n", + " mode=\"append\",\n", + " 
database=database,\n", + " table=table,\n", + " description=\"c1\",\n", + " parameters={\"num_cols\": str(len(df.columns)), \"num_rows\": str(len(df.index) * 2)},\n", + " columns_comments={\"c1\": \"1\"}\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2.2.3 Create partitioned Governed table" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "table = \"gov_table_parquet_partitioned\"\n", + "\n", + "df = pd.DataFrame({\"c0\": [\"foo\", None], \"c1\": [0, 1]})\n", + "wr.s3.to_parquet(\n", + " df=df,\n", + " path=f\"s3://{bucket}/{database}/{table}/\",\n", + " dataset=True,\n", + " database=database,\n", + " table=table,\n", + " table_type=\"GOVERNED\",\n", + " partition_cols=[\"c1\"],\n", + " description=\"c0+c1\",\n", + " parameters={\"num_cols\": \"2\", \"num_rows\": \"2\"},\n", + " columns_comments={\"c0\": \"zero\", \"c1\": \"one\"}\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2.2.4 Overwrite partitions" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "df = pd.DataFrame({\"c0\": [None, None], \"c1\": [0, 2]})\n", + "wr.s3.to_parquet(\n", + " df=df,\n", + " dataset=True,\n", + " mode=\"overwrite_partitions\",\n", + " database=database,\n", + " table=table,\n", + " partition_cols=[\"c1\"],\n", + " description=\"c0+c1\",\n", + " parameters={\"num_cols\": \"2\", \"num_rows\": \"3\"},\n", + " columns_comments={\"c0\": \"zero\", \"c1\": \"one\"}\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# 3. 
Multiple read/write operations within a transaction" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "read_table = \"gov_table_parquet\"\n", + "write_table = \"gov_table_multi_parquet\"\n", + "\n", + "transaction_id = wr.lakeformation.start_transaction(read_only=False)\n", + "\n", + "df = pd.DataFrame({\"c0\": [0, None]}, dtype=\"Int64\")\n", + "wr.s3.to_parquet(\n", + " df=df,\n", + " path=f\"s3://{bucket}/{database}/{write_table}_1\",\n", + " dataset=True,\n", + " database=database,\n", + " table=f\"{write_table}_1\",\n", + " table_type=\"GOVERNED\",\n", + " transaction_id=transaction_id,\n", + ")\n", + "\n", + "df2 = wr.lakeformation.read_sql_table(\n", + " table=read_table,\n", + " database=database,\n", + " transaction_id=transaction_id,\n", + " use_threads=True\n", + ")\n", + "\n", + "df3 = pd.DataFrame({\"c1\": [None, 1, None]}, dtype=\"Int16\")\n", + "wr.s3.to_parquet(\n", + " df=df3,\n", + " path=f\"s3://{bucket}/{database}/{write_table}_2\",\n", + " dataset=True,\n", + " mode=\"append\",\n", + " database=database,\n", + " table=f\"{write_table}_2\",\n", + " table_type=\"GOVERNED\",\n", + " transaction_id=transaction_id,\n", + ")\n", + "\n", + "wr.lakeformation.commit_transaction(transaction_id=transaction_id)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3.9.1 64-bit ('.venv': venv)", + "metadata": { + "interpreter": { + "hash": "2878c7ae46413c5ab07cafef85a7415922732432fa2f847b9105997e244ed975" + } + }, + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.1" + }, + "orig_nbformat": 2 + }, + "nbformat": 4, + "nbformat_minor": 2 +}