From 572dd6ae1e8519721bb3b065c6f2291b9dfc83b4 Mon Sep 17 00:00:00 2001 From: "guangjun.hgj" Date: Wed, 18 Sep 2024 20:42:55 +0800 Subject: [PATCH] 1.0.0.dev --- .gitignore | 10 + CHANGELOG.md | 1 + DEVGUIDE.md | 0 MANIFEST.in | 3 + README.md | 144 + alibabacloud_oss_v2/__init__.py | 56 + alibabacloud_oss_v2/_client.py | 612 ++++ alibabacloud_oss_v2/_version.py | 1 + alibabacloud_oss_v2/checkpoint.py | 385 +++ alibabacloud_oss_v2/client.py | 904 ++++++ alibabacloud_oss_v2/compat.py | 0 alibabacloud_oss_v2/config.py | 82 + alibabacloud_oss_v2/crc.py | 238 ++ alibabacloud_oss_v2/credentials/__init__.py | 2 + .../credentials/provider_impl.py | 58 + alibabacloud_oss_v2/crypto/__init__.py | 3 + alibabacloud_oss_v2/crypto/aes_ctr.py | 267 ++ alibabacloud_oss_v2/crypto/aes_ctr_cipher.py | 105 + alibabacloud_oss_v2/crypto/cipher.py | 23 + .../crypto/master_rsa_cipher.py | 49 + alibabacloud_oss_v2/crypto/types.py | 151 + alibabacloud_oss_v2/defaults.py | 64 + alibabacloud_oss_v2/downloader.py | 546 ++++ alibabacloud_oss_v2/encryption_client.py | 423 +++ alibabacloud_oss_v2/endpoints.py | 59 + alibabacloud_oss_v2/exceptions.py | 225 ++ alibabacloud_oss_v2/filelike.py | 908 ++++++ alibabacloud_oss_v2/io_utils.py | 594 ++++ alibabacloud_oss_v2/models/__init__.py | 6 + alibabacloud_oss_v2/models/bucket_basic.py | 1258 ++++++++ alibabacloud_oss_v2/models/enums.py | 109 + alibabacloud_oss_v2/models/object_basic.py | 2810 +++++++++++++++++ alibabacloud_oss_v2/models/region.py | 84 + alibabacloud_oss_v2/models/service.py | 143 + alibabacloud_oss_v2/operations/__init__.py | 5 + .../operations/bucket_basic.py | 478 +++ .../operations/object_basic.py | 984 ++++++ alibabacloud_oss_v2/operations/region.py | 40 + alibabacloud_oss_v2/operations/service.py | 44 + alibabacloud_oss_v2/paginator.py | 321 ++ alibabacloud_oss_v2/presigner.py | 220 ++ alibabacloud_oss_v2/progress.py | 40 + alibabacloud_oss_v2/retry/__init__.py | 16 + alibabacloud_oss_v2/retry/backoff.py | 116 + alibabacloud_oss_v2/retry/error_retryable.py | 98 + alibabacloud_oss_v2/retry/retryer_impl.py | 67 + alibabacloud_oss_v2/serde.py | 695 ++++ alibabacloud_oss_v2/serde_utils.py | 308 ++ alibabacloud_oss_v2/signer/__init__.py | 5 + alibabacloud_oss_v2/signer/v1.py | 230 ++ alibabacloud_oss_v2/signer/v4.py | 332 ++ alibabacloud_oss_v2/transport/__init__.py | 2 + .../transport/requests_client.py | 208 ++ alibabacloud_oss_v2/types.py | 451 +++ alibabacloud_oss_v2/uploader.py | 677 ++++ alibabacloud_oss_v2/utils.py | 244 ++ alibabacloud_oss_v2/validation.py | 58 + sample/append_object.py | 71 + sample/copy_object.py | 56 + sample/delete_bucket.py | 40 + sample/delete_multiple_objects.py | 54 + sample/delete_object.py | 45 + sample/get_bucket_info.py | 63 + sample/get_bucket_location.py | 42 + sample/get_bucket_stat.py | 62 + sample/get_bucket_version.py | 43 + sample/get_object.py | 66 + sample/get_object_meta.py | 50 + sample/head_object.py | 70 + sample/list_buckets.py | 43 + sample/list_multipart_uploads.py | 45 + sample/list_object_versions.py | 44 + sample/list_objects.py | 45 + sample/list_objects_v2.py | 45 + sample/list_parts.py | 50 + sample/put_bucket.py | 44 + sample/put_bucket_version.py | 45 + sample/put_object.py | 50 + sample/restore_object.py | 67 + setup.cfg | 2 + setup.py | 58 + tests/__init__.py | 1 + tests/data/enc-example.jpg | Bin 0 -> 21839 bytes tests/data/example.jpg | Bin 0 -> 21839 bytes tests/integration/__init__.py | 212 ++ tests/integration/test_client.py | 2227 +++++++++++++ tests/integration/test_client_filelike.py | 984 
++++++ tests/integration/test_encryption_client.py | 470 +++ tests/unit/__init__.py | 109 + tests/unit/crypto/__init__.py | 0 tests/unit/crypto/test_aes_ctr.py | 337 ++ tests/unit/crypto/test_master_rsa_cipher.py | 116 + tests/unit/models/__init__.py | 0 tests/unit/models/test_bucket_basic.py | 1639 ++++++++++ tests/unit/models/test_object_basic.py | 1721 ++++++++++ tests/unit/models/test_region.py | 121 + tests/unit/models/test_service.py | 7 + tests/unit/operations/__init__.py | 77 + tests/unit/operations/test_bucket_basic.py | 423 +++ tests/unit/operations/test_object_basic.py | 523 +++ tests/unit/operations/test_region.py | 15 + tests/unit/operations/test_service.py | 8 + tests/unit/retry/__init__.py | 0 tests/unit/retry/test_backoff.py | 40 + tests/unit/retry/test_error_retryable.py | 65 + tests/unit/retry/test_retryer_impl.py | 99 + tests/unit/signer/__init__.py | 0 tests/unit/signer/test_v1.py | 229 ++ tests/unit/signer/test_v4.py | 432 +++ tests/unit/test_checkpoint.py | 409 +++ tests/unit/test_client.py | 179 ++ tests/unit/test_config.py | 6 + tests/unit/test_crc.py | 30 + tests/unit/test_io_utils.py | 39 + tests/unit/test_presign.py | 351 ++ tests/unit/test_serde.py | 1917 +++++++++++ tests/unit/test_utils.py | 24 + tests/unit/transport/__init__.py | 0 tests/unit/transport/test_requests_client.py | 1 + 119 files changed, 29973 insertions(+) create mode 100644 .gitignore create mode 100644 CHANGELOG.md create mode 100644 DEVGUIDE.md create mode 100644 MANIFEST.in create mode 100644 README.md create mode 100644 alibabacloud_oss_v2/__init__.py create mode 100644 alibabacloud_oss_v2/_client.py create mode 100644 alibabacloud_oss_v2/_version.py create mode 100644 alibabacloud_oss_v2/checkpoint.py create mode 100644 alibabacloud_oss_v2/client.py create mode 100644 alibabacloud_oss_v2/compat.py create mode 100644 alibabacloud_oss_v2/config.py create mode 100644 alibabacloud_oss_v2/crc.py create mode 100644 alibabacloud_oss_v2/credentials/__init__.py create mode 100644 alibabacloud_oss_v2/credentials/provider_impl.py create mode 100644 alibabacloud_oss_v2/crypto/__init__.py create mode 100644 alibabacloud_oss_v2/crypto/aes_ctr.py create mode 100644 alibabacloud_oss_v2/crypto/aes_ctr_cipher.py create mode 100644 alibabacloud_oss_v2/crypto/cipher.py create mode 100644 alibabacloud_oss_v2/crypto/master_rsa_cipher.py create mode 100644 alibabacloud_oss_v2/crypto/types.py create mode 100644 alibabacloud_oss_v2/defaults.py create mode 100644 alibabacloud_oss_v2/downloader.py create mode 100644 alibabacloud_oss_v2/encryption_client.py create mode 100644 alibabacloud_oss_v2/endpoints.py create mode 100644 alibabacloud_oss_v2/exceptions.py create mode 100644 alibabacloud_oss_v2/filelike.py create mode 100644 alibabacloud_oss_v2/io_utils.py create mode 100644 alibabacloud_oss_v2/models/__init__.py create mode 100644 alibabacloud_oss_v2/models/bucket_basic.py create mode 100644 alibabacloud_oss_v2/models/enums.py create mode 100644 alibabacloud_oss_v2/models/object_basic.py create mode 100644 alibabacloud_oss_v2/models/region.py create mode 100644 alibabacloud_oss_v2/models/service.py create mode 100644 alibabacloud_oss_v2/operations/__init__.py create mode 100644 alibabacloud_oss_v2/operations/bucket_basic.py create mode 100644 alibabacloud_oss_v2/operations/object_basic.py create mode 100644 alibabacloud_oss_v2/operations/region.py create mode 100644 alibabacloud_oss_v2/operations/service.py create mode 100644 alibabacloud_oss_v2/paginator.py create mode 100644 alibabacloud_oss_v2/presigner.py 
create mode 100644 alibabacloud_oss_v2/progress.py create mode 100644 alibabacloud_oss_v2/retry/__init__.py create mode 100644 alibabacloud_oss_v2/retry/backoff.py create mode 100644 alibabacloud_oss_v2/retry/error_retryable.py create mode 100644 alibabacloud_oss_v2/retry/retryer_impl.py create mode 100644 alibabacloud_oss_v2/serde.py create mode 100644 alibabacloud_oss_v2/serde_utils.py create mode 100644 alibabacloud_oss_v2/signer/__init__.py create mode 100644 alibabacloud_oss_v2/signer/v1.py create mode 100644 alibabacloud_oss_v2/signer/v4.py create mode 100644 alibabacloud_oss_v2/transport/__init__.py create mode 100644 alibabacloud_oss_v2/transport/requests_client.py create mode 100644 alibabacloud_oss_v2/types.py create mode 100644 alibabacloud_oss_v2/uploader.py create mode 100644 alibabacloud_oss_v2/utils.py create mode 100644 alibabacloud_oss_v2/validation.py create mode 100644 sample/append_object.py create mode 100644 sample/copy_object.py create mode 100644 sample/delete_bucket.py create mode 100644 sample/delete_multiple_objects.py create mode 100644 sample/delete_object.py create mode 100644 sample/get_bucket_info.py create mode 100644 sample/get_bucket_location.py create mode 100644 sample/get_bucket_stat.py create mode 100644 sample/get_bucket_version.py create mode 100644 sample/get_object.py create mode 100644 sample/get_object_meta.py create mode 100644 sample/head_object.py create mode 100644 sample/list_buckets.py create mode 100644 sample/list_multipart_uploads.py create mode 100644 sample/list_object_versions.py create mode 100644 sample/list_objects.py create mode 100644 sample/list_objects_v2.py create mode 100644 sample/list_parts.py create mode 100644 sample/put_bucket.py create mode 100644 sample/put_bucket_version.py create mode 100644 sample/put_object.py create mode 100644 sample/restore_object.py create mode 100644 setup.cfg create mode 100644 setup.py create mode 100644 tests/__init__.py create mode 100644 tests/data/enc-example.jpg create mode 100644 tests/data/example.jpg create mode 100644 tests/integration/__init__.py create mode 100644 tests/integration/test_client.py create mode 100644 tests/integration/test_client_filelike.py create mode 100644 tests/integration/test_encryption_client.py create mode 100644 tests/unit/__init__.py create mode 100644 tests/unit/crypto/__init__.py create mode 100644 tests/unit/crypto/test_aes_ctr.py create mode 100644 tests/unit/crypto/test_master_rsa_cipher.py create mode 100644 tests/unit/models/__init__.py create mode 100644 tests/unit/models/test_bucket_basic.py create mode 100644 tests/unit/models/test_object_basic.py create mode 100644 tests/unit/models/test_region.py create mode 100644 tests/unit/models/test_service.py create mode 100644 tests/unit/operations/__init__.py create mode 100644 tests/unit/operations/test_bucket_basic.py create mode 100644 tests/unit/operations/test_object_basic.py create mode 100644 tests/unit/operations/test_region.py create mode 100644 tests/unit/operations/test_service.py create mode 100644 tests/unit/retry/__init__.py create mode 100644 tests/unit/retry/test_backoff.py create mode 100644 tests/unit/retry/test_error_retryable.py create mode 100644 tests/unit/retry/test_retryer_impl.py create mode 100644 tests/unit/signer/__init__.py create mode 100644 tests/unit/signer/test_v1.py create mode 100644 tests/unit/signer/test_v4.py create mode 100644 tests/unit/test_checkpoint.py create mode 100644 tests/unit/test_client.py create mode 100644 tests/unit/test_config.py create mode 100644 
tests/unit/test_crc.py create mode 100644 tests/unit/test_io_utils.py create mode 100644 tests/unit/test_presign.py create mode 100644 tests/unit/test_serde.py create mode 100644 tests/unit/test_utils.py create mode 100644 tests/unit/transport/__init__.py create mode 100644 tests/unit/transport/test_requests_client.py

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..c2340bf
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,10 @@
+*.pyc
+.settings
+.coverage
+.pydevproject
+.vscode/
+.idea
+.DS_Store
+dist
+build
+*egg-info
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..8718b02
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1 @@
+# ChangeLog - Alibaba Cloud OSS SDK for Python v2
diff --git a/DEVGUIDE.md b/DEVGUIDE.md
new file mode 100644
index 0000000..e69de29
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..73c6558
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,3 @@
+include LICENSE
+include README.md
+include CHANGELOG.md
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..6b732f6
--- /dev/null
+++ b/README.md
@@ -0,0 +1,144 @@
+# Alibaba Cloud OSS SDK for Python v2
+
+[![GitHub version](https://badge.fury.io/gh/aliyun%2Falibabacloud-oss-python-sdk-v2.svg)](https://badge.fury.io/gh/aliyun%2Falibabacloud-oss-python-sdk-v2)
+
+alibabacloud-oss-python-sdk-v2 is the Developer Preview of the v2 OSS SDK for the Python programming language.
+
+## [README in Chinese](README-CN.md)
+
+## About
+> - This Python SDK is based on the official APIs of [Alibaba Cloud OSS](http://www.aliyun.com/product/oss/).
+> - Alibaba Cloud Object Storage Service (OSS) is a cloud storage service provided by Alibaba Cloud, featuring massive capacity, security, low cost, and high reliability.
+> - OSS can store any type of file, so it is suitable for various websites, enterprises, and developers.
+> - With this SDK, you can upload, download, and manage data from any application, anytime and anywhere.
+
+## Running Environment
+> - Python 3.8 or above.
+
+## Installing
+### Install the official release version through pip
+```bash
+$ pip install alibabacloud-oss-v2
+```
+
+### Install from the unzipped installer package directly
+```bash
+$ sudo python setup.py install
+```
+
+## Getting Started
+#### List Buckets
+```python
+import alibabacloud_oss_v2 as oss
+
+def main():
+
+    region = "cn-hangzhou"
+
+    # Load credentials from the environment variables
+    credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider()
+
+    # Use the SDK's default configuration
+    cfg = oss.config.load_default()
+    cfg.credentials_provider = credentials_provider
+    cfg.region = region
+
+    client = oss.Client(cfg)
+
+    # Create the Paginator for the ListBuckets operation
+    paginator = client.list_buckets_paginator()
+
+    # Iterate through the bucket pages
+    for page in paginator.iter_page(oss.ListBucketsRequest()):
+        for o in page.buckets:
+            print(f'Bucket: {o.name}, {o.location}, {o.creation_date} {o.resource_group_id}')
+
+if __name__ == "__main__":
+    main()
+
+```
+
+#### List Objects
+```python
+import alibabacloud_oss_v2 as oss
+
+def main():
+
+    region = "cn-hangzhou"
+    bucket_name = "your bucket name"
+
+    # Load credentials from the environment variables
+    credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider()
+
+    # Use the SDK's default configuration
+    cfg = oss.config.load_default()
+    cfg.credentials_provider = credentials_provider
+    cfg.region = region
+
+    client = oss.Client(cfg)
+
+    # Create the Paginator for the ListObjectsV2 operation
+    paginator = client.list_objects_v2_paginator()
+
+    # Iterate through the object pages
+    for page in paginator.iter_page(oss.ListObjectsV2Request(
+        bucket=bucket_name
+    )):
+        for o in page.contents:
+            print(f'Object: {o.key}, {o.size}, {o.last_modified}')
+
+if __name__ == "__main__":
+    main()
+
+```
+
+#### Put Object
+```python
+import alibabacloud_oss_v2 as oss
+
+def main():
+
+    region = "cn-hangzhou"
+    bucket_name = "your bucket name"
+    object_name = "your object name"
+    local_file = "your local file path"
+
+    # Load credentials from the environment variables
+    credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider()
+
+    # Use the SDK's default configuration
+    cfg = oss.config.load_default()
+    cfg.credentials_provider = credentials_provider
+    cfg.region = region
+
+    client = oss.Client(cfg)
+
+    with open(local_file, 'rb') as f:
+        result = client.put_object(oss.PutObjectRequest(
+            bucket=bucket_name,
+            key=object_name,
+            body=f,
+        ))
+
+    print(f'put object successfully, ETag {result.etag}')
+
+if __name__ == "__main__":
+    main()
+
+```
+
+## Complete Example
+More example projects can be found in the `sample` folder.
+
+### Running Example
+> - Go to the sample code folder `sample`.
+> - Configure credentials through the environment variables, for example `export OSS_ACCESS_KEY_ID="your access key id"` and `export OSS_ACCESS_KEY_SECRET="your access key secret"`.
+> - Take list_buckets.py as an example: run the `python list_buckets.py --region cn-hangzhou` command.
+
+## License
+> - Apache-2.0, see [license file](LICENSE)
diff --git a/alibabacloud_oss_v2/__init__.py b/alibabacloud_oss_v2/__init__.py
new file mode 100644
index 0000000..37151c0
--- /dev/null
+++ b/alibabacloud_oss_v2/__init__.py
@@ -0,0 +1,56 @@
+from .types import *
+
+# sub mod
+from . import credentials
+from . import retry
+from . import signer
+from . import transport
+from . import models
+from . import exceptions
+from . import crypto
+from . import checkpoint
+
+# all types in models
+from .models.enums import *
+from .models.service import *
+from .models.region import *
+from .models.bucket_basic import *
+from .models.object_basic import *
+
+from .config import Config
+from .client import Client
+from .encryption_client import EncryptionClient, EncryptionMultiPartContext
+
+from .downloader import (
+    Downloader,
+    DownloadResult,
+    DownloadError,
+)
+
+from .uploader import (
+    Uploader,
+    UploadResult,
+    UploadError
+)
+
+from .paginator import (
+    ListObjectsPaginator,
+    ListObjectsV2Paginator,
+    ListObjectVersionsPaginator,
+    ListBucketsPaginator,
+    ListPartsPaginator,
+    ListMultipartUploadsPaginator
+)
+
+from .filelike import (
+    AppendOnlyFile,
+    ReadOnlyFile,
+    PathError
+)
+
+from .io_utils import (
+    StreamBodyDiscarder
+)
+
+from ._version import VERSION
+__version__ = VERSION
diff --git a/alibabacloud_oss_v2/_client.py b/alibabacloud_oss_v2/_client.py
new file mode 100644
index 0000000..3ad27c9
--- /dev/null
+++ b/alibabacloud_oss_v2/_client.py
@@ -0,0 +1,612 @@
+import copy
+import time
+import base64
+import re
+from typing import Any, Optional, Dict, Iterable, IO, List, Union, cast
+from urllib.parse import urlparse, ParseResult, urlencode, quote
+from xml.etree import ElementTree as ET
+from . import retry
+from . import transport
+from . import exceptions
+from . import utils
+from . import defaults
+from . import validation
+from . import serde
+from . import io_utils
+from . import endpoints
+from .signer import SignerV4, SignerV1
+from .credentials import AnonymousCredentialsProvider
+from .config import Config
+from .types import (
+    Retryer,
+    CredentialsProvider,
+    HttpClient,
+    HttpRequest,
+    HttpResponse,
+    SigningContext,
+    Signer,
+    BodyType,
+    OperationInput,
+    OperationOutput,
+)
+
+# Feature flags
+FF_CORRECT_CLOCK_SKEW = 0x00000001
+FF_ENABLE_MD5 = 0x00000002
+FF_AUTO_DETECT_MIME_TYPE = 0x00000004
+FF_ENABLE_CRC64_CHECK_UPLOAD = 0x00000008
+FF_ENABLE_CRC64_CHECK_DOWNLOAD = 0x00000010
+
+FF_DEFAULT = (FF_CORRECT_CLOCK_SKEW + FF_AUTO_DETECT_MIME_TYPE +
+              FF_ENABLE_CRC64_CHECK_UPLOAD + FF_ENABLE_CRC64_CHECK_DOWNLOAD)
+
+class AddressStyle:
+    """Addressing style used to build the request URL."""
+    Virtual = 1
+    Path = 2
+    CName = 3
+
+
+class _MarkedBody:
+    def __init__(
+        self,
+        body: BodyType,
+    ) -> None:
+        self._body = body
+        self._io_curr: int = 0
+        if body is None:
+            self._seekable = True
+        elif isinstance(body, io_utils.TeeIterator):
+            self._seekable = body.seekable()
+        elif isinstance(body, (str, bytes, Iterable)):
+            self._seekable = True
+        elif isinstance(body, IO):
+            self._seekable = utils.is_seekable(body)
+        else:
+            self._seekable = False
+
+    def is_seekable(self) -> bool:
+        """Whether the body can be replayed for a retry.
+
+        Returns:
+            bool: True if the body supports mark/reset.
+        """
+        return self._seekable
+
+    def mark(self) -> None:
+        """Record the current position of a seekable IO body."""
+        if self.is_seekable() is False:
+            return
+
+        if isinstance(self._body, IO):
+            self._io_curr = self._body.tell()
+
+    def reset(self) -> None:
+        """Rewind the body to the marked position before a retry."""
+        if self.is_seekable() is False:
+            return
+
+        if isinstance(self._body, io_utils.TeeIterator):
+            self._body.reset()
+        elif isinstance(self._body, IO):
+            # seek back to the position recorded by mark()
+            self._body.seek(self._io_curr)
+
+
+class _Options:
+    """Client-level configuration."""
+
+    def __init__(
+        self,
+        product: str,
+        region: str,
+        endpoint: Optional[ParseResult] = None,
+        retry_max_attempts: Optional[int] = None,
+        retryer: Optional[Retryer] = None,
+        signer: Optional[Signer] = None,
+        credentials_provider: Optional[CredentialsProvider] = None,
+        http_client: Optional[HttpClient] = None,
+        address_style: Optional[AddressStyle] = None,
+        readwrite_timeout: Optional[int] = None,
+        response_handlers: Optional[List] = None,
+        response_stream: Optional[bool] = None,
+        auth_method: Optional[str] = None,
+        feature_flags: Optional[int] = None,
+    ) -> None:
+        self.product = product
+        self.region = region
+        self.endpoint = endpoint
+        self.retry_max_attempts = retry_max_attempts
+        self.retryer = retryer
+        self.signer = signer
+        self.credentials_provider = credentials_provider
+        self.http_client = http_client
+        self.address_style = address_style
+        self.readwrite_timeout = readwrite_timeout
+        self.response_handlers = response_handlers or []
+        self.response_stream = response_stream
+        self.auth_method = auth_method
+        self.feature_flags = feature_flags or FF_DEFAULT
+
+
+class _ClientImplMixIn:
+    """Client implementation mix-in."""
+
+    def resolve_config(self, config: Config) -> _Options:
+        """Convert config into the client's options."""
+
+        options = _default_options(config)
+        # Logger
+
+        # Default UserAgent
+
+        _resolve_endpoint(config, options)
+        _resolve_retryer(config, options)
+        _resolve_signer(config, options)
+        _resolve_address_style(config, options)
+        _resolve_feature_flags(config, options)
+        self._resolve_httpclient(config, options)  # pylint: disable=no-member
+
+        return options
+
+    def resolve_kwargs(self, options: _Options, **kwargs):
+        """Client configuration overrides passed by the user as key/value args."""
+
+        if len(kwargs) == 0:
+            return
+
+        options.product = kwargs.get("product", options.product)
+        options.region = kwargs.get("region", options.region)
+        options.endpoint = kwargs.get("endpoint", options.endpoint)
+        options.retry_max_attempts = kwargs.get("retry_max_attempts", options.retry_max_attempts)
+        options.retryer = kwargs.get("retryer", options.retryer)
+        options.signer = kwargs.get("signer", options.signer)
+        options.credentials_provider = kwargs.get("credentials_provider", options.credentials_provider)
+        options.http_client = kwargs.get("http_client", options.http_client)
+        options.address_style = kwargs.get("address_style", options.address_style)
+        options.readwrite_timeout = kwargs.get("readwrite_timeout", options.readwrite_timeout)
+        options.auth_method = kwargs.get("auth_method", None)
+
+
+    def resolve_operation_kwargs(self, options: _Options, **kwargs):
+        """Per-operation configuration overrides passed by the user as key/value args."""
+
+        if len(kwargs) == 0:
+            return
+
+        options.retry_max_attempts = kwargs.get("retry_max_attempts", options.retry_max_attempts)
+        options.retryer = kwargs.get("retryer", options.retryer)
+        options.http_client = kwargs.get("http_client", options.http_client)
+        options.readwrite_timeout = kwargs.get("readwrite_timeout", options.readwrite_timeout)
+        options.auth_method = kwargs.get("auth_method", options.auth_method)
+
+    def verify_operation(self, op_input: OperationInput, options: _Options) -> None:
+        """Verify the operation input and options."""
+
+        if options.endpoint is None:
+            raise exceptions.ParamInvalidError(field="endpoint")
+
+        if (op_input.bucket is not None and
+                not validation.is_valid_bucket_name(op_input.bucket)):
+            raise exceptions.BucketNameInvalidError(
+                name=utils.safety_str(op_input.bucket))
+
+        if (op_input.key is not None and
+                not validation.is_valid_object_name(op_input.key)):
+            raise exceptions.ObjectNameInvalidError()
+
+    def apply_operation(self, options: _Options, op_input: OperationInput) -> None:
+        """Apply per-operation options and metadata."""
+        self._apply_operation_options(options)  # pylint: disable=no-member
+        _apply_operation_metadata(op_input, options)
+
+
+    def build_request_context(self, op_input: OperationInput, options: _Options) -> SigningContext:
+        """Build the signing context for a request."""
+        # host & path
+        url = _build_url(op_input, options)
+
+        # queries
+        if op_input.parameters is not None:
+            query = urlencode(op_input.parameters, quote_via=quote)
+            if len(query) > 0:
+                url = url + "?" + query
+
+        # build http request
+        request = HttpRequest(method=op_input.method, url=url)
+
+        # headers
+        request.headers.update(op_input.headers or {})
+
+        # body
+        body = op_input.body or b''
+
+        # body tracker
+        if op_input.op_metadata is not None:
+            tracker = op_input.op_metadata.get("opm-request-body-tracker", None)
+            if tracker is not None:
+                writers = []
+                for t in tracker:
+                    if hasattr(t, 'write'):
+                        writers.append(t)
+                if len(writers) > 0:
+                    body = io_utils.TeeIterator.from_source(body, writers)
+
+        request.body = body
+
+        # signing context
+        context = SigningContext(
+            product=options.product,
+            region=options.region,
+            bucket=op_input.bucket,
+            key=op_input.key,
+            request=request,
+        )
+
+        if utils.safety_str(options.auth_method) == 'query':
+            context.auth_method_query = True
+
+        oss_date = request.headers.get('x-oss-date', None)
+        if oss_date is not None:
+            context.signing_time = serde.deserialize_httptime(oss_date)
+        if (expiration_time := op_input.op_metadata.get('expiration_time', None)) is not None:
+            context.expiration_time = expiration_time
+
+        return context
+
+    def retry_max_attempts(self, options: _Options) -> int:
+        """Resolve the effective maximum number of attempts."""
+        if options.retry_max_attempts is not None:
+            attempts = int(options.retry_max_attempts)
+        elif options.retryer is not None:
+            attempts = options.retryer.max_attempts()
+        else:
+            attempts = defaults.DEFAULT_MAX_ATTEMPTS
+
+        return max(1, attempts)
+
+    def has_feature(self, flag: int) -> bool:
+        """Whether the given feature flag is enabled."""
+        return (self._options.feature_flags & flag) > 0  # pylint: disable=no-member
+
+    def get_retry_attempts(self) -> int:
+        """The effective maximum number of attempts."""
+        return self.retry_max_attempts(self._options)  # pylint: disable=no-member
+
+class _SyncClientImpl(_ClientImplMixIn):
+    """Sync API Client for common API."""
+
+    def __init__(self, config: Config, **kwargs) -> None:
+        options = self.resolve_config(config)
+        self.resolve_kwargs(options, **kwargs)
+
+        self._config = config
+        self._options = options
+
+    def invoke_operation(self, op_input: OperationInput, **kwargs) -> OperationOutput:
+        """Invoke an operation: verify the input, send the request, and wrap errors.
+
+        Args:
+            op_input (OperationInput): The operation input.
+
+        Raises:
+            exceptions.OperationError: If the operation fails for any reason.
+
+        Returns:
+            OperationOutput: The operation output.
+        """
+
+        options = copy.copy(self._options)
+        self.resolve_operation_kwargs(options, **kwargs)
+        self.apply_operation(options, op_input)
+
+        try:
+            self.verify_operation(op_input, options)
+            output = self._sent_request(op_input, options)
+        except Exception as err:
+            raise exceptions.OperationError(
+                name=op_input.op_name,
+                error=err,
+            )
+
+        return output
+
+    def _resolve_httpclient(self, config: Config, options: _Options) -> None:
+        """httpclient"""
+        if options.http_client:
+            return
+
+        kwargs: Dict[str, Any] = {}
+
+        if bool(config.insecure_skip_verify):
+            kwargs["insecure_skip_verify"] = True
+
+        if bool(config.enabled_redirect):
+            kwargs["enabled_redirect"] = True
+
+        if config.connect_timeout:
+            kwargs["connect_timeout"] = config.connect_timeout
+
+        if config.readwrite_timeout:
+            kwargs["readwrite_timeout"] = config.readwrite_timeout
+
+        options.http_client = transport.RequestsHttpClient(**kwargs)
+
+
+    def _apply_operation_options(self, options: _Options) -> None:
+        # response handler
+        handlers = []
+
+        def service_error_response_handler(response: HttpResponse) -> None:
+            """Check for a service error."""
+            if response.status_code // 100 == 2:
+                return
+
+            if not response.is_stream_consumed:
+                _ = response.read()
+
+            raise _to_service_error(response)
+
+        # insert the service error response handler first
+        handlers.append(service_error_response_handler)
+
+        handlers.extend(options.response_handlers)
+
+        options.response_handlers = handlers
+
+    def _sent_request(self, op_input: OperationInput, options: _Options) -> OperationOutput:
+        context = self.build_request_context(op_input, options)
+        response = self._sent_http_request(context, options)
+        output = OperationOutput(
+            status=response.reason,
+            status_code=response.status_code,
+            headers=response.headers,
+            op_input=op_input,
+            http_response=response
+        )
+
+        # save other info in the Metadata field
+        # output.op_metadata
+        if context.auth_method_query:
+            output.op_metadata['expiration_time'] = context.expiration_time
+
+        # update clock offset
+
+        return output
+
+    def _sent_http_request(self, context: SigningContext, options: _Options) -> HttpResponse:
+        request = context.request
+        retryer = options.retryer
+        max_attempts = self.retry_max_attempts(options)
+
+        # Mark body
+        marked_body = _MarkedBody(request.body)
+        marked_body.mark()
+
+        reset_time = context.signing_time is None
+        error: Optional[Exception] = None
+        response: HttpResponse = None
+        for tries in range(max_attempts):
+            if tries > 0:
+                try:
+                    marked_body.reset()
+                except:  # pylint: disable=bare-except
+                    # if the reset fails, just give up and return the last error
+                    break
+
+                if reset_time:
+                    context.signing_time = None
+
+                delay = retryer.retry_delay(tries, error)
+                time.sleep(delay)
+
+            try:
+                error = None
+                response = self._sent_http_request_once(context, options)
+                break
+            except Exception as e:
+                error = e
+
+                if marked_body.is_seekable() is False:
+                    break
+
+                if not retryer.is_error_retryable(error):
+                    break
+
+        if error is not None:
+            raise error
+
+        return response
+
+    def _sent_http_request_once(self, context: SigningContext, options: _Options) -> HttpResponse:
+        # sign request
+        if not isinstance(options.credentials_provider, AnonymousCredentialsProvider):
+            try:
+                cred = options.credentials_provider.get_credentials()
+            except Exception as e:
+                raise exceptions.CredentialsFetchError(error=e)
+
+            if cred is None or not cred.has_keys():
+                raise exceptions.CredentialsEmptyError()
+
+            # update credentials
+            context.credentials = cred
+
+            options.signer.sign(context)
+
+        # send
+        send_kwargs = {}
+        if options.response_stream is not None:
+            send_kwargs['stream'] = options.response_stream
+
+        response = options.http_client.send(context.request, **send_kwargs)
+
+        # response handler
+        for h in options.response_handlers:
+            h(response)
+
+        return response
+
+def _default_options(config: Config) -> _Options:
+    """Convert config to options."""
+    return _Options(
+        product=defaults.DEFAULT_PRODUCT,
+        region=config.region,
+        retry_max_attempts=config.retry_max_attempts,
+        retryer=cast(Retryer, config.retryer),
+        credentials_provider=cast(
+            CredentialsProvider, config.credentials_provider),
+        http_client=cast(HttpClient, config.http_client),
+    )
+
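+
+# Note on _resolve_endpoint below: an explicitly configured config.endpoint
+# always wins; otherwise the host is derived from the region plus the selected
+# endpoint type (default, internal, dualstack, or accelerate), e.g. the default
+# public endpoint for cn-hangzhou is oss-cn-hangzhou.aliyuncs.com. The exact
+# host patterns are produced by endpoints.from_region.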
+def _resolve_endpoint(config: Config, options: _Options) -> None:
+    """Resolve the endpoint."""
+    disable_ssl = utils.safety_bool(config.disable_ssl)
+    endpoint = utils.safety_str(config.endpoint)
+    region = utils.safety_str(config.region)
+    if len(endpoint) > 0:
+        endpoint = endpoints.add_scheme(endpoint, disable_ssl)
+    elif validation.is_valid_region(region):
+        if bool(config.use_dualstack_endpoint):
+            etype = "dualstack"
+        elif bool(config.use_internal_endpoint):
+            etype = "internal"
+        elif bool(config.use_accelerate_endpoint):
+            etype = "accelerate"
+        else:
+            etype = "default"
+
+        endpoint = endpoints.from_region(region, disable_ssl, etype)
+
+    if endpoint == "":
+        return
+
+    options.endpoint = urlparse(endpoint)
+
+
+def _resolve_retryer(_: Config, options: _Options) -> None:
+    """Resolve the retryer."""
+    if options.retryer:
+        return
+
+    options.retryer = retry.StandardRetryer()
+
+
+def _resolve_signer(config: Config, options: _Options) -> None:
+    """Resolve the signer."""
+    if options.signer:
+        return
+
+    if utils.safety_str(config.signature_version) == "v1":
+        options.signer = SignerV1()
+    else:
+        options.signer = SignerV4()
+
+
+def _resolve_address_style(config: Config, options: _Options) -> None:
+    """Resolve the address style."""
+    if bool(config.use_cname):
+        style = AddressStyle.CName
+    elif bool(config.use_path_style):
+        style = AddressStyle.Path
+    else:
+        style = AddressStyle.Virtual
+
+    # if the endpoint is an IP, switch to path-style
+    if options.endpoint:
+        hostname = options.endpoint.hostname
+        if endpoints.is_ip(hostname):
+            style = AddressStyle.Path
+
+    options.address_style = style
+
+
+def _resolve_feature_flags(_1: Config, _2: _Options) -> None:
+    """Resolve feature flags."""
+
+
+def _apply_operation_metadata(op_input: OperationInput, options: _Options) -> None:
+    handlers = op_input.op_metadata.get('opm-response-handler', None)
+    if handlers is not None:
+        options.response_handlers.extend(handlers)
+
+    stream = op_input.op_metadata.get('response-stream', None)
+    if stream is not None:
+        options.response_stream = stream
+
+
+def _build_url(op_input: OperationInput, options: _Options) -> str:
+    host = ""
+    paths = []
+    if op_input.bucket is None:
+        host = options.endpoint.hostname
+    else:
+        if options.address_style == AddressStyle.Path:
+            host = options.endpoint.hostname
+            paths.append(op_input.bucket)
+        elif options.address_style == AddressStyle.CName:
+            host = options.endpoint.hostname
+        else:
+            host = f'{op_input.bucket}.{options.endpoint.hostname}'
+
+    if op_input.key is not None:
+        paths.append(quote(op_input.key))
+
+    return f'{options.endpoint.scheme}://{host}/{"/".join(paths)}'
+
+
+def _to_service_error(response: HttpResponse) -> exceptions.ServiceError:
+    timestamp = serde.deserialize_httptime(response.headers.get('Date'))
+    content = response.content or b''
+    response.close()
+
+    error_fileds = {}
+    code = 'BadErrorResponse'
+    message = ''
+    ec = ''
+    request_id = ''
+    err_body = b''
+    try:
+        err_body = content
+        if len(err_body) == 0:
+            err_body = base64.b64decode(
+                response.headers.get('x-oss-err', ''))
+        root = ET.fromstring(err_body)
+        if root.tag == 'Error':
+            for child in root:
+                error_fileds[child.tag] = child.text
+            message = error_fileds.get('Message', '')
+            code = error_fileds.get('Code', '')
+            ec = error_fileds.get('EC', '')
+            request_id = error_fileds.get('RequestId', '')
+        else:
+            message = f'Expect root node Error, but get {root.tag}.'
+    except ET.ParseError as e:
+        errstr = err_body.decode()
+        if '<Code>' in errstr and '<Message>' in errstr:
+            m = re.search('<Code>(.*)</Code>', errstr)
+            if m:
+                code = m.group(1)
+            m = re.search('<Message>(.*)</Message>', errstr)
+            if m:
+                message = m.group(1)
+        if len(message) == 0:
+            message = f'Failed to parse xml from response body due to: {str(e)}. With part response body {err_body[:256]}.'
+    except Exception as e:
+        message = f'The body of the response was not readable, due to: {str(e)}.'
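+
+    # Build a ServiceError from whatever was recovered above, preferring the
+    # parsed XML fields and falling back to the x-oss-request-id / x-oss-ec
+    # response headers when the body did not carry them.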
+    return exceptions.ServiceError(
+        status_code=response.status_code,
+        code=code,
+        message=message,
+        request_id=request_id or response.headers.get('x-oss-request-id', ''),
+        ec=ec or response.headers.get('x-oss-ec', ''),
+        timestamp=timestamp,
+        request_target=f'{response.request.method} {response.request.url}',
+        snapshot=content,
+        headers=response.headers,
+        error_fileds=error_fileds
+    )
+
diff --git a/alibabacloud_oss_v2/_version.py b/alibabacloud_oss_v2/_version.py
new file mode 100644
index 0000000..ccfcbd1
--- /dev/null
+++ b/alibabacloud_oss_v2/_version.py
@@ -0,0 +1 @@
+VERSION = "1.0.0.dev"
\ No newline at end of file
diff --git a/alibabacloud_oss_v2/checkpoint.py b/alibabacloud_oss_v2/checkpoint.py
new file mode 100644
index 0000000..1ce2aa5
--- /dev/null
+++ b/alibabacloud_oss_v2/checkpoint.py
@@ -0,0 +1,385 @@
+import os
+import json
+from typing import MutableMapping, Dict
+from urllib.parse import quote
+from tempfile import gettempdir
+import hashlib
+from .models import GetObjectRequest, PutObjectRequest
+from .utils import safety_str, parse_http_range
+from .io_utils import LimitReader
+from .crc import Crc64
+from .defaults import (
+    CHECKPOINT_FILE_SUFFIX_DOWNLOADER,
+    CHECKPOINT_FILE_SUFFIX_UPLOADER,
+    CHECKPOINT_MAGIC
+)
+
+# ----- download checkpoint -----
+# {
+#   "CpDirPath": string,   // checkpoint dir full path
+#   "CpFilePath": string,  // checkpoint file full path
+#   "VerifyData": bool,    // verify downloaded data in FilePath
+#   "Loaded": bool,        // If Info.Data.DownloadInfo is loaded from checkpoint
+#   "Info": {              // checkpoint data
+#     "Magic": string,     // Magic
+#     "MD5": string,       // The Data's MD5
+#     "Data": {
+#       "ObjectInfo": {    // source
+#         "Name": string,
+#         "VersionId": string,
+#         "Range": string,
+#       },
+#       "ObjectMeta": {
+#         "Size": int,
+#         "LastModified": string,
+#         "ETag": string,
+#       },
+#       "FilePath": string,  // destination, local file
+#       "PartSize": int,
+#
+#       "DownloadInfo": {    // download info
+#         "Offset": int,
+#         "CRC64": int
+#       },
+#     }
+#   },
+# }
+
+class DownloadCheckpoint:
+    """Checkpoint data for a resumable download."""
+    def __init__(
+        self,
+        request: GetObjectRequest,
+        filepath: str,
+        basedir: str,
+        headers: MutableMapping,
+        part_size: int
+    ) -> None:
+        name = f'{request.bucket}/{request.key}'
+        canon_src = f'oss://{quote(name)}\n{safety_str(request.version_id)}\n{safety_str(request.range_header)}'
+        h = hashlib.md5()
+        h.update(canon_src.encode())
+        src_hash = h.hexdigest()
+
+        absfilepath = os.path.abspath(filepath)
+        h = hashlib.md5()
+        h.update(absfilepath.encode())
+        dst_hash = h.hexdigest()
+
+        if not basedir:
+            dirbase = gettempdir()
+        else:
+            dirbase = os.path.dirname(basedir)
+
+        cp_filepath = os.path.join(dirbase, f'{src_hash}-{dst_hash}{CHECKPOINT_FILE_SUFFIX_DOWNLOADER}')
+        object_size = int(headers.get("Content-Length"))
+
+        self.cp_filepath = cp_filepath
+        self.cp_dirpath = dirbase
+        self.verify_data = False
+        self.loaded = False
+        self.cp_info = {
+            "Magic": CHECKPOINT_MAGIC,
+            #"MD5": md5hex,
+            "Data": {
+                "ObjectInfo": {
+                    "Name": f'oss://{name}',
+                    "VersionId": safety_str(request.version_id),
+                    "Range": safety_str(request.range_header),
+                },
+                "ObjectMeta": {
+                    "Size": object_size,
+                    "LastModified": headers.get("Last-Modified", ''),
+                    "ETag": headers.get("ETag", ''),
+                },
+                "FilePath": filepath,
+                "PartSize": part_size,
+            },
+        }
+        self.doffset = 0
+        self.dcrc64 = 0
+
+    def load(self):
+        """Load the checkpoint from the local file."""
+        if len(self.cp_dirpath) > 0 and not os.path.isdir(self.cp_dirpath):
+            raise ValueError(f'Invalid checkpoint dir {self.cp_dirpath}')
+
+        if not os.path.isfile(self.cp_filepath):
+            return
+
+        if not self._is_valid():
+            self.remove()
+            return
+
+        self.loaded = True
+
+    def _is_valid(self) -> bool:
+        try:
+            dcp_info = {}
+            with open(self.cp_filepath, 'rb') as f:
+                dcp_info = json.loads(f.read())
+            if not isinstance(dcp_info, Dict):
+                return False
+
+            js = json.dumps(dcp_info.get("Data", {})).encode()
+            h = hashlib.md5()
+            h.update(js)
+            md5sum = h.hexdigest()
+
+            if (CHECKPOINT_MAGIC != dcp_info.get("Magic") or
+                    md5sum != dcp_info.get("MD5")):
+                return False
+
+            cpid = self.cp_info["Data"]
+            dcpid = dcp_info["Data"]
+
+            # compare
+            if (cpid["ObjectInfo"] != dcpid["ObjectInfo"] or
+                    cpid["ObjectMeta"] != dcpid["ObjectMeta"] or
+                    cpid["FilePath"] != dcpid["FilePath"] or
+                    cpid["PartSize"] != dcpid["PartSize"]):
+                return False
+
+            # download info
+            offset = dcpid["DownloadInfo"].get('Offset', 0)
+            crc64 = dcpid["DownloadInfo"].get('CRC64', 0)
+            if (not isinstance(offset, int) or
+                    not isinstance(crc64, int)):
+                return False
+
+            if offset == 0 and crc64 != 0:
+                return False
+
+            roffset = 0
+            if len(cpid["ObjectInfo"]["Range"]) > 0:
+                range_header = parse_http_range(cpid["ObjectInfo"]["Range"])
+                if offset < range_header[0]:
+                    return False
+                roffset = range_header[0]
+
+            remains = (offset - roffset) % dcpid["PartSize"]
+            if remains != 0:
+                return False
+
+            # verify data
+            if self.verify_data and crc64 != 0:
+                try:
+                    with open(dcpid["FilePath"], 'rb') as f:
+                        chash = Crc64(0)
+                        limitn = offset - roffset
+                        r = LimitReader(f, limitn)
+                        chunk = 32 * 1024
+                        for _ in range(0, limitn, chunk):
+                            chash.write(r.read(chunk))
+                        if chash.sum64() != crc64:
+                            return False
+                except Exception:
+                    return False
+
+            self.doffset = offset
+            self.dcrc64 = crc64
+
+            return True
+        except Exception:
+            #print(f"err = {err}")
+            pass
+
+        return False
+
+    def dump(self) -> bool:
+        """Persist the checkpoint to the local file.
+
+        Returns:
+            bool: True on success, False otherwise.
+        """
+        # Calculate MD5
+        self.cp_info["Data"]["DownloadInfo"] = {
+            "Offset": self.doffset,
+            "CRC64": self.dcrc64
+        }
+        js = json.dumps(self.cp_info["Data"]).encode()
+        h = hashlib.md5()
+        h.update(js)
+        self.cp_info["MD5"] = h.hexdigest()
+
+        # Serialize
+        try:
+            js = json.dumps(self.cp_info).encode()
+            with open(self.cp_filepath, 'wb') as f:
+                f.write(js)
+        except (OSError, ValueError):
+            return False
+
+        return True
+
+    def remove(self) -> None:
+        try:
+            os.remove(self.cp_filepath)
+        except (OSError, ValueError):
+            pass
+
+# ----- upload checkpoint -----
+# {
+#   "CpDirPath": string,   // checkpoint dir full path
+#   "CpFilePath": string,  // checkpoint file full path
+#   "Loaded": bool,        // If Info.Data.UploadInfo is loaded from checkpoint
+#   "Info": {              // checkpoint data
+#     "Magic": string,     // Magic
+#     "MD5": string,       // The Data's MD5
+#     "Data": {
+#       "FilePath": string,  // source, local file
+#       "FileMeta": {        // source
+#         "Size": int,
+#         "LastModified": string,
+#       },
+#       "ObjectInfo": {
+#         "Name": string,
+#       },
+#
+#       "PartSize": int,
+#       "UploadInfo": {      // upload info
+#         "UploadId": string,
+#       },
+#     }
+#   },
+# }
+
+class UploadCheckpoint:
+    """Checkpoint data for a resumable upload."""
+    def __init__(
+        self,
+        request: PutObjectRequest,
+        filepath: str,
+        basedir: str,
+        fileinfo: os.stat_result,
+        part_size: int
+    ) -> None:
+        name = f'{request.bucket}/{request.key}'
+        canon_dst = f'oss://{quote(name)}'
+        h = hashlib.md5()
+        h.update(canon_dst.encode())
+        dst_hash = h.hexdigest()
+
+        absfilepath = os.path.abspath(filepath)
+        h = hashlib.md5()
+        h.update(absfilepath.encode())
+        src_hash = h.hexdigest()
+
+        if len(basedir) == 0:
+            dirbase = gettempdir()
+        else:
+            dirbase = os.path.dirname(basedir)
+
+        cp_filepath = os.path.join(dirbase, f'{src_hash}-{dst_hash}{CHECKPOINT_FILE_SUFFIX_UPLOADER}')
+
+        self.cp_filepath = cp_filepath
+        self.cp_dirpath = dirbase
+        self.loaded = False
+        self.cp_info = {
+            "Magic": CHECKPOINT_MAGIC,
+            #"MD5": md5hex,
+            "Data": {
+                "FilePath": filepath,
+                "FileMeta": {
+                    "Size": fileinfo.st_size,
+                    "LastModified": fileinfo.st_mtime,
+                },
+                "ObjectInfo": {
+                    "Name": f'oss://{name}',
+                },
+                "PartSize": part_size,
+            },
+        }
+        self.upload_id = ''
+
+    def load(self):
+        """Load the checkpoint from the local file."""
+        if len(self.cp_dirpath) > 0 and not os.path.isdir(self.cp_dirpath):
+            raise ValueError(f'Invalid checkpoint dir {self.cp_dirpath}')
+
+        if not os.path.isfile(self.cp_filepath):
+            return
+
+        if not self._is_valid():
+            self.remove()
+            return
+
+        self.loaded = True
+
+    def _is_valid(self) -> bool:
+        try:
+            ucp_info = {}
+            with open(self.cp_filepath, 'rb') as f:
+                ucp_info = json.loads(f.read())
+            if not isinstance(ucp_info, Dict):
+                return False
+
+            js = json.dumps(ucp_info.get("Data", {})).encode()
+            h = hashlib.md5()
+            h.update(js)
+            md5sum = h.hexdigest()
+
+            if (CHECKPOINT_MAGIC != ucp_info.get("Magic") or
+                    md5sum != ucp_info.get("MD5")):
+                return False
+
+            cpid = self.cp_info["Data"]
+            ucpid = ucp_info["Data"]
+
+            # compare
+            if (cpid["ObjectInfo"] != ucpid["ObjectInfo"] or
+                    cpid["FileMeta"] != ucpid["FileMeta"] or
+                    cpid["FilePath"] != ucpid["FilePath"] or
+                    cpid["PartSize"] != ucpid["PartSize"]):
+                return False
+
+            # upload info
+            uploadid = ucpid["UploadInfo"]["UploadId"]
+            if not isinstance(uploadid, str) or uploadid == '':
+                return False
+
+            self.upload_id = uploadid
+
+            return True
+        except Exception:
+            #print(f"err = {err}")
+            pass
+
+        return False
+
+    def dump(self) -> bool:
+        """Persist the checkpoint to the local file.
+
+        Returns:
+            bool: True on success, False otherwise.
+        """
+        # Calculate MD5
+        self.cp_info["Data"]["UploadInfo"] = {
+            "UploadId": self.upload_id,
+        }
+        js = json.dumps(self.cp_info["Data"]).encode()
+        h = hashlib.md5()
+        h.update(js)
+        self.cp_info["MD5"] = h.hexdigest()
+
+        # Serialize
+        try:
+            js = json.dumps(self.cp_info).encode()
+            with open(self.cp_filepath, 'wb') as f:
+                f.write(js)
+        except (OSError, ValueError):
+            return False
+
+        return True
+
+    def remove(self) -> None:
+        """Remove the checkpoint file, ignoring errors."""
+        try:
+            os.remove(self.cp_filepath)
+        except (OSError, ValueError):
+            pass
diff --git a/alibabacloud_oss_v2/client.py b/alibabacloud_oss_v2/client.py
new file mode 100644
index 0000000..116393d
--- /dev/null
+++ b/alibabacloud_oss_v2/client.py
@@ -0,0 +1,904 @@
+# pylint: disable=line-too-long
+"""The synchronous client for OSS."""
+import copy
+from typing import Optional
+from .config import Config
+from .types import OperationInput, OperationOutput
+from ._client import _SyncClientImpl, FF_ENABLE_CRC64_CHECK_DOWNLOAD
+from . import models
+from . import operations
+from . import exceptions
+from .downloader import Downloader
+from .uploader import Uploader
+from .progress import Progress
+from .crc import Crc64
+from .paginator import (
+    ListObjectsPaginator,
+    ListObjectsV2Paginator,
+    ListObjectVersionsPaginator,
+    ListBucketsPaginator,
+    ListPartsPaginator,
+    ListMultipartUploadsPaginator
+)
+from .presigner import (
+    PresignRequest,
+    PresignResult,
+    presign_inner
+)
+from .filelike import AppendOnlyFile, ReadOnlyFile
+
+class Client:
+    """The synchronous client entry point for OSS operations."""
+
+    def __init__(self, config: Config, **kwargs) -> None:
+        """Initialize the client.
+
+        Args:
+            config (Config): The client configuration.
+        """
+        self._client = _SyncClientImpl(config, **kwargs)
+
+    def __repr__(self) -> str:
+        return "<Client>"
+
+    def invoke_operation(self, op_input: OperationInput, **kwargs
+                         ) -> OperationOutput:
+        """Invoke a raw operation.
+
+        Args:
+            op_input (OperationInput): The operation input.
+
+        Returns:
+            OperationOutput: The operation output.
+        """
+        return self._client.invoke_operation(op_input, **kwargs)
+
+    # service
+    def list_buckets(self, request: models.ListBucketsRequest, **kwargs
+                     ) -> models.ListBucketsResult:
+        """
+        Lists all buckets that belong to your Alibaba Cloud account.
+
+        Args:
+            request (ListBucketsRequest): Request parameters for ListBuckets operation.
+
+        Returns:
+            ListBucketsResult: Response result for ListBuckets operation.
+        """
+
+        return operations.list_buckets(self._client, request, **kwargs)
+
+    # region
+    def describe_regions(self, request: models.DescribeRegionsRequest, **kwargs
+                         ) -> models.DescribeRegionsResult:
+        """
+        Queries the endpoints of all regions supported by Object Storage Service (OSS)
+        or a specific region, including public endpoints, internal endpoints,
+        and acceleration endpoints.
+
+        Args:
+            request (DescribeRegionsRequest): Request parameters for DescribeRegions operation.
+
+        Returns:
+            DescribeRegionsResult: Response result for DescribeRegions operation.
+        """
+
+        return operations.describe_regions(self._client, request, **kwargs)
+
+    # bucket
+    def put_bucket(self, request: models.PutBucketRequest, **kwargs
+                   ) -> models.PutBucketResult:
+        """
+        Creates a bucket.
+
+        Args:
+            request (PutBucketRequest): Request parameters for PutBucket operation.
+
+        Returns:
+            PutBucketResult: Response result for PutBucket operation.
+        """
+
+        return operations.put_bucket(self._client, request, **kwargs)
+
+    def delete_bucket(self, request: models.DeleteBucketRequest, **kwargs
+                      ) -> models.DeleteBucketResult:
+        """
+        Deletes a bucket.
+
+        Args:
+            request (DeleteBucketRequest): Request parameters for DeleteBucket operation.
+
+        Returns:
+            DeleteBucketResult: Response result for DeleteBucket operation.
+        """
+
+        return operations.delete_bucket(self._client, request, **kwargs)
+
+    def list_objects(self, request: models.ListObjectsRequest, **kwargs
+                     ) -> models.ListObjectsResult:
+        """
+        Lists information about all objects in an Object Storage Service (OSS) bucket.
+
+        Args:
+            request (ListObjectsRequest): Request parameters for ListObjects operation.
+
+        Returns:
+            ListObjectsResult: Response result for ListObjects operation.
+        """
+
+        return operations.list_objects(self._client, request, **kwargs)
+
+    def put_bucket_acl(self, request: models.PutBucketAclRequest, **kwargs
+                       ) -> models.PutBucketAclResult:
+        """
+        You can call this operation to configure or modify the ACL of a bucket.
+
+        Args:
+            request (PutBucketAclRequest): Request parameters for PutBucketAcl operation.
+
+        Returns:
+            PutBucketAclResult: Response result for PutBucketAcl operation.
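+
+        Example (illustrative sketch; assumes this package is imported as
+        ``oss`` and that the request's ``acl`` field takes a canned-ACL string):
+
+            result = client.put_bucket_acl(oss.PutBucketAclRequest(
+                bucket='examplebucket',
+                acl='private',
+            ))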
+ """ + + return operations.put_bucket_acl(self._client, request, **kwargs) + + def get_bucket_acl(self, request: models.GetBucketAclRequest, **kwargs + ) -> models.GetBucketAclResult: + """ + You can call this operation to query the ACL of a bucket. + Only the bucket owner can query the ACL of the bucket. + + Args: + request (GetBucketAclRequest): Request parameters for GetBucketAcl operation. + + Returns: + GetBucketAclResult: Response result for GetBucketAcl operation. + """ + + return operations.get_bucket_acl(self._client, request, **kwargs) + + def list_objects_v2(self, request: models.ListObjectsV2Request, **kwargs + ) -> models.ListObjectsV2Result: + """ + Lists all objects in a bucket. + + Args: + request (ListObjectsV2Request): Request parameters for ListObjectsV2 operation. + + Returns: + ListObjectsV2Result: Reponse result for ListObjectsV2 operation. + """ + + return operations.list_objects_v2(self._client, request, **kwargs) + + def get_bucket_stat(self, request: models.GetBucketStatRequest, **kwargs + ) -> models.GetBucketStatResult: + """ + GetBucketStat Queries the storage capacity of a specified bucket and + the number of objects that are stored in the bucket. + + Args: + request (GetBucketStatRequest): Request parameters for GetBucketStat operation. + + Returns: + GetBucketStatResult: Response result for GetBucketStat operation. + """ + + return operations.get_bucket_stat(self._client, request, **kwargs) + + def get_bucket_location(self, request: models.GetBucketLocationRequest, **kwargs + ) -> models.GetBucketLocationResult: + """ + GetBucketLocation Queries the region of an Object Storage Service (OSS) bucket. + + Args: + request (GetBucketLocationRequest): Request parameters for GetBucketLocation operation. + + Returns: + GetBucketLocationResult: Response result for GetBucketLocation operation. + """ + + return operations.get_bucket_location(self._client, request, **kwargs) + + def get_bucket_info(self, request: models.GetBucketInfoRequest, **kwargs + ) -> models.GetBucketInfoResult: + """ + GetBucketInfo Queries information about a bucket. + + Args: + request (GetBucketInfoRequest): Request parameters for GetBucketInfo operation. + + Returns: + GetBucketInfoResult: Response result for GetBucketInfo operation. + """ + + return operations.get_bucket_info(self._client, request, **kwargs) + + def put_bucket_versioning(self, request: models.PutBucketVersioningRequest, **kwargs + ) -> models.PutBucketVersioningResult: + """ + PutBucketVersioning Configures the versioning state for a bucket. + + Args: + request (PutBucketVersioningRequest): Request parameters for PutBucketVersioning operation. + + Returns: + PutBucketVersioningResult: Response result for PutBucketVersioning operation. + """ + + return operations.put_bucket_versioning(self._client, request, **kwargs) + + def get_bucket_versioning(self, request: models.GetBucketVersioningRequest, **kwargs + ) -> models.GetBucketVersioningResult: + """ + GetBucketVersioning You can call this operation to query the versioning state of a bucket. + + Args: + request (GetBucketVersioningRequest): Request parameters for GetBucketVersioning operation. + + Returns: + GetBucketVersioningResult: Response result for GetBucketVersioning operation. 
+ """ + + return operations.get_bucket_versioning(self._client, request, **kwargs) + + def list_object_versions(self, request: models.ListObjectVersionsRequest, **kwargs + ) -> models.ListObjectVersionsResult: + """ + ListObjectVersions Lists the versions of all objects in a bucket, including delete markers. + + Args: + request (ListObjectVersionsRequest): Request parameters for ListObjectVersions operation. + + Returns: + ListObjectVersionsResult: Reponse result for ListObjectVersions operation. + """ + + return operations.list_object_versions(self._client, request, **kwargs) + + # object + def put_object(self, request: models.PutObjectRequest, **kwargs + ) -> models.PutObjectResult: + """ + Uploads objects. + + Args: + request (PutObjectRequest): Request parameters for PutObject operation. + + Returns: + PutObjectResult: Reponse result for PutObject operation. + """ + + return operations.put_object(self._client, request, **kwargs) + + def get_object(self, request: models.GetObjectRequest, **kwargs + ) -> models.GetObjectResult: + """ + Queries an object. To call this operation, you must have read permissions on the object. + + Args: + request (GetObjectRequest): Request parameters for GetObject operation. + + Returns: + GetObjectResult: Reponse result for GetObject operation. + """ + + return operations.get_object(self._client, request, **kwargs) + + def copy_object(self, request: models.CopyObjectRequest, **kwargs + ) -> models.CopyObjectResult: + """ + Copies objects within a bucket or between buckets in the same region. + + Args: + request (CopyObjectRequest): Request parameters for CopyObject operation. + + Returns: + CopyObjectResult: Reponse result for CopyObject operation. + """ + + return operations.copy_object(self._client, request, **kwargs) + + def append_object(self, request: models.AppendObjectRequest, **kwargs + ) -> models.AppendObjectResult: + """ + Uploads an object by appending the object to an existing object. + Objects created by using the AppendObject operation are appendable objects. + + Args: + request (AppendObjectRequest): Request parameters for AppendObject operation. + + Returns: + AppendObjectResult: Reponse result for AppendObject operation. + """ + + return operations.append_object(self._client, request, **kwargs) + + def delete_object(self, request: models.DeleteObjectRequest, **kwargs + ) -> models.DeleteObjectResult: + """ + Deletes an object. + + Args: + request (DeleteObjectRequest): Request parameters for DeleteObject operation. + + Returns: + DeleteObjectResult: Reponse result for DeleteObject operation. + """ + + return operations.delete_object(self._client, request, **kwargs) + + def delete_multiple_objects(self, request: models.DeleteMultipleObjectsResult, **kwargs + ) -> models.DeleteMultipleObjectsResult: + """ + Deletes multiple objects from a bucket. + + Args: + request (DeleteMultipleObjectsResult): Request parameters for DeleteMultipleObjects operation. + + Returns: + DeleteMultipleObjectsResult: Reponse result for DeleteMultipleObjects operation. + """ + + return operations.delete_multiple_objects(self._client, request, **kwargs) + + def head_object(self, request: models.HeadObjectRequest, **kwargs + ) -> models.HeadObjectResult: + """ + Queries information about the object in a bucket. + + Args: + request (HeadObjectRequest): Request parameters for HeadObject operation. + + Returns: + HeadObjectResult: Reponse result for HeadObject operation. 
+ """ + + return operations.head_object(self._client, request, **kwargs) + + def get_object_meta(self, request: models.GetObjectMetaRequest, **kwargs + ) -> models.GetObjectMetaResult: + """ + Queries the metadata of an object, including ETag, Size, and LastModified. + + Args: + request (GetObjectMetaRequest): Request parameters for GetObjectMeta operation. + + Returns: + GetObjectMetaResult: Reponse result for GetObjectMeta operation. + """ + + return operations.get_object_meta(self._client, request, **kwargs) + + def restore_object(self, request: models.RestoreObjectRequest, **kwargs + ) -> models.RestoreObjectResult: + """ + Restores Archive, Cold Archive, or Deep Cold Archive objects. + + Args: + request (RestoreObjectRequest): Request parameters for RestoreObject operation. + + Returns: + RestoreObjectResult: Reponse result for RestoreObject operation. + """ + + return operations.restore_object(self._client, request, **kwargs) + + def put_object_acl(self, request: models.PutObjectAclRequest, **kwargs + ) -> models.PutObjectAclResult: + """ + You can call this operation to modify the access control list (ACL) of an object. + + Args: + request (PutObjectAclRequest): Request parameters for PutObjectAcl operation. + + Returns: + PutObjectAclResult: Reponse result for PutObjectAcl operation. + """ + + return operations.put_object_acl(self._client, request, **kwargs) + + def get_object_acl(self, request: models.GetObjectAclRequest, **kwargs + ) -> models.GetObjectAclResult: + """ + Queries the access control list (ACL) of an object in a bucket. + + Args: + request (GetObjectAclRequest): Request parameters for GetObjectAcl operation. + + Returns: + GetObjectAclResult: Reponse result for GetObjectAcl operation. + """ + + return operations.get_object_acl(self._client, request, **kwargs) + + def initiate_multipart_upload(self, request: models.InitiateMultipartUploadRequest, **kwargs + ) -> models.InitiateMultipartUploadResult: + """ + Initiates a multipart upload task before you can upload data in parts to Object Storage Service (OSS). + + Args: + request (InitiateMultipartUploadRequest): Request parameters for InitiateMultipartUpload operation. + + Returns: + InitiateMultipartUploadResult: Reponse result for InitiateMultipartUpload operation. + """ + + return operations.initiate_multipart_upload(self._client, request, **kwargs) + + def upload_part(self, request: models.UploadPartRequest, **kwargs + ) -> models.UploadPartResult: + """ + Call the UploadPart interface to upload data in blocks (parts) based on the specified Object name and uploadId. + + Args: + request (UploadPartRequest): Request parameters for UploadPart operation. + + Returns: + UploadPartResult: Reponse result for UploadPart operation. + """ + + return operations.upload_part(self._client, request, **kwargs) + + def upload_part_copy(self, request: models.UploadPartCopyRequest, **kwargs + ) -> models.UploadPartCopyResult: + """ + You can call this operation to copy data from an existing object to upload a part + by adding a x-oss-copy-request header to UploadPart. + + Args: + request (UploadPartCopyRequest): Request parameters for UploadPartCopy operation. + + Returns: + UploadPartCopyResult: Reponse result for UploadPartCopy operation. 
+
+    def complete_multipart_upload(self, request: models.CompleteMultipartUploadRequest, **kwargs
+                                  ) -> models.CompleteMultipartUploadResult:
+        """
+        Completes the multipart upload task of an object after all parts of the object are uploaded.
+
+        Args:
+            request (CompleteMultipartUploadRequest): Request parameters for CompleteMultipartUpload operation.
+
+        Returns:
+            CompleteMultipartUploadResult: Response result for CompleteMultipartUpload operation.
+        """
+
+        return operations.complete_multipart_upload(self._client, request, **kwargs)
+
+    def abort_multipart_upload(self, request: models.AbortMultipartUploadRequest, **kwargs
+                               ) -> models.AbortMultipartUploadResult:
+        """
+        Cancels a multipart upload task and deletes the parts uploaded in the task.
+
+        Args:
+            request (AbortMultipartUploadRequest): Request parameters for AbortMultipartUpload operation.
+
+        Returns:
+            AbortMultipartUploadResult: Response result for AbortMultipartUpload operation.
+        """
+
+        return operations.abort_multipart_upload(self._client, request, **kwargs)
+
+    def list_multipart_uploads(self, request: models.ListMultipartUploadsRequest, **kwargs
+                               ) -> models.ListMultipartUploadsResult:
+        """
+        Lists multipart upload tasks that are in progress, that is, tasks that have been
+        neither completed nor canceled.
+
+        Args:
+            request (ListMultipartUploadsRequest): Request parameters for ListMultipartUploads operation.
+
+        Returns:
+            ListMultipartUploadsResult: Response result for ListMultipartUploads operation.
+        """
+
+        return operations.list_multipart_uploads(self._client, request, **kwargs)
+
+    def list_parts(self, request: models.ListPartsRequest, **kwargs
+                   ) -> models.ListPartsResult:
+        """
+        Lists all parts that are uploaded by using a specified upload ID.
+
+        Args:
+            request (ListPartsRequest): Request parameters for ListParts operation.
+
+        Returns:
+            ListPartsResult: Response result for ListParts operation.
+        """
+
+        return operations.list_parts(self._client, request, **kwargs)
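+
+    # A minimal sketch of the multipart-upload flow built from the operations
+    # above (bucket/key names are placeholders; all parts must be uploaded
+    # before completion):
+    #
+    #   init = client.initiate_multipart_upload(oss.models.InitiateMultipartUploadRequest(
+    #       bucket='examplebucket', key='big-object'))
+    #   up = client.upload_part(oss.models.UploadPartRequest(
+    #       bucket='examplebucket', key='big-object',
+    #       upload_id=init.upload_id, part_number=1, body=b'part-1-data'))
+    #   client.complete_multipart_upload(oss.models.CompleteMultipartUploadRequest(
+    #       bucket='examplebucket', key='big-object', upload_id=init.upload_id,
+    #       complete_multipart_upload=oss.models.CompleteMultipartUpload(
+    #           parts=[oss.models.UploadPart(part_number=1, etag=up.etag)])))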
+
+    def put_symlink(self, request: models.PutSymlinkRequest, **kwargs
+                    ) -> models.PutSymlinkResult:
+        """
+        Creates a symbolic link that points to a destination object.
+        You can use the symbolic link to access the destination object.
+
+        Args:
+            request (PutSymlinkRequest): Request parameters for PutSymlink operation.
+
+        Returns:
+            PutSymlinkResult: Response result for PutSymlink operation.
+        """
+
+        return operations.put_symlink(self._client, request, **kwargs)
+
+    def get_symlink(self, request: models.GetSymlinkRequest, **kwargs
+                    ) -> models.GetSymlinkResult:
+        """
+        Obtains a symbolic link. To perform GetSymlink operations, you must have read permissions
+        on the symbolic link.
+
+        Args:
+            request (GetSymlinkRequest): Request parameters for GetSymlink operation.
+
+        Returns:
+            GetSymlinkResult: Response result for GetSymlink operation.
+        """
+
+        return operations.get_symlink(self._client, request, **kwargs)
+
+    def put_object_tagging(self, request: models.PutObjectTaggingRequest, **kwargs
+                           ) -> models.PutObjectTaggingResult:
+        """
+        Adds tags to an object or updates the tags added to the object. Each tag added to an object is a key-value pair.
+
+        Args:
+            request (PutObjectTaggingRequest): Request parameters for PutObjectTagging operation.
+
+        Returns:
+            PutObjectTaggingResult: Response result for PutObjectTagging operation.
+        """
+
+        return operations.put_object_tagging(self._client, request, **kwargs)
+
+    def get_object_tagging(self, request: models.GetObjectTaggingRequest, **kwargs
+                           ) -> models.GetObjectTaggingResult:
+        """
+        You can call this operation to query the tags of an object.
+
+        Args:
+            request (GetObjectTaggingRequest): Request parameters for GetObjectTagging operation.
+
+        Returns:
+            GetObjectTaggingResult: Response result for GetObjectTagging operation.
+        """
+
+        return operations.get_object_tagging(self._client, request, **kwargs)
+
+    def delete_object_tagging(self, request: models.DeleteObjectTaggingRequest, **kwargs
+                              ) -> models.DeleteObjectTaggingResult:
+        """
+        You can call this operation to delete the tags of a specified object.
+
+        Args:
+            request (DeleteObjectTaggingRequest): Request parameters for DeleteObjectTagging operation.
+
+        Returns:
+            DeleteObjectTaggingResult: Response result for DeleteObjectTagging operation.
+        """
+
+        return operations.delete_object_tagging(self._client, request, **kwargs)
+
+    def process_object(self, request: models.ProcessObjectRequest, **kwargs
+                       ) -> models.ProcessObjectResult:
+        """
+        Applies a process to the specified image file.
+
+        Args:
+            request (ProcessObjectRequest): Request parameters for ProcessObject operation.
+
+        Returns:
+            ProcessObjectResult: Response result for ProcessObject operation.
+        """
+
+        return operations.process_object(self._client, request, **kwargs)
+
+    def async_process_object(self, request: models.AsyncProcessObjectRequest, **kwargs
+                             ) -> models.AsyncProcessObjectResult:
+        """
+        Applies an asynchronous process to the specified image file.
+
+        Args:
+            request (AsyncProcessObjectRequest): Request parameters for AsyncProcessObject operation.
+
+        Returns:
+            AsyncProcessObjectResult: Response result for AsyncProcessObject operation.
+        """
+
+        return operations.async_process_object(self._client, request, **kwargs)
+
+    # presigner
+    def presign(self, request: PresignRequest, **kwargs) -> PresignResult:
+        """Generates a presigned URL.
+        If you do not specify expires or expiration, the presigned URL expires in 15 minutes by default.
+
+        Args:
+            request (PresignRequest): Request parameters for the presign operation.
+            expires (datetime.timedelta, optional): The expiration duration for the presigned url.
+            expiration (datetime.datetime, optional): The expiration time for the presigned url.
+        Returns:
+            PresignResult: Response result for the presign operation.
+        """
+        return presign_inner(self._client, request, **kwargs)
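+
+    # A short sketch of generating a presigned GET URL (placeholder names; the
+    # expires keyword is optional and defaults to the 15 minutes noted above):
+    #
+    #   import datetime
+    #   pre = client.presign(
+    #       oss.models.GetObjectRequest(bucket='examplebucket', key='example.txt'),
+    #       expires=datetime.timedelta(minutes=30))
+    #   # pre.url can now be fetched with any HTTP client until it expires.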
+
+    # paginator
+    def list_objects_paginator(self, **kwargs) -> ListObjectsPaginator:
+        """Creates a paginator for ListObjects
+
+        Returns:
+            ListObjectsPaginator: a paginator for ListObjects
+        """
+        return ListObjectsPaginator(self, **kwargs)
+
+    def list_objects_v2_paginator(self, **kwargs) -> ListObjectsV2Paginator:
+        """Creates a paginator for ListObjectsV2
+
+        Returns:
+            ListObjectsV2Paginator: a paginator for ListObjectsV2
+        """
+        return ListObjectsV2Paginator(self, **kwargs)
+
+    def list_object_versions_paginator(self, **kwargs) -> ListObjectVersionsPaginator:
+        """Creates a paginator for ListObjectVersions
+
+        Returns:
+            ListObjectVersionsPaginator: a paginator for ListObjectVersions
+        """
+        return ListObjectVersionsPaginator(self, **kwargs)
+
+    def list_buckets_paginator(self, **kwargs) -> ListBucketsPaginator:
+        """Creates a paginator for ListBuckets
+
+        Returns:
+            ListBucketsPaginator: a paginator for ListBuckets
+        """
+        return ListBucketsPaginator(self, **kwargs)
+
+    def list_parts_paginator(self, **kwargs) -> ListPartsPaginator:
+        """Creates a paginator for ListParts
+
+        Returns:
+            ListPartsPaginator: a paginator for ListParts
+        """
+        return ListPartsPaginator(self, **kwargs)
+
+    def list_multipart_uploads_paginator(self, **kwargs) -> ListMultipartUploadsPaginator:
+        """Creates a paginator for ListMultipartUploads
+
+        Returns:
+            ListMultipartUploadsPaginator: a paginator for ListMultipartUploads
+        """
+        return ListMultipartUploadsPaginator(self, **kwargs)
+
+    # transfer managers
+    def downloader(self, **kwargs) -> Downloader:
+        """Creates a downloader for downloading objects in parts.
+
+        Returns:
+            Downloader: a downloader bound to this client.
+        """
+        return Downloader(self, **kwargs)
+
+    def uploader(self, **kwargs) -> Uploader:
+        """Creates an uploader for uploading objects in parts.
+
+        Returns:
+            Uploader: an uploader bound to this client.
+        """
+        return Uploader(self, **kwargs)
+
+    # file like objects
+    def append_file(self, bucket: str, key: str,
+                    request_payer: Optional[str] = None,
+                    create_parameter: Optional[models.AppendObjectRequest] = None,
+                    **kwargs) -> AppendOnlyFile:
+        """Opens or creates the named file for appending.
+
+        Args:
+            bucket (str, required): The name of the bucket.
+            key (str, required): The name of the object.
+            request_payer (str, optional): Indicates that the requester is aware that the request
+                and data download will incur costs.
+            create_parameter (AppendObjectRequest, optional): The parameters used when the object is
+                first created; supports CacheControl, ContentEncoding, Expires, ContentType, Metadata,
+                SSE parameters, Acl, StorageClass and Tagging.
+                If the object already exists, this parameter is ignored.
+
+        Returns:
+            AppendOnlyFile: an append-only file-like object.
+        """
+        _ = kwargs
+        return AppendOnlyFile(
+            self,
+            bucket=bucket,
+            key=key,
+            request_payer=request_payer,
+            create_parameter=create_parameter
+        )
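+
+    # A small sketch of appending to an object via the file-like API
+    # (placeholder names; assumes AppendOnlyFile supports the context-manager
+    # protocol, otherwise call close() explicitly):
+    #
+    #   with client.append_file('examplebucket', 'log.txt') as f:
+    #       f.write(b'first line\n')
+    #       f.write(b'second line\n')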
+
+    def open_file(self, bucket: str, key: str,
+                  version_id: Optional[str] = None,
+                  request_payer: Optional[str] = None,
+                  **kwargs) -> ReadOnlyFile:
+        """Opens the named file for reading.
+
+        Args:
+            bucket (str, required): The name of the bucket.
+            key (str, required): The name of the object.
+            version_id (str, optional): The version ID of the object.
+            request_payer (str, optional): Indicates that the requester is aware that the request
+                and data download will incur costs.
+
+        Returns:
+            ReadOnlyFile: a read-only file-like object.
+        """
+        return ReadOnlyFile(
+            self,
+            bucket=bucket,
+            key=key,
+            version_id=version_id,
+            request_payer=request_payer,
+            **kwargs
+        )
+
+    # other apis
+    def is_object_exist(self, bucket: str, key: str,
+                        version_id: Optional[str] = None,
+                        request_payer: Optional[str] = None,
+                        **kwargs) -> bool:
+        """Checks if the object exists.
+
+        Args:
+            bucket (str, required): The name of the bucket.
+            key (str, required): The name of the object.
+            version_id (str, optional): The version ID of the object.
+            request_payer (str, optional): Indicates that the requester is aware that the request
+                and data download will incur costs.
+        Returns:
+            bool: True if the object exists, else False.
+        """
+
+        result = None
+        err = None
+
+        try:
+            result = self.get_object_meta(models.GetObjectMetaRequest(
+                bucket=bucket,
+                key=key,
+                version_id=version_id,
+                request_payer=request_payer,
+                **kwargs
+            ))
+        except exceptions.OperationError as e:
+            err = e
+            se = e.unwrap()
+            if isinstance(se, exceptions.ServiceError):
+                if ('NoSuchKey' == se.code or
+                        (404 == se.status_code and 'BadErrorResponse' == se.code)):
+                    return False
+
+        if err is not None:
+            raise err
+
+        return result is not None
+
+    def is_bucket_exist(self, bucket: str, request_payer: Optional[str] = None, **kwargs) -> bool:
+        """Checks if the bucket exists.
+
+        Args:
+            bucket (str, required): The name of the bucket.
+            request_payer (str, optional): Indicates that the requester is aware that the request
+                and data download will incur costs.
+        Returns:
+            bool: True if the bucket exists, else False.
+        """
+
+        result = None
+        err = None
+
+        try:
+            result = self.get_bucket_acl(models.GetBucketAclRequest(
+                bucket=bucket,
+                request_payer=request_payer,
+                **kwargs
+            ))
+        except exceptions.OperationError as e:
+            err = e
+            se = e.unwrap()
+            if isinstance(se, exceptions.ServiceError):
+                return se.code != 'NoSuchBucket'
+
+        if err is not None:
+            raise err
+
+        return result is not None
+
+    def put_object_from_file(self, request: models.PutObjectRequest, filepath: str, **kwargs) -> models.PutObjectResult:
+        """Uploads a local file to OSS.
+
+        Args:
+            request (models.PutObjectRequest): Request parameters for PutObject operation;
+                its body is replaced by the file's content.
+            filepath (str): The path of the local file to upload.
+
+        Returns:
+            models.PutObjectResult: Response result for PutObject operation.
+        """
+        with open(filepath, 'rb') as f:
+            req = copy.copy(request)
+            req.body = f
+            return self.put_object(req, **kwargs)
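+
+    # A brief sketch of the convenience helpers above (placeholder names):
+    #
+    #   client.put_object_from_file(
+    #       oss.models.PutObjectRequest(bucket='examplebucket', key='photo.jpg'),
+    #       '/tmp/photo.jpg')
+    #   if client.is_object_exist('examplebucket', 'photo.jpg'):
+    #       client.get_object_to_file(
+    #           oss.models.GetObjectRequest(bucket='examplebucket', key='photo.jpg'),
+    #           '/tmp/photo-copy.jpg')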
+
+    def get_object_to_file(self, request: models.GetObjectRequest, filepath: str, **kwargs) -> models.GetObjectResult:
+        """Downloads an object into a local file.
+
+        Args:
+            request (models.GetObjectRequest): Request parameters for GetObject operation.
+            filepath (str): The path of the local file to save the object to.
+
+        Returns:
+            models.GetObjectResult: Response result for GetObject operation.
+        """
+        prog = None
+        if request.progress_fn:
+            prog = Progress(request.progress_fn, -1)
+
+        chash = None
+        if self._client.has_feature(FF_ENABLE_CRC64_CHECK_DOWNLOAD):
+            chash = Crc64(0)
+
+        def _crc_checker(headers):
+            if chash is None:
+                return
+
+            scrc = headers.get('x-oss-hash-crc64ecma', None)
+            if scrc is None:
+                return
+            ccrc = str(chash.sum64())
+
+            if scrc != ccrc:
+                raise exceptions.InconsistentError(
+                    client_crc=ccrc,
+                    server_crc=scrc
+                )
+
+        def _get_object_to_file_no_retry(client: Client, request: models.GetObjectRequest, filepath: str, **kwargs):
+            with open(filepath, 'wb') as f:
+                err = None
+                result = client.get_object(request, **kwargs)
+                if prog:
+                    prog._total = result.content_length
+
+                try:
+                    for d in result.body.iter_bytes():
+                        f.write(d)
+                        if prog:
+                            prog.write(d)
+                        if chash:
+                            chash.write(d)
+                    _crc_checker(result.headers)
+                except Exception as e:
+                    err = e
+
+                result.body.close()
+                return result, err
+
+        result = None
+        err = None
+        for _ in range(self._client.get_retry_attempts()):
+            result, err = _get_object_to_file_no_retry(self, request, filepath, **kwargs)
+            if err is None:
+                break
+
+            if prog:
+                prog.reset()
+
+            if chash:
+                chash.reset()
+
+        if err is not None:
+            raise err
+
+        return result
diff --git a/alibabacloud_oss_v2/compat.py b/alibabacloud_oss_v2/compat.py
new file mode 100644
index 0000000..e69de29
diff --git a/alibabacloud_oss_v2/config.py b/alibabacloud_oss_v2/config.py
new file mode 100644
index 0000000..20f1d9d
--- /dev/null
+++ b/alibabacloud_oss_v2/config.py
@@ -0,0 +1,82 @@
+from typing import Optional, Union
+from .types import CredentialsProvider, HttpClient, Retryer
+from . import defaults
+
+class Config(object):
+    """Configuration for the client."""
+    def __init__(
+        self,
+        region: str = None,
+        endpoint: Optional[str] = None,
+        signature_version: Optional[str] = None,
+        credentials_provider: Optional[CredentialsProvider] = None,
+        retry_max_attempts: Optional[int] = None,
+        retryer: Optional[Retryer] = None,
+        http_client: Optional[HttpClient] = None,
+        connect_timeout: Optional[Union[int, float]] = None,
+        readwrite_timeout: Optional[Union[int, float]] = None,
+        use_dualstack_endpoint: Optional[bool] = None,
+        use_accelerate_endpoint: Optional[bool] = None,
+        use_internal_endpoint: Optional[bool] = None,
+        disable_ssl: Optional[bool] = None,
+        insecure_skip_verify: Optional[bool] = None,
+        enabled_redirect: Optional[bool] = None,
+        use_cname: Optional[bool] = None,
+        use_path_style: Optional[bool] = None,
+    ) -> None:
+        """
+        Args:
+            region (str, required): The region in which the bucket is located.
+            endpoint (str, optional): The domain names that other services can use to access OSS.
+            credentials_provider (CredentialsProvider, optional): The credentials provider to use when signing requests.
+            connect_timeout (int|float, optional): The time in seconds till a timeout exception is thrown
+                when attempting to make a connection. The default is 10 seconds.
+            readwrite_timeout (int|float, optional): The time in seconds till a timeout exception is thrown
+                when attempting to read from a connection. The default is 20 seconds.
+            retry_max_attempts (int, optional): Specifies the maximum number of attempts an API client will make
+                for an operation that fails with a retryable error.
+            retryer (Retryer, optional): Guides how HTTP requests should be retried in case of recoverable failures.
+            http_client (HttpClient, optional): The HTTP client to invoke API calls with.
+            use_cname (bool, optional): If the endpoint is a CName, set this flag to true.
+            use_path_style (bool, optional): Allows you to enable the client to use path-style addressing,
+                i.e., https://oss-cn-hangzhou.aliyuncs.com/bucket/key.
+            signature_version (str, optional): The signature version used when signing requests. Valid values: v4, v1.
+            disable_ssl (bool, optional): Forces the endpoint to be resolved as HTTP.
+            insecure_skip_verify (bool, optional): Skip server certificate verification.
+            enabled_redirect (bool, optional): Enable HTTP redirects or not. Disabled by default.
+            use_dualstack_endpoint (bool, optional): Dual-stack endpoints are provided in some regions.
+                This allows an IPv4 client and an IPv6 client to access a bucket by using the same endpoint.
+                Set this to True to use a dual-stack endpoint for the requests.
+            use_accelerate_endpoint (bool, optional): OSS provides the transfer acceleration feature to accelerate
+                data transfers of uploads and downloads across countries and regions.
+                Set this to True to use an accelerate endpoint for the requests.
+            use_internal_endpoint (bool, optional): You can use an internal endpoint to communicate between Alibaba Cloud services located
+                within the same region over the internal network. You are not charged for the traffic generated over the internal network.
+                Set this to True to use an internal endpoint for the requests.
+        """
+        self.region = region
+        self.endpoint = endpoint
+        self.signature_version = signature_version
+        self.credentials_provider = credentials_provider
+        self.retry_max_attempts = retry_max_attempts
+        self.retryer = retryer
+        self.http_client = http_client
+        self.connect_timeout = connect_timeout
+        self.readwrite_timeout = readwrite_timeout
+        self.use_dualstack_endpoint = use_dualstack_endpoint
+        self.use_accelerate_endpoint = use_accelerate_endpoint
+        self.use_internal_endpoint = use_internal_endpoint
+        self.disable_ssl = disable_ssl
+        self.insecure_skip_verify = insecure_skip_verify
+        self.enabled_redirect = enabled_redirect
+        self.use_cname = use_cname
+        self.use_path_style = use_path_style
+
+
+def load_default() -> Config:
+    """Returns a Config populated with the SDK's default values."""
+    return Config(
+        signature_version=defaults.DEFAULT_SIGNATURE_VERSION,
+        connect_timeout=defaults.DEFAULT_CONNECT_TIMEOUT,
+        readwrite_timeout=defaults.DEFAULT_READWRITE_TIMEOUT,
+    )
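+
+# A minimal configuration sketch (region and credentials are placeholders;
+# StaticCredentialsProvider lives in alibabacloud_oss_v2.credentials):
+#
+#   cfg = load_default()
+#   cfg.region = 'cn-hangzhou'
+#   cfg.credentials_provider = StaticCredentialsProvider('<access-key-id>', '<access-key-secret>')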
diff --git a/alibabacloud_oss_v2/crc.py b/alibabacloud_oss_v2/crc.py
new file mode 100644
index 0000000..89114fd
--- /dev/null
+++ b/alibabacloud_oss_v2/crc.py
@@ -0,0 +1,238 @@
+"""crc utils"""
+
+import sys
+import crcmod
+
+MAX_INT = sys.maxsize
+
+
+#-----------------------------------------------------------------------------
+# Export mkCombineFun to users to support the crc64 combine feature.
+#
+# Example:
+#
+# import crcmod
+#
+# _POLY = 0x142F0E1EBA9EA3693
+# _XOROUT = 0XFFFFFFFFFFFFFFFF
+#
+# string_a = b'12345'
+# string_b = b'67890'
+#
+# combine_fun = mkCombineFun(_POLY, 0, True, _XOROUT)
+#
+# crc64_a = crcmod.Crc(_POLY, initCrc=0, xorOut=_XOROUT)
+# crc64_a.update(string_a)
+#
+# crc64_b = crcmod.Crc(_POLY, initCrc=0, xorOut=_XOROUT)
+# crc64_b.update(string_b)
+#
+# combine_fun(crc64_a.crcValue, crc64_b.crcValue, len(string_b))
+#
+def mkCombineFun(poly, initCrc=~int(0), rev=True, xorOut=0):
+    # mask = (1 << sizeBits) - 1
+    (sizeBits, initCrc, xorOut) = _verifyParams(poly, initCrc, xorOut)
+
+    mask = (int(1) << sizeBits) - 1
+    if rev:
+        poly = _bitrev(int(poly) & mask, sizeBits)
+    else:
+        poly = int(poly) & mask
+
+    if sizeBits == 64:
+        fun = _combine64
+    else:
+        raise NotImplementedError('crc64 combine only supports 64-bit CRCs')
+
+    def combine_fun(crc1, crc2, len2):
+        return fun(poly, initCrc ^ xorOut, rev, xorOut, crc1, crc2, len2)
+
+    return combine_fun
+
+
+GF2_DIM = 64
+
+
+def gf2_matrix_times(mat, vec):
+    summary = 0
+    mat_index = 0
+
+    while vec:
+        if vec & 1:
+            summary ^= mat[mat_index]
+
+        vec >>= 1
+        mat_index += 1
+
+    return summary
+
+
+def gf2_matrix_square(square, mat):
+    for n in range(GF2_DIM):
+        square[n] = gf2_matrix_times(mat, mat[n])
+
+
+def _combine64(poly, initCrc, rev, xorOut, crc1, crc2, len2):
+    if len2 == 0:
+        return crc1
+
+    even = [0] * GF2_DIM
+    odd = [0] * GF2_DIM
+
+    crc1 ^= initCrc ^ xorOut
+
+    if (rev):
+        # put operator for one zero bit in odd
+        odd[0] = poly  # CRC-64 polynomial
+        row = 1
+        for n in range(1, GF2_DIM):
+            odd[n] = row
+            row <<= 1
+    else:
+        row = 2
+        for n in range(0, GF2_DIM - 1):
+            odd[n] = row
+            row <<= 1
+        odd[GF2_DIM - 1] = poly
+
+    gf2_matrix_square(even, odd)
+
+    gf2_matrix_square(odd, even)
+
+    while True:
+        gf2_matrix_square(even, odd)
+        if len2 & int(1):
+            crc1 = gf2_matrix_times(even, crc1)
+        len2 >>= 1
+        if len2 == 0:
+            break
+
+        gf2_matrix_square(odd, even)
+        if len2 & int(1):
+            crc1 = gf2_matrix_times(odd, crc1)
+        len2 >>= 1
+
+        if len2 == 0:
+            break
+
+    crc1 ^= crc2
+
+    return crc1
+
+#-----------------------------------------------------------------------------
+# The code below is copied from crcmod; for more detail please visit:
+# https://bitbucket.org/cmcqueen1975/crcmod/src/8fb658289c35eff1d37cc47799569f90c5b39e1e/python2/crcmod/crcmod.py?at=default&fileviewer=file-view-default
+
+#-----------------------------------------------------------------------------
+# Check the polynomial to make sure that it is acceptable and return the number
+# of bits in the CRC.
+
+def _verifyPoly(poly):
+    msg = 'The degree of the polynomial must be 8, 16, 24, 32 or 64'
+    poly = int(poly)  # Use a common representation for all operations
+    for n in (8, 16, 24, 32, 64):
+        low = int(1) << n
+        high = low * 2
+        if low <= poly < high:
+            return n
+    raise ValueError(msg)
+
+#-----------------------------------------------------------------------------
+# Bit reverse the input value.
+
+def _bitrev(x, n):
+    x = int(x)
+    y = int(0)
+    for i in range(n):
+        y = (y << 1) | (x & int(1))
+        x = x >> 1
+    if ((int(1) << n) - 1) <= MAX_INT:
+        return int(y)
+    return y
+
+#-----------------------------------------------------------------------------
+# Verify the parameters and normalize the initial CRC and XOR-out values.
+
+def _verifyParams(poly, initCrc, xorOut):
+    sizeBits = _verifyPoly(poly)
+
+    mask = (int(1) << sizeBits) - 1
+
+    # Adjust the initial CRC to the correct data type (unsigned value).
+    initCrc = int(initCrc) & mask
+
+    # Similar for the XOR-out value.
+    xorOut = int(xorOut) & mask
+
+    return (sizeBits, initCrc, xorOut)
+
+
+_POLY = 0x142F0E1EBA9EA3693
+_XOROUT = 0XFFFFFFFFFFFFFFFF
+
+_COMBINE_FUNC = mkCombineFun(_POLY, 0, True, _XOROUT)
+
+
+class Crc64:
+    """A CRC-64 hash object based on crcmod, using the ECMA-182 polynomial."""
+
+    def __init__(self, init_crc=0) -> None:
+        """Create a new crc64 hash instance."""
+        self._crc = crcmod.Crc(0x142F0E1EBA9EA3693,
+                               initCrc=init_crc,
+                               rev=True,
+                               xorOut=0XFFFFFFFFFFFFFFFF)
+
+    def update(self, data) -> None:
+        """Update the current CRC value using the bytes-like object passed as
+        the data parameter.
+        """
+        self._crc.update(data)
+
+    def digest(self):
+        """Return the digest of the bytes passed to the update() method so far.
+        """
+        return self._crc.digest()
+
+    def hexdigest(self):
+        """Return digest() as a hexadecimal string.
+        """
+        return self._crc.hexdigest()
+
+    def reset(self):
+        """Resets the hash object to its initial state."""
+        self._crc.crcValue = self._crc.initCrc
+
+    def sum64(self):
+        """Return the CRC64 value as an int."""
+        return self._crc.crcValue
+
+    def write(self, data: bytes):
+        """Update the current CRC value using the bytes-like object passed as
+        the data parameter; an alias of update() with a file-like interface.
+        """
+        self._crc.update(data)
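+
+    # Combining lets per-part CRC-64 values be merged into the checksum of the
+    # whole stream without rescanning the data; a hypothetical sketch, where
+    # crc_a/crc_b are sum64() values of two consecutive blocks and len_b is the
+    # second block's length in bytes:
+    #
+    #   crc_ab = Crc64.combine(crc_a, crc_b, len_b)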
+
+    @staticmethod
+    def combine(crc1, crc2, size) -> int:
+        """Combines two CRC-64 values.
+
+        Args:
+            crc1 (int): The CRC-64 checksum of the first block of data.
+            crc2 (int): The CRC-64 checksum of the second block of data.
+            size (int): The length, in bytes, of the second block of data.
+
+        Returns:
+            int: The CRC-64 checksum of the two blocks concatenated.
+        """
+        return _COMBINE_FUNC(crc1, crc2, size)
diff --git a/alibabacloud_oss_v2/credentials/__init__.py b/alibabacloud_oss_v2/credentials/__init__.py
new file mode 100644
index 0000000..36b3bc7
--- /dev/null
+++ b/alibabacloud_oss_v2/credentials/__init__.py
@@ -0,0 +1,2 @@
+# provider implementations
+from .provider_impl import *
\ No newline at end of file
diff --git a/alibabacloud_oss_v2/credentials/provider_impl.py b/alibabacloud_oss_v2/credentials/provider_impl.py
new file mode 100644
index 0000000..f9a0c39
--- /dev/null
+++ b/alibabacloud_oss_v2/credentials/provider_impl.py
@@ -0,0 +1,58 @@
+import os
+from typing import Optional
+from ..types import Credentials, CredentialsProvider
+from ..exceptions import CredentialsEmptyError
+
+
+class AnonymousCredentialsProvider(CredentialsProvider):
+    """Access OSS anonymously.
+    """
+
+    def __init__(self) -> None:
+        self._credentials = Credentials("", "")
+
+    def get_credentials(self) -> Credentials:
+        return self._credentials
+
+
+class StaticCredentialsProvider(CredentialsProvider):
+    """Explicitly specify the AccessKey pair that you want to use to access OSS.
+    """
+
+    def __init__(
+        self,
+        access_key_id: str,
+        access_key_secret: str,
+        security_token: Optional[str] = None,
+    ) -> None:
+        """
+        Args:
+            access_key_id (str): access key id to access OSS.
+            access_key_secret (str): access key secret to access OSS.
+            security_token (Optional[str], optional): The STS session token.
+        """
+        self._credentials = Credentials(
+            access_key_id, access_key_secret, security_token)
+
+    def get_credentials(self) -> Credentials:
+        return self._credentials
+
+class EnvironmentVariableCredentialsProvider(CredentialsProvider):
+    """Obtaining credentials from environment variables.
+ OSS_ACCESS_KEY_ID + OSS_ACCESS_KEY_SECRET + OSS_SESSION_TOKEN (Optional) + """ + + def __init__(self) -> None: + access_key_id = os.getenv("OSS_ACCESS_KEY_ID", '') + access_key_secret = os.getenv("OSS_ACCESS_KEY_SECRET", '') + + if access_key_id == '' or access_key_secret == '': + raise CredentialsEmptyError() + + self._credentials = Credentials( + access_key_id, access_key_secret, os.getenv("OSS_SESSION_TOKEN", None)) + + def get_credentials(self) -> Credentials: + return self._credentials diff --git a/alibabacloud_oss_v2/crypto/__init__.py b/alibabacloud_oss_v2/crypto/__init__.py new file mode 100644 index 0000000..3aaf6f7 --- /dev/null +++ b/alibabacloud_oss_v2/crypto/__init__.py @@ -0,0 +1,3 @@ + +from .types import * +from .master_rsa_cipher import MasterRsaCipher \ No newline at end of file diff --git a/alibabacloud_oss_v2/crypto/aes_ctr.py b/alibabacloud_oss_v2/crypto/aes_ctr.py new file mode 100644 index 0000000..8f6e70e --- /dev/null +++ b/alibabacloud_oss_v2/crypto/aes_ctr.py @@ -0,0 +1,267 @@ + +import struct +from typing import Any, Iterator, Iterable, AnyStr +from Crypto.Cipher import AES +from Crypto.Util import Counter +from Crypto import Random +from ..types import StreamBody +from .types import CipherData + +_KEY_LEN = 32 +_BLOCK_SIZE_LEN = 16 +_BLOCK_BITS_LEN = 8 * 16 + +def _iv_to_big_int(iv: bytes) -> int: + iv_high_low_pair = struct.unpack(">QQ", iv) + iv_big_int = iv_high_low_pair[0] << 64 | iv_high_low_pair[1] + return iv_big_int + +class IteratorEncryptor(): + """_summary_ + """ + + def __init__( + self, + iterable: Iterable, + cipher_data: CipherData, + counter: int + ) -> None: + self._iterable = iterable + self._cipher_data = cipher_data + self._counter = counter + self._cipher = None + self._iter = None + self._finished = False + self._remains_bytes = None + + def __iter__(self): + ctr = Counter.new(_BLOCK_BITS_LEN, initial_value=self._counter) + self._cipher = AES.new(self._cipher_data.key, AES.MODE_CTR, counter=ctr) + self._iter = iter(self._iterable) + self._finished = False + self._remains_bytes = None + return self + + def __next__(self): + if self._finished: + raise StopIteration + + data = self._remains_bytes or b'' + self._remains_bytes = None + try: + while True: + d = next(self._iter) + if isinstance(d, int): + d = d.to_bytes() + elif isinstance(d, str): + d = d.encode() + if len(d) < _BLOCK_SIZE_LEN: + data += d + else: + if len(data) > 0: + data += d + else: + data = d + if len(data) >= _BLOCK_SIZE_LEN: + data_len = len(data) + align_len = (data_len // _BLOCK_SIZE_LEN) * _BLOCK_SIZE_LEN + edata = self._cipher.encrypt(data[:align_len]) + if data_len > align_len: + self._remains_bytes = data[align_len:] + return edata + except StopIteration as err: + self._finished = True + if len(data) > 0: + return self._cipher.encrypt(data) + raise err + +class FileLikeEncryptor(): + """_summary_ + """ + def __init__( + self, + reader: Any, + cipher_data: CipherData, + offset: int + ) -> None: + self._reader = reader + self._cipher_data = cipher_data + self._cipher = None + self._base = reader.tell() + self._roffset = self._base + self._offset = offset + + def read(self, n: int = -1) -> AnyStr: + """_summary_ + + Args: + n (int, optional): _description_. Defaults to -1. 
+ + Returns: + AnyStr: _description_ + """ + if self._cipher is None: + reloffset = self._roffset - self._base + if not 0 == reloffset % _BLOCK_SIZE_LEN: + raise ValueError('relative offset is not align to encrypt block') + counter = _iv_to_big_int(self._cipher_data.iv) + (self._offset + reloffset)//_BLOCK_SIZE_LEN + ctr = Counter.new(_BLOCK_BITS_LEN, initial_value=counter) + self._cipher = AES.new(self._cipher_data.key, AES.MODE_CTR, counter=ctr) + + if n >= 0 and 0 != n % _BLOCK_SIZE_LEN: + raise ValueError('n is not align to encrypt block') + + return self._cipher.encrypt(self._reader.read(n)) + + def seek(self, offset: int, whence: int = 0) -> int: + """_summary_ + + Args: + offset (int): _description_ + whence (int, optional): _description_. Defaults to 0. + + Returns: + int: _description_ + """ + offset = self._reader.seek(offset, whence) + if offset < self._base: + raise ValueError(f'Offset {offset} is less than base {self._base}, can not creates cipher.') + + self._roffset = offset + self._cipher = None + return offset + + def tell(self) -> int: + """_summary_ + """ + return self._reader.tell() + + +class StreamBodyDecryptor(StreamBody): + """_summary_ + """ + def __init__( + self, + stream: StreamBody, + cipher_data: CipherData, + counter: int + ) -> None: + self._stream = stream + self._cipher_data = cipher_data + self._counter = counter + + def __enter__(self) -> "StreamBodyDecryptor": + self._stream.__enter__() + return self + + def __exit__(self, *args: Any) -> None: + self._stream.__exit__(*args) + + @property + def is_closed(self) -> bool: + return self._stream.is_closed + + @property + def is_stream_consumed(self) -> bool: + return self._stream.is_stream_consumed + + @property + def content(self) -> bytes: + if not self._stream.is_stream_consumed: + self._stream.read() + return self._get_cipher().decrypt(self._stream.content) + + def read(self) -> bytes: + return self._get_cipher().decrypt(self._stream.read()) + + def close(self) -> None: + self._stream.close() + + def iter_bytes(self, **kwargs: Any) -> Iterator[bytes]: + cipher = self._get_cipher() + for d in self._stream.iter_bytes(**kwargs): + yield cipher.decrypt(d) + + def _get_cipher(self): + ctr = Counter.new(_BLOCK_BITS_LEN, initial_value=self._counter) + return AES.new(self._cipher_data.key, AES.MODE_CTR, counter=ctr) + + +class _AesCtr: + def __init__( + self, + cipher_data: CipherData, + offset: int + ): + self.cipher_data = cipher_data + self.offset = offset + if not 0 == offset % _BLOCK_SIZE_LEN: + raise ValueError('offset is not align to encrypt block') + self.counter = _iv_to_big_int(cipher_data.iv) + offset//_BLOCK_SIZE_LEN + self.no_bytes = False + self.no_str = False + + def encrypt(self, src: Any) -> Any: + """_summary_ + + Args: + src (Any): _description_ + + Returns: + Any: _description_ + """ + if not self.no_str and isinstance(src, str): + return self._get_cipher().encrypt(src.encode()) + + if not self.no_bytes and isinstance(src, bytes): + return self._get_cipher().encrypt(src) + + # file-like object + if hasattr(src, 'seek') and hasattr(src, 'read'): + return FileLikeEncryptor(reader=src, cipher_data=self.cipher_data, offset=self.offset) + + if isinstance(src, Iterable): + return IteratorEncryptor(iterable=src, cipher_data=self.cipher_data, counter=self.counter) + + raise TypeError(f'src is not str/bytes/file-like/Iterable type, got {type(src)}') + + def decrypt(self, src: Any) -> Any: + """_summary_ + + Args: + src (Any): _description_ + + Returns: + Any: _description_ + """ + if isinstance(src, 
bytes): + return self._get_cipher().decrypt(src) + + if not isinstance(src, StreamBody): + raise TypeError(f'src is not StreamBody type, got {type(src)}') + + return StreamBodyDecryptor(src, self.cipher_data, self.counter) + + def _get_cipher(self): + ctr = Counter.new(_BLOCK_BITS_LEN, initial_value=self.counter) + return AES.new(self.cipher_data.key, AES.MODE_CTR, counter=ctr) + + @staticmethod + def random_key() -> bytes: + """_summary_ + + Returns: + bytes: _description_ + """ + return Random.new().read(_KEY_LEN) + + @staticmethod + def random_iv() -> bytes: + """_summary_ + + Returns: + bytes: _description_ + """ + iv = Random.new().read(16) + safe_iv = iv[0:8] + struct.pack(">L", 0) + iv[12:] + return safe_iv diff --git a/alibabacloud_oss_v2/crypto/aes_ctr_cipher.py b/alibabacloud_oss_v2/crypto/aes_ctr_cipher.py new file mode 100644 index 0000000..e6721b9 --- /dev/null +++ b/alibabacloud_oss_v2/crypto/aes_ctr_cipher.py @@ -0,0 +1,105 @@ + +from typing import Any +from .types import ( + ContentCipherBuilder, + MasterCipher, + ContentCipher, + CipherData, + Envelope +) +from .aes_ctr import _AesCtr + +class _AESCtrCipher(ContentCipher): + def __init__( + self, + cipher_data: CipherData, + offset: int + ): + self._cipher_data = cipher_data + self._cipher = _AesCtr(cipher_data, offset) + + def encrypt_content(self, data: Any) -> Any: + """_summary_ + """ + return self._cipher.encrypt(data) + + def decrypt_content(self, data: Any) -> Any: + """_summary_ + """ + reader = self._cipher.decrypt(data) + return reader + + def clone(self, **kwargs) -> ContentCipher: + """_summary_ + """ + return _AESCtrCipher( + cipher_data=self._cipher_data, + offset = kwargs.get("offset", 0) + ) + + def get_encrypted_len(self, plain_text_len: int) -> int: + """AES CTR encryption mode does not change content length + """ + return plain_text_len + + def get_cipher_data(self) -> CipherData: + return self._cipher_data + + def get_align_len(self) -> int: + return len(self._cipher_data.iv) + + +class AESCtrCipherBuilder(ContentCipherBuilder): + """_summary_ + + Args: + ContentCipherBuilder (_type_): _description_ + """ + def __init__( + self, + master_cipher: MasterCipher, + ): + self.master_cipher = master_cipher + + def content_cipher(self) -> ContentCipher: + cd = self._create_cipher_data() + return self._content_cipher_from_cd(cd, 0) + + def content_cipher_from_env(self, env: Envelope, **kwargs) -> ContentCipher: + encrypted_key = env.cipher_key + encrypted_iv = env.iv + key = self.master_cipher.decrypt(encrypted_key) + iv = self.master_cipher.decrypt(encrypted_iv) + offset = kwargs.get("offset", 0) + return self._content_cipher_from_cd( + CipherData( + key=key, + iv=iv, + encrypted_key=encrypted_key, + encrypted_iv=encrypted_iv, + wrap_algorithm=env.wrap_algorithm, + cek_algorithm=env.cek_algorithm, + mat_desc=env.mat_desc + ), + offset) + + def get_mat_desc(self) -> str: + return self.master_cipher.get_mat_desc() + + def _create_cipher_data(self) -> CipherData: + key = _AesCtr.random_key() + iv = _AesCtr.random_iv() + encrypted_key = self.master_cipher.encrypt(key) + encrypted_iv = self.master_cipher.encrypt(iv) + return CipherData( + key=key, + iv=iv, + encrypted_key=encrypted_key, + encrypted_iv=encrypted_iv, + wrap_algorithm=self.master_cipher.get_wrap_algorithm(), + cek_algorithm='AES/CTR/NoPadding', + mat_desc=self.master_cipher.get_mat_desc() + ) + + def _content_cipher_from_cd(self, cd:CipherData, offset: int) -> ContentCipher: + return _AESCtrCipher(cipher_data=cd, offset=offset) diff --git 
a/alibabacloud_oss_v2/crypto/cipher.py b/alibabacloud_oss_v2/crypto/cipher.py new file mode 100644 index 0000000..60baa60 --- /dev/null +++ b/alibabacloud_oss_v2/crypto/cipher.py @@ -0,0 +1,23 @@ +import abc +from typing import Any + +class Encrypter(abc.ABC): + """Encrypter is interface with only encrypt method""" + + @abc.abstractmethod + def encrypt(self, reader: Any) -> Any: + """_summary_ + """ + + +class Decrypter(abc.ABC): + """Decrypter is interface with only decrypt method""" + + @abc.abstractmethod + def decrypt(self, reader: Any) -> Any: + """_summary_ + """ + +class Cipher(Encrypter, Decrypter): + """_summary_ + """ diff --git a/alibabacloud_oss_v2/crypto/master_rsa_cipher.py b/alibabacloud_oss_v2/crypto/master_rsa_cipher.py new file mode 100644 index 0000000..19d95d8 --- /dev/null +++ b/alibabacloud_oss_v2/crypto/master_rsa_cipher.py @@ -0,0 +1,49 @@ +import json +from typing import Optional, Dict +from Crypto.Cipher import PKCS1_v1_5 +from Crypto.PublicKey import RSA +from .types import MasterCipher + +class MasterRsaCipher(MasterCipher): + """MasterRsaCipher implements rsa master key interface + """ + def __init__( + self, + mat_desc: Optional[Dict] = None, + public_key: Optional[str] = None, + private_key: Optional[str] = None, + ): + self._public_key = public_key + self._private_key = private_key + self._mat_desc = None + if mat_desc is not None and len(mat_desc.items()) > 0: + self._mat_desc = json.dumps(mat_desc) + + self._encrypt_obj = None + if public_key is not None: + self._encrypt_obj = PKCS1_v1_5.new(RSA.importKey(public_key)) + + self._decrypt_obj = None + if private_key is not None: + self._decrypt_obj = PKCS1_v1_5.new(RSA.importKey(private_key)) + + + def get_wrap_algorithm(self) -> str: + return 'RSA/NONE/PKCS1Padding' + + def get_mat_desc(self) -> str: + return self._mat_desc or '' + + def encrypt(self, data: bytes) -> bytes: + if self._encrypt_obj is None: + raise ValueError('RSA public key is none or invalid.') + + return self._encrypt_obj.encrypt(data) + + def decrypt(self, data: bytes) -> bytes: + if self._decrypt_obj is None: + raise ValueError('RSA private key is none or invalid.') + decrypted_data = self._decrypt_obj.decrypt(data, object) + if decrypted_data == object: + raise ValueError('Decrypted data error, please check RSA private key!') + return decrypted_data diff --git a/alibabacloud_oss_v2/crypto/types.py b/alibabacloud_oss_v2/crypto/types.py new file mode 100644 index 0000000..f7be42a --- /dev/null +++ b/alibabacloud_oss_v2/crypto/types.py @@ -0,0 +1,151 @@ + +import abc +import copy +from typing import Optional, Any +from ..utils import safety_str + +class MasterCipher(abc.ABC): + """Base abstract base class to encrypt or decrpt CipherData""" + + @abc.abstractmethod + def encrypt(self, data: bytes) -> bytes: + """_summary_ + """ + + @abc.abstractmethod + def decrypt(self, data: bytes) -> bytes: + """_summary_ + """ + + @abc.abstractmethod + def get_wrap_algorithm(self) -> str: + """_summary_ + """ + + @abc.abstractmethod + def get_mat_desc(self) -> str: + """_summary_ + """ + +class ContentCipher(abc.ABC): + """Base abstract base class to encrypt or decrpt object's data""" + + @abc.abstractmethod + def encrypt_content(self, data: Any) -> Any: + """_summary_ + """ + + @abc.abstractmethod + def decrypt_content(self, data: Any) -> Any: + """_summary_ + """ + + @abc.abstractmethod + def clone(self, **kwargs) -> "ContentCipher": + """_summary_ + """ + + @abc.abstractmethod + def get_encrypted_len(self, plain_text_len: int) -> int: + """_summary_ + 
""" + + @abc.abstractmethod + def get_cipher_data(self) -> "CipherData": + """_summary_ + """ + + @abc.abstractmethod + def get_align_len(self) -> int: + """_summary_ + """ + + +class Envelope: + """Envelope is stored in object's meta""" + + def __init__( + self, + iv: Optional[bytes] = None, + cipher_key: Optional[bytes] = None, + mat_desc: Optional[str] = None, + wrap_algorithm: Optional[str] = None, + cek_algorithm: Optional[str] = None, + unencrypted_md5: Optional[str] = None, + unencrypted_content_length: Optional[str] = None, + ): + self.iv = iv + self.cipher_key = cipher_key + self.mat_desc = mat_desc + self.wrap_algorithm = wrap_algorithm + self.cek_algorithm = cek_algorithm + self.unencrypted_md5 = unencrypted_md5 + self.unencrypted_content_length = unencrypted_content_length + + def is_valid(self) -> bool: + """_summary_ + + Returns: + bool: _description_ + """ + return (len(self.iv or b'') > 0 and + len(self.cipher_key or b'') > 0 and + safety_str(self.wrap_algorithm) != '' and + safety_str(self.cek_algorithm) != '') + + + def random_key_iv(self): + """_summary_ + """ + +class CipherData: + """CipherData is secret key information.""" + + def __init__( + self, + iv: bytes = None, + key: bytes = None, + encrypted_iv: bytes = None, + encrypted_key: bytes = None, + mat_desc: str = None, + wrap_algorithm: str = None, + cek_algorithm: str = None, + ): + self.iv = iv + self.key = key + self.encrypted_iv = encrypted_iv + self.encrypted_key = encrypted_key + self.mat_desc = mat_desc + self.wrap_algorithm = wrap_algorithm + self.cek_algorithm = cek_algorithm + + def clone(self) -> "CipherData": + """_summary_ + + Returns: + CipherData: _description_ + """ + return copy.deepcopy(self) + + def random_key_iv(self): + """_summary_ + """ + + +class ContentCipherBuilder(abc.ABC): + """Base abstract base class to create ContentCipher""" + + @abc.abstractmethod + def content_cipher(self) -> ContentCipher: + """_summary_ + """ + + @abc.abstractmethod + def content_cipher_from_env(self, env: Envelope, **kwargs) -> ContentCipher: + """_summary_ + """ + + @abc.abstractmethod + def get_mat_desc(self) -> str: + """_summary_ + """ diff --git a/alibabacloud_oss_v2/defaults.py b/alibabacloud_oss_v2/defaults.py new file mode 100644 index 0000000..00a18d0 --- /dev/null +++ b/alibabacloud_oss_v2/defaults.py @@ -0,0 +1,64 @@ +DEFAULT_CONNECT_TIMEOUT = 10 +DEFAULT_READWRITE_TIMEOUT = 10 + +# Default signature version is v4 +DEFAULT_SIGNATURE_VERSION = "v4" + +# Product for signing +DEFAULT_PRODUCT = "oss" + +# The URL's scheme, default is https +DEFAULT_ENDPOINT_SCHEME = "https" + +DEFAULT_MAX_ATTEMPTS = 3 +DEFAULT_MAX_BACKOFF_S = 20.0 +DEFAULT_BASE_DELAY_S = 0.2 + +DEFAULT_IDLE_CONNECTION_TIMEOUT = 50 +DEFAULT_KEEP_ALIVE_TIMEOUT = 30 +DEFAULT_EXPECT_CONTINUE_TIMEOUT = 30 + +DEFAULT_MAX_CONNECTIONS = 20 + +# TLS 1.2 for all HTTPS requests. 
+# DEFAULT_TLS_MIN_VERSION = 1.2
+
+DEFAULT_CHUNK_SIZE = 16 * 1024
+
+# Default part size, 6M
+DEFAULT_PART_SIZE = 6 * 1024 * 1024
+
+# Default part size for the uploader to upload data
+DEFAULT_UPLOAD_PART_SIZE = DEFAULT_PART_SIZE
+
+# Default part size for the downloader to download objects
+DEFAULT_DOWNLOAD_PART_SIZE = DEFAULT_PART_SIZE
+
+# Default part size for the copier to copy objects, 64M
+DEFAULT_COPY_PART_SIZE = 64 * 1024 * 1024
+
+# Default parallelism
+DEFAULT_PARALLEL = 3
+
+# Default parallelism for the uploader to upload data
+DEFAULT_UPLOAD_PARALLEL = DEFAULT_PARALLEL
+
+# Default parallelism for the downloader to download objects
+DEFAULT_DOWNLOAD_PARALLEL = DEFAULT_PARALLEL
+
+# Default parallelism for the copier to copy objects
+DEFAULT_COPY_PARALLEL = DEFAULT_PARALLEL
+
+# Temp file suffix
+DEFAULT_TEMP_FILE_SUFFIX = ".temp"
+
+MAX_UPLOAD_PARTS = 10000
+
+# Checkpoint file suffix for Downloader
+CHECKPOINT_FILE_SUFFIX_DOWNLOADER = ".dcp"
+
+# Checkpoint file suffix for Uploader
+CHECKPOINT_FILE_SUFFIX_UPLOADER = ".ucp"
+
+# Checkpoint file magic
+CHECKPOINT_MAGIC = "92611BED-89E2-46B6-89E5-72F273D4B0A3"
\ No newline at end of file
diff --git a/alibabacloud_oss_v2/downloader.py b/alibabacloud_oss_v2/downloader.py
new file mode 100644
index 0000000..ae60b75
--- /dev/null
+++ b/alibabacloud_oss_v2/downloader.py
@@ -0,0 +1,546 @@
+"""Downloader for handling objects for downloads."""
+import abc
+import copy
+import os
+import concurrent.futures
+import threading
+from typing import Iterator, Any, Optional, IO
+from . import exceptions
+from . import models
+from . import validation
+from . import utils
+from . import io_utils
+from . import defaults
+from .serde import copy_request
+from .checkpoint import DownloadCheckpoint
+from .crc import Crc64
+
+class DownloadAPIClient(abc.ABC):
+    """Abstract base class for the downloader's client."""
+
+    @abc.abstractmethod
+    def head_object(self, request: models.HeadObjectRequest, **kwargs) -> models.HeadObjectResult:
+        """Queries information about the object in a bucket."""
+
+    @abc.abstractmethod
+    def get_object(self, request: models.GetObjectRequest, **kwargs) -> models.GetObjectResult:
+        """
+        Queries an object. To call this operation, you must have read permissions on the object.
+        """
+
+class DownloaderOptions:
+    """Options that control how the downloader transfers an object.
+    """
+
+    def __init__(
+        self,
+        part_size: Optional[int] = None,
+        parallel_num: Optional[int] = None,
+        block_size: Optional[int] = None,
+        use_temp_file: Optional[bool] = None,
+        enable_checkpoint: Optional[bool] = None,
+        checkpoint_dir: Optional[str] = None,
+        verify_data: Optional[bool] = None,
+    ) -> None:
+        self.part_size = part_size
+        self.parallel_num = parallel_num
+        self.block_size = block_size
+        self.use_temp_file = use_temp_file or False
+        self.enable_checkpoint = enable_checkpoint or False
+        self.checkpoint_dir = checkpoint_dir
+        self.verify_data = verify_data
+
+
+class DownloadResult:
+    """The result of a download; `written` is the number of bytes written.
+    """
+
+    def __init__(
+        self,
+        written: Optional[int],
+    ) -> None:
+        self.written = written
+
+class DownloadError(exceptions.BaseError):
+    """
+    Download Error.
+    """
+    fmt = 'download failed, {path}, {error}.'
+
+    def __init__(self, **kwargs):
+        exceptions.BaseError.__init__(self, **kwargs)
+        self._error = kwargs.get("error", None)
+        self.path = kwargs.get("path", None)
+
+    def unwrap(self) -> Exception:
+        """returns the detail error"""
+        return self._error
+
+class Downloader:
+    """Downloader for handling objects for downloads."""
+
+    def __init__(
+        self,
+        client: DownloadAPIClient,
+        **kwargs: Any
+    ) -> None:
+        """
+        client (DownloadAPIClient): An agent that implements the HeadObject and GetObject APIs.
+        **kwargs: Optional downloader options, e.g. part_size, parallel_num, block_size,
+            use_temp_file, enable_checkpoint, checkpoint_dir and verify_data.
+        """
+        part_size = kwargs.get('part_size', defaults.DEFAULT_DOWNLOAD_PART_SIZE)
+        parallel_num = kwargs.get('parallel_num', defaults.DEFAULT_DOWNLOAD_PARALLEL)
+        self._client = client
+        self._options = DownloaderOptions(
+            part_size=part_size,
+            parallel_num=parallel_num,
+            block_size=kwargs.get('block_size', None),
+            use_temp_file=kwargs.get('use_temp_file', None),
+            enable_checkpoint=kwargs.get('enable_checkpoint', None),
+            checkpoint_dir=kwargs.get('checkpoint_dir', None),
+            verify_data=kwargs.get('verify_data', None),
+        )
+
+        feature_flags = 0
+        cstr = str(client)
+        if cstr == '<Client>':
+            feature_flags = client._client._options.feature_flags
+        elif cstr == '<EncryptionClient>':
+            feature_flags = client.unwrap()._client._options.feature_flags
+        self._feature_flags = feature_flags
+
+
+    def download_file(
+        self,
+        request: models.GetObjectRequest,
+        filepath: str,
+        **kwargs: Any
+    ) -> DownloadResult:
+        """Downloads an object into a local file.
+
+        Args:
+            request (models.GetObjectRequest): Request parameters for GetObject operation.
+            filepath (str): The path of the local file to save the object to.
+
+        Returns:
+            DownloadResult: The result of the download.
+        """
+        delegate = self._delegate(request, **kwargs)
+
+        delegate.check_source()
+
+        delegate.check_destination(filepath)
+
+        delegate.adjust_range()
+
+        delegate.check_checkpoint()
+
+        with open(delegate.writer_filepath, 'ab') as _:
+            pass
+        with open(delegate.writer_filepath, 'rb+') as writer:
+
+            delegate.adjust_writer(writer)
+
+            delegate.update_crc_flag()
+
+            result = delegate.download()
+
+            delegate.close_writer(writer)
+
+        return result
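+
+    # A small usage sketch (placeholder names; the downloader splits the object
+    # into part_size ranges and fetches them with parallel_num workers):
+    #
+    #   d = Downloader(client, part_size=6 * 1024 * 1024, parallel_num=3)
+    #   d.download_file(
+    #       models.GetObjectRequest(bucket='examplebucket', key='big-object'),
+    #       '/tmp/big-object')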
+
+    def download_to(
+        self,
+        request: models.GetObjectRequest,
+        writer: IO[bytes],
+        **kwargs: Any
+    ) -> DownloadResult:
+        """Downloads an object into a writable stream.
+
+        Args:
+            request (models.GetObjectRequest): Request parameters for GetObject operation.
+            writer (IO[bytes]): The stream the object is written to.
+
+        Returns:
+            DownloadResult: The result of the download.
+        """
+        delegate = self._delegate(request, **kwargs)
+
+        delegate.check_source()
+
+        delegate.adjust_range()
+
+        delegate.adjust_writer(writer)
+
+        result = delegate.download()
+
+        return result
+
+    def _delegate(
+        self,
+        request: models.GetObjectRequest,
+        **kwargs: Any
+    ) -> "_DownloaderDelegate":
+
+        if request is None:
+            raise exceptions.ParamNullError(field='request')
+
+        if not validation.is_valid_bucket_name(utils.safety_str(request.bucket)):
+            raise exceptions.ParamInvalidError(field='request.bucket')
+
+        if not validation.is_valid_object_name(utils.safety_str(request.key)):
+            raise exceptions.ParamInvalidError(field='request.key')
+
+        if request.range_header and not validation.is_valid_range(request.range_header):
+            raise exceptions.ParamInvalidError(field='request.range_header')
+
+        options = copy.copy(self._options)
+        options.part_size = kwargs.get('part_size', self._options.part_size)
+        options.parallel_num = kwargs.get('parallel_num', self._options.parallel_num)
+        options.block_size = kwargs.get('block_size', self._options.block_size)
+        options.use_temp_file = kwargs.get('use_temp_file', self._options.use_temp_file)
+        options.enable_checkpoint = kwargs.get('enable_checkpoint', self._options.enable_checkpoint)
+        options.checkpoint_dir = kwargs.get('checkpoint_dir', self._options.checkpoint_dir)
+        options.verify_data = kwargs.get('verify_data', self._options.verify_data)
+
+        if options.part_size <= 0:
+            options.part_size = defaults.DEFAULT_DOWNLOAD_PART_SIZE
+
+        if options.parallel_num <= 0:
+            options.parallel_num = defaults.DEFAULT_DOWNLOAD_PARALLEL
+
+        delegate = _DownloaderDelegate(
+            base=self,
+            client=self._client,
+            request=request,
+            options=options
+        )
+
+        return delegate
+
+
+class _DownloaderDelegate:
+    def __init__(
+        self,
+        base: Downloader,
+        client: DownloadAPIClient,
+        request: models.GetObjectRequest,
+        options: DownloaderOptions,
+    ) -> None:
+        """
+        """
+        self._base = base
+        self._client = client
+        self._request = request
+        self._options = options
+
+        self._rstart = 0
+        self._pos = 0
+        self._epos = 0
+        self._written = 0
+
+        parallel = options.parallel_num > 1
+        self._writer = None
+        self._writer_lock = threading.Lock() if parallel else None
+        self._progress_lock = threading.Lock() if parallel else None
+
+        # source's info
+        self._size_in_bytes = None
+        self._modtime = None
+        self._etag = None
+        self._headers = None
+
+        # destination's info
+        self._filepath = None
+        self._temp_filepath = None
+
+        # CRC
+        self._calc_crc = False
+        self._check_crc = False
+        self._ccrc = 0
+        self._next_offset = 0
+
+        # checkpoint
+        self._checkpoint: DownloadCheckpoint = None
+
+        # errors collected from the multipart download
+        self._download_errors = []
+
+    @property
+    def writer_filepath(self) -> str:
+        """The path actually written to (the temp file when use_temp_file is enabled)."""
+        return self._temp_filepath
+
+    def check_source(self):
+        """Queries the source object's size, last-modified time and ETag."""
+        request = models.HeadObjectRequest(self._request.bucket, self._request.key)
+        copy_request(request, self._request)
+        result = self._client.head_object(request)
+
+        self._size_in_bytes = result.content_length
+        self._modtime = result.last_modified
+        self._etag = result.etag
+        self._headers = result.headers
+
+    def check_destination(self, filepath: str):
+        """Resolves the destination path and the temp file path."""
+        if len(utils.safety_str(filepath)) == 0:
+            raise exceptions.ParamInvalidError(field='filepath')
+
+        absfilepath = os.path.abspath(filepath)
+        tempfilepath = absfilepath
+        if self._options.use_temp_file:
+            tempfilepath += defaults.DEFAULT_TEMP_FILE_SUFFIX
+
+        self._filepath = absfilepath
+        self._temp_filepath = tempfilepath
+
+    def adjust_range(self):
+        """Derives the download window [pos, epos) from the request's range header."""
+        self._pos = 0
+        self._rstart = 0
+        self._epos = self._size_in_bytes
+
+        if self._request.range_header is not None:
+            range_header = utils.parse_http_range(self._request.range_header)
+            if range_header[0] >= self._size_in_bytes:
+                raise ValueError(f'invalid range, size :{self._size_in_bytes}, range: {self._request.range_header}')
+            if range_header[0] > 0:
+                self._pos = range_header[0]
+                self._rstart = self._pos
+
+            if range_header[1] > 0:
+                self._epos = min(range_header[1] + 1, self._size_in_bytes)
+
+    def check_checkpoint(self):
+        """Loads the checkpoint, if enabled, and resumes from the recorded offset."""
+        if not self._options.enable_checkpoint:
+            return
+
+        checkpoint = DownloadCheckpoint(
+            request=self._request,
+            filepath=self._temp_filepath,
+            basedir=self._options.checkpoint_dir,
+            headers=self._headers,
+            part_size=self._options.part_size)
+
+        checkpoint.verify_data = self._options.verify_data
+        checkpoint.load()
+        if checkpoint.loaded:
+            self._pos = checkpoint.doffset
+            self._written = self._pos - self._rstart
+        else:
+            checkpoint.doffset = self._pos
+
+        self._checkpoint = checkpoint
+        # crc
+        self._ccrc = checkpoint.dcrc64
+        self._next_offset = checkpoint.doffset
+
+
+    def adjust_writer(self, writer: IO[bytes]):
+        """Truncates the writer to the resume position when it supports truncation.
+
+        Args:
+            writer (IO[bytes]): The stream the object is written to.
+        """
+        try:
+            writer.truncate(self._pos - self._rstart)
+        except OSError:
+            pass
+
+        self._writer = writer
+
+    def close_writer(self, writer: IO[bytes]):
+        """Closes the writer, renames the temp file and removes the checkpoint.
+
+        Args:
+            writer (IO[bytes]): The stream the object was written to.
+        """
+        if writer:
+            writer.close()
+
+        if self._temp_filepath != self._filepath:
+            io_utils.rename_file(self._temp_filepath, self._filepath)
+
+        if self._checkpoint:
+            self._checkpoint.remove()
+
+        self._writer = None
+        self._checkpoint = None
+
+    def update_crc_flag(self):
+        """Decides whether to calculate and check CRC-64 for this download."""
+        # FF_ENABLE_CRC64_CHECK_DOWNLOAD
+        if (self._base._feature_flags & 0x00000010) > 0:
+            self._check_crc = self._request.range_header is None
+            self._calc_crc = (self._checkpoint is not None and self._checkpoint.verify_data) or self._check_crc
+
+    def download(self) -> DownloadResult:
+        """Performs the download, in parallel parts when possible."""
+        parallel = self._options.parallel_num > 1
+        seekable = utils.is_seekable(self._writer)
+        if not seekable:
+            parallel = False
+        if self._epos - self._pos <= self._options.part_size:
+            parallel = False
+
+        if parallel:
+            with concurrent.futures.ThreadPoolExecutor(self._options.parallel_num) as executor:
+                for result in executor.map(self._process_part, self._iter_part_start()):
+                    self._update_process_result(result)
+        else:
+            if seekable:
+                self._writer.seek(self._pos - self._rstart, os.SEEK_SET)
+            for start in self._iter_part_start():
+                self._update_process_result(self._process_part(start))
+                if len(self._download_errors) > 0:
+                    break
+
+        if len(self._download_errors) > 0:
+            raise self._wrap_error(self._download_errors[-1])
+
+        self._assert_crc_same()
+
+        return DownloadResult(written=self._written)
+
+    def _iter_part_start(self) -> Iterator[int]:
+        start = self._pos
+        while start < self._epos:
+            yield start
+            start += self._options.part_size
+
+            # When an error occurs, stop the download
+            if len(self._download_errors) > 0:
+                break
+
+    def _calc_part_size(self, start: int):
+        if start + self._options.part_size > self._epos:
+            size = self._epos - start
+        else:
+            size = self._options.part_size
+        return size
+
+    def _process_part(self, start: int):
+        # When an error has occurred, skip the remaining download requests
+        if len(self._download_errors) > 0:
+            return None
+
+        size = self._calc_part_size(start)
+        request = copy.copy(self._request)
+
+        got = 0
+        error: Exception = None
+
+        chash: Crc64 = None
+        if self._calc_crc:
+            chash = Crc64(0)
+
+        while True:
+            request.range_header = f'bytes={start + got}-{start + size - 1}'
+            request.range_behavior = 'standard'
+
+            try:
+                result = self._client.get_object(request)
+            except Exception as err:
+                error = err
+                break
+
+            kwargs = {}
+            if self._options.block_size:
+                kwargs['block_size'] = self._options.block_size
+
+            try:
+                for d in result.body.iter_bytes(**kwargs):
+                    l = len(d)
+                    if l > 0:
+                        self._write_to_stream(d, start + got)
+                        self._update_progress(l)
+                        got += l
+                        if chash:
+                            chash.update(d)
+
+                break
+            except Exception:
+                pass
+
+        return start, got, error, (chash.sum64() if chash else 0)
+
+
+    def _write_to_stream(self, data, start):
+        if self._writer_lock:
+            with self._writer_lock:
+                self._writer.seek(start - self._rstart)
+                self._writer.write(data)
+        else:
+            self._writer.write(data)
+
+    def _update_progress(self, increment: int):
+        if self._progress_lock:
+            with self._progress_lock:
+                self._written += increment
+        else:
+            self._written += increment
+
+    def _update_process_result(self, result):
+        if result is None:
+            return
+
+        if result[2] is not None:
+            self._download_errors.append(result[2])
+            return
+
+        start = result[0]
+        size = result[1]
+        crc = result[3]
+
+        if self._next_offset != start:
+            if len(self._download_errors) == 0:
+                self._download_errors.append(
+                    ValueError(f'out of order, expect offset {self._next_offset}, but got {start}'))
+
+        if len(self._download_errors) > 0:
+            return
+
+        self._next_offset = start + size
+
+        if self._check_crc:
+            self._ccrc = Crc64.combine(self._ccrc, crc, size)
+
+        if self._checkpoint:
+            self._checkpoint.dcrc64 = self._ccrc
+            self._checkpoint.doffset = self._next_offset
+            self._checkpoint.dump()
+
+
+    def _assert_crc_same(self):
+        if not self._check_crc:
+            return
+
+        scrc = self._headers.get('x-oss-hash-crc64ecma', None)
+        if scrc is None:
+            return
+
+        ccrc = str(self._ccrc)
+        if scrc != ccrc:
+            raise self._wrap_error(exceptions.InconsistentError(client_crc=ccrc, server_crc=scrc))
+
+
+    def _wrap_error(self, error: Exception) -> Exception:
+        return DownloadError(
+            path=f'oss://{self._request.bucket}/{self._request.key}',
+            error=error
+        )
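+
+# A short usage note: Client.downloader(**kwargs) in client.py simply builds
+# Downloader(self, **kwargs), so the options above can also be passed there.
+# A hypothetical sketch (names are placeholders):
+#
+#   downloader = client.downloader(part_size=8 * 1024 * 1024, parallel_num=4)
+#   downloader.download_file(
+#       models.GetObjectRequest(bucket='examplebucket', key='big-object'),
+#       '/tmp/big-object')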
diff --git a/alibabacloud_oss_v2/encryption_client.py b/alibabacloud_oss_v2/encryption_client.py
new file mode 100644
index 0000000..f5a010a
--- /dev/null
+++ b/alibabacloud_oss_v2/encryption_client.py
@@ -0,0 +1,423 @@
+# pylint: disable=line-too-long
+"""Client-side encryption client."""
+import copy
+import base64
+from typing import MutableMapping, List, Optional, cast
+from .types import StreamBody, CaseInsensitiveDict
+from .client import Client
+from . import models
+from . import exceptions
+from . import utils
+from . import io_utils
+from .crypto import MasterCipher, Envelope, ContentCipherBuilder, ContentCipher, CipherData
+from .crypto.aes_ctr_cipher import AESCtrCipherBuilder
+from .crypto.aes_ctr import _BLOCK_SIZE_LEN
+
+class EncryptionMultiPartContext:
+    """EncryptionMultiPartContext saves the encryption or decryption information
+    of a multipart upload.
+    """
+    def __init__(
+        self,
+        content_cipher: ContentCipher,
+        data_size: int,
+        part_size: int,
+    ) -> None:
+        self.content_cipher = content_cipher
+        self.data_size = data_size
+        self.part_size = part_size
+
+    def is_valid(self) -> bool:
+        """Returns True if the context carries a cipher and non-zero sizes."""
+        if (self.content_cipher is None or
+            self.data_size == 0 or
+            self.part_size == 0):
+            return False
+
+        return True
+
+class EncryptionClient:
+    """A client that transparently encrypts data on upload and decrypts it on download.
+    """
+
+    def __init__(
+        self,
+        client: Client,
+        master_cipher: MasterCipher,
+        decrypt_master_ciphers: Optional[List[MasterCipher]] = None,
+    ) -> None:
+        self._client = client
+        self._master_cipher = master_cipher
+        self._defualt_ccbuilder = AESCtrCipherBuilder(master_cipher)
+        self._decrypt_master_ciphers = decrypt_master_ciphers or []
+        self._ccbuilders = {}
+        for mc in self._decrypt_master_ciphers:
+            mat_desc = mc.get_mat_desc() or ''
+            if len(mat_desc) > 0:
+                self._ccbuilders[mat_desc] = AESCtrCipherBuilder(mc)
+
+    def unwrap(self) -> Client:
+        """Returns the underlying (non-encrypting) client.
+
+        Returns:
+            Client: The wrapped client.
+        """
+        return self._client
+
+    def __repr__(self) -> str:
+        return "<EncryptionClient>"
+
+    # object
+    def put_object(self, request: models.PutObjectRequest, **kwargs
+                   ) -> models.PutObjectResult:
+        """
+        Uploads objects.
+
+        Args:
+            request (PutObjectRequest): Request parameters for PutObject operation.
+
+        Returns:
+            PutObjectResult: Response result for PutObject operation.
+        """
+
+        return self._put_object_securely(request, **kwargs)
+
+    def get_object(self, request: models.GetObjectRequest, **kwargs
+                   ) -> models.GetObjectResult:
+        """
+        Queries an object. To call this operation, you must have read permissions on the object.
+
+        Args:
+            request (GetObjectRequest): Request parameters for GetObject operation.
+
+        Returns:
+            GetObjectResult: Response result for GetObject operation.
+        """
+
+        return self._get_object_securely(request, **kwargs)
+
+    def head_object(self, request: models.HeadObjectRequest, **kwargs
+                    ) -> models.HeadObjectResult:
+        """
+        Queries information about the object in a bucket.
+
+        Args:
+            request (HeadObjectRequest): Request parameters for HeadObject operation.
+
+        Returns:
+            HeadObjectResult: Response result for HeadObject operation.
+        """
+
+        return self._client.head_object(request, **kwargs)
+
+    def initiate_multipart_upload(self, request: models.InitiateMultipartUploadRequest, **kwargs
+                                  ) -> models.InitiateMultipartUploadResult:
+        """
+        Initiates a multipart upload task before you can upload data in parts to Object Storage Service (OSS).
+
+        Args:
+            request (InitiateMultipartUploadRequest): Request parameters for InitiateMultipartUpload operation.
+
+        Returns:
+            InitiateMultipartUploadResult: Response result for InitiateMultipartUpload operation.
+        """
+
+        return self._initiate_multipart_upload_securely(request, **kwargs)
+
+    def upload_part(self, request: models.UploadPartRequest, **kwargs
+                    ) -> models.UploadPartResult:
+        """
+        Uploads data in parts based on the specified object name and upload ID.
+
+        Args:
+            request (UploadPartRequest): Request parameters for UploadPart operation.
+
+        Returns:
+            UploadPartResult: Response result for UploadPart operation.
+        """
+
+        return self._upload_part_securely(request, **kwargs)
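+
+    # A sketch of a client-side-encrypted multipart upload (placeholder names;
+    # cse_part_size must be a multiple of the 16-byte AES-CTR block, and the
+    # cse_multipart_context returned by initiate_multipart_upload must be
+    # passed to every upload_part call):
+    #
+    #   init = eclient.initiate_multipart_upload(models.InitiateMultipartUploadRequest(
+    #       bucket='examplebucket', key='enc-object',
+    #       cse_part_size=1024 * 1024, cse_data_size=total_size))
+    #   part = eclient.upload_part(models.UploadPartRequest(
+    #       bucket='examplebucket', key='enc-object', upload_id=init.upload_id,
+    #       part_number=1, cse_multipart_context=init.cse_multipart_context,
+    #       body=first_part))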
+
+    def initiate_multipart_upload(self, request: models.InitiateMultipartUploadRequest, **kwargs
+                                  ) -> models.InitiateMultipartUploadResult:
+        """
+        Initiates a multipart upload task before you can upload data in parts to Object Storage Service (OSS).
+
+        Args:
+            request (InitiateMultipartUploadRequest): Request parameters for InitiateMultipartUpload operation.
+
+        Returns:
+            InitiateMultipartUploadResult: Response result for InitiateMultipartUpload operation.
+        """
+
+        return self._initiate_multipart_upload_securely(request, **kwargs)
+
+    def upload_part(self, request: models.UploadPartRequest, **kwargs
+                    ) -> models.UploadPartResult:
+        """
+        Uploads data in parts based on the specified object name and upload ID.
+
+        Args:
+            request (UploadPartRequest): Request parameters for UploadPart operation.
+
+        Returns:
+            UploadPartResult: Response result for UploadPart operation.
+        """
+
+        return self._upload_part_securely(request, **kwargs)
+
+    def complete_multipart_upload(self, request: models.CompleteMultipartUploadRequest, **kwargs
+                                  ) -> models.CompleteMultipartUploadResult:
+        """
+        Completes the multipart upload task of an object after all parts of the object are uploaded.
+
+        Args:
+            request (CompleteMultipartUploadRequest): Request parameters for CompleteMultipartUpload operation.
+
+        Returns:
+            CompleteMultipartUploadResult: Response result for CompleteMultipartUpload operation.
+        """
+
+        return self._client.complete_multipart_upload(request, **kwargs)
+
+    def abort_multipart_upload(self, request: models.AbortMultipartUploadRequest, **kwargs
+                               ) -> models.AbortMultipartUploadResult:
+        """
+        Cancels a multipart upload task and deletes the parts uploaded in the task.
+
+        Args:
+            request (AbortMultipartUploadRequest): Request parameters for AbortMultipartUpload operation.
+
+        Returns:
+            AbortMultipartUploadResult: Response result for AbortMultipartUpload operation.
+        """
+
+        return self._client.abort_multipart_upload(request, **kwargs)
+
+    def list_parts(self, request: models.ListPartsRequest, **kwargs
+                   ) -> models.ListPartsResult:
+        """
+        Lists all parts that are uploaded by using a specified upload ID.
+
+        Args:
+            request (ListPartsRequest): Request parameters for ListParts operation.
+
+        Returns:
+            ListPartsResult: Response result for ListParts operation.
+        """
+
+        return self._client.list_parts(request, **kwargs)
+
+    def _get_ccbuilder(self, envelope: Envelope) -> ContentCipherBuilder:
+        return self._ccbuilders.get(envelope.mat_desc or '', self._defualt_ccbuilder)
+
+
+    def _put_object_securely(self, request: models.PutObjectRequest, **kwargs
+                             ) -> models.PutObjectResult:
+
+        cc = self._defualt_ccbuilder.content_cipher()
+        body = cc.encrypt_content(request.body)
+        erequest = copy.copy(request)
+        erequest.body = body
+        _add_crypto_header_putobject(erequest, cc.get_cipher_data())
+        return self._client.put_object(erequest, **kwargs)
+
+    def _get_object_securely(self, request: models.GetObjectRequest, **kwargs
+                             ) -> models.GetObjectResult:
+
+        adjust_range_start = 0
+        discard_count = 0
+        erequest = request
+        if request.range_header is not None:
+            http_range = utils.parse_http_range(request.range_header)
+            # AES/CTR decryption must start on a cipher-block boundary, so widen
+            # the requested range and discard the leading bytes afterwards.
+            adjust_range_start = _adjust_range_start(http_range[0])
+            discard_count = http_range[0] - adjust_range_start
+
+            if discard_count > 0:
+                erequest = copy.copy(request)
+                range_start = str(adjust_range_start) if adjust_range_start >= 0 else ''
+                range_end = str(http_range[1]) if http_range[1] >= 0 else ''
+                erequest.range_header = f'bytes={range_start}-{range_end}'
+                erequest.range_behavior = 'standard'
+
+        result = self._client.get_object(erequest, **kwargs)
+
+        try:
+            if _has_encrypted_header(result.headers):
+                envelope = _get_envelope_from_header(result.headers)
+
+                if not _is_valid_content_alg(envelope.cek_algorithm or ''):
+                    raise exceptions.ParamInvalidError(field='envelope.cek_algorithm')
+
+                if not envelope.is_valid():
+                    raise exceptions.ParamInvalidError(field='envelope')
+
+                cc = self._get_ccbuilder(envelope).content_cipher_from_env(envelope, offset=adjust_range_start)
+                result.body = cast(StreamBody, cc.decrypt_content(result.body))
+
+        except Exception as err:
+            if result.body is not None:
+                result.body.close()
+
+            raise err
+
+        if discard_count > 0:
+            # rewrite Content-Range & Content-Length
+            if result.content_range is not None:
+                crange = utils.parse_content_range(result.content_range)
+                value = f'bytes {crange[0] + 
discard_count}-{crange[1]}/{crange[2]}' + result.content_range = value + result.headers.update({"Content-Range": value}) + else: + result.headers.update({"Content-Range": f'bytes {discard_count}-/*'}) + + if result.content_length is not None: + result.content_length -= discard_count + result.headers.update({"Content-Length": str(result.content_length)}) + + result.body = io_utils.StreamBodyDiscarder(result.body, discard_count) + + + return result + + def _initiate_multipart_upload_securely(self, request: models.InitiateMultipartUploadRequest, **kwargs + ) -> models.InitiateMultipartUploadResult: + + _valid_encryption_context(request) + erequest = copy.copy(request) + + cc = self._defualt_ccbuilder.content_cipher() + _add_crypto_header_initpart(erequest, cc.get_cipher_data()) + + result = self._client.initiate_multipart_upload(erequest, **kwargs) + + result.cse_multipart_context = EncryptionMultiPartContext( + content_cipher=cc, + part_size= utils.safety_int(request.cse_part_size), + data_size= utils.safety_int(request.cse_data_size), + ) + + return result + + def _upload_part_securely(self, request: models.UploadPartRequest, **kwargs + ) -> models.UploadPartResult: + + cse_context = request.cse_multipart_context + if cse_context is None: + raise exceptions.ParamNullError( + field='request.cse_multipart_context') + + if (not isinstance(cse_context, EncryptionMultiPartContext) or + not request.cse_multipart_context.is_valid()): + raise exceptions.ParamInvalidError( + field='request.cse_multipart_context') + + if cse_context.part_size % _BLOCK_SIZE_LEN != 0: + raise ValueError(f'EncryptionMultiPartContext.part_size must be aligned to {_BLOCK_SIZE_LEN}') + + offset = 0 + if request.part_number > 1: + offset = (request.part_number - 1) * cse_context.part_size + + cc = cse_context.content_cipher.clone(offset=offset) + + erequest = copy.copy(request) + erequest.body = cc.encrypt_content(request.body) + _add_crypto_header_uploadpart(erequest, cse_context, cc.get_cipher_data()) + + return self._client.upload_part(erequest, **kwargs) + + +def _add_crypto_common_header(headers: MutableMapping, cipher_data: CipherData) -> None: + # mat desc + if len(cipher_data.mat_desc) > 0: + headers['x-oss-meta-client-side-encryption-matdesc'] = cipher_data.mat_desc + + # encrypted key + value = base64.b64encode(cipher_data.encrypted_key) + headers['x-oss-meta-client-side-encryption-key'] = value.decode() + + # encrypted iv + value = base64.b64encode(cipher_data.encrypted_iv) + headers['x-oss-meta-client-side-encryption-start'] = value.decode() + + # wrap alg + headers['x-oss-meta-client-side-encryption-wrap-alg'] = cipher_data.wrap_algorithm + + # cek alg + headers['x-oss-meta-client-side-encryption-cek-alg'] = cipher_data.cek_algorithm + + +def _add_crypto_header_putobject(request: models.PutObjectRequest, cipher_data: CipherData) -> None: + headers = getattr(request, 'headers', None) + if headers is None: + headers = CaseInsensitiveDict() + + # convert content-md5 + if request.content_md5 is not None: + headers['x-oss-meta-client-side-encryption-unencrypted-content-md5'] = request.content_md5 + request.content_md5 = None + + # convert content-length + if request.content_length is not None: + headers['x-oss-meta-client-side-encryption-unencrypted-content-length'] = str(request.content_length) + request.content_length = None + + _add_crypto_common_header(headers, cipher_data) + setattr(request, 'headers', headers) + + +def _add_crypto_header_initpart(request: models.InitiateMultipartUploadRequest, cipher_data: 
CipherData) -> None:
+    headers = getattr(request, 'headers', None)
+    if headers is None:
+        headers = CaseInsensitiveDict()
+
+    # data size; safety_int tolerates an unset value, int(None) would raise
+    if utils.safety_int(request.cse_data_size) > 0:
+        headers['x-oss-meta-client-side-encryption-data-size'] = str(request.cse_data_size)
+
+    # part size
+    headers['x-oss-meta-client-side-encryption-part-size'] = str(request.cse_part_size)
+
+    _add_crypto_common_header(headers, cipher_data)
+
+    setattr(request, 'headers', headers)
+
+def _add_crypto_header_uploadpart(request: models.UploadPartRequest,
+                                  cse_context: EncryptionMultiPartContext,
+                                  cipher_data: CipherData) -> None:
+    headers = getattr(request, 'headers', None)
+    if headers is None:
+        headers = CaseInsensitiveDict()
+
+    # data size
+    if utils.safety_int(cse_context.data_size) > 0:
+        headers['x-oss-meta-client-side-encryption-data-size'] = str(cse_context.data_size)
+
+    # part size
+    headers['x-oss-meta-client-side-encryption-part-size'] = str(cse_context.part_size)
+
+    _add_crypto_common_header(headers, cipher_data)
+
+    setattr(request, 'headers', headers)
+
+
+def _has_encrypted_header(headers: MutableMapping[str, str]) -> bool:
+    return len(headers.get("x-oss-meta-client-side-encryption-key", '')) > 0
+
+def _get_envelope_from_header(headers: MutableMapping[str, str]) -> Envelope:
+    env = Envelope()
+    env.iv = base64.b64decode(headers.get("x-oss-meta-client-side-encryption-start", ''))
+    env.cipher_key = base64.b64decode(headers.get("x-oss-meta-client-side-encryption-key", ''))
+    env.mat_desc = headers.get("x-oss-meta-client-side-encryption-matdesc", '')
+    env.cek_algorithm = headers.get("x-oss-meta-client-side-encryption-cek-alg", '')
+    env.wrap_algorithm = headers.get("x-oss-meta-client-side-encryption-wrap-alg", '')
+    env.unencrypted_md5 = headers.get("x-oss-meta-client-side-encryption-unencrypted-content-md5", '')
+    env.unencrypted_content_length = headers.get("x-oss-meta-client-side-encryption-unencrypted-content-length", '')
+    return env
+
+def _get_envelope_from_list_parts(result: models.ListPartsResult) -> Envelope:
+    env = Envelope()
+    env.iv = base64.b64decode(utils.safety_str(result.client_encryption_start))
+    env.cipher_key = base64.b64decode(utils.safety_str(result.client_encryption_key))
+    env.cek_algorithm = utils.safety_str(result.client_encryption_cek_alg)
+    env.wrap_algorithm = utils.safety_str(result.client_encryption_wrap_alg)
+    env.mat_desc = ''
+    return env
+
+def _is_valid_content_alg(alg_name: str) -> bool:
+    # content encryption currently supports only the AES/CTR algorithm
+    return alg_name == 'AES/CTR/NoPadding'
+
+def _adjust_range_start(start):
+    return (start // _BLOCK_SIZE_LEN) * _BLOCK_SIZE_LEN
+
+def _valid_encryption_context(request: models.InitiateMultipartUploadRequest) -> None:
+    part_size = request.cse_part_size
+    if part_size is None:
+        raise exceptions.ParamNullError(field='request.cse_part_size')
+
+    if not isinstance(part_size, int):
+        raise TypeError(f'request.cse_part_size needs int, but got {type(part_size)}')
+
+    if part_size <= 0:
+        raise exceptions.ParamInvalidError(field='request.cse_part_size')
+
+    if part_size % _BLOCK_SIZE_LEN != 0:
+        raise ValueError(f'request.cse_part_size must be aligned to {_BLOCK_SIZE_LEN}')
+
+    data_size = request.cse_data_size
+    if data_size is not None and not isinstance(data_size, int):
+        raise TypeError(f'request.cse_data_size needs int, but got {type(data_size)}')
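
For orientation, a hedged usage sketch of the client defined above (the configuration, the package-root exports and the concrete MasterCipher are assumptions; any implementation from the crypto package should work):

    # sketch: wrapping a plain client with client-side encryption
    import alibabacloud_oss_v2 as oss

    client = oss.Client(cfg)                 # a configured oss.Client (assumed)
    master_cipher = ...                      # a crypto.MasterCipher implementation (elided)
    eclient = oss.EncryptionClient(client, master_cipher)

    # the body is encrypted before upload; the envelope goes into x-oss-meta-* headers
    eclient.put_object(oss.models.PutObjectRequest(bucket='b', key='k', body=b'secret'))

    # the envelope headers drive transparent decryption on the way back
    result = eclient.get_object(oss.models.GetObjectRequest(bucket='b', key='k'))
    plaintext = result.body.read()
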
diff --git a/alibabacloud_oss_v2/endpoints.py b/alibabacloud_oss_v2/endpoints.py
new file mode 100644
index 0000000..e347119
--- /dev/null
+++ b/alibabacloud_oss_v2/endpoints.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+import re
+import socket
+
+from . import defaults
+
+SCHEME_REGEXP = re.compile(r"^([^:]+)://")
+
+def add_scheme(endpoint: str, disable_ssl: bool) -> str:
+    """Adds a scheme to the endpoint if it does not already have one."""
+    if endpoint != "" and not SCHEME_REGEXP.match(endpoint):
+        scheme = defaults.DEFAULT_ENDPOINT_SCHEME
+        if disable_ssl:
+            scheme = 'http'
+        endpoint = f'{scheme}://{endpoint}'
+    return endpoint
+
+
+def from_region(region: str, disable_ssl: bool, etype: str) -> str:
+    """Builds an endpoint from the region, the ssl switch and the endpoint type."""
+    scheme = defaults.DEFAULT_ENDPOINT_SCHEME
+    if disable_ssl:
+        scheme = 'http'
+
+    if etype == 'internal':
+        endpoint = f'oss-{region}-internal.aliyuncs.com'
+    elif etype == "dualstack":
+        endpoint = f'{region}.oss.aliyuncs.com'
+    elif etype == 'accelerate':
+        endpoint = 'oss-accelerate.aliyuncs.com'
+    elif etype == 'overseas':
+        endpoint = 'oss-accelerate-overseas.aliyuncs.com'
+    else:
+        endpoint = f'oss-{region}.aliyuncs.com'
+
+    endpoint = f'{scheme}://{endpoint}'
+
+    return endpoint
+
+
+def is_ip(hostname: str) -> bool:
+    """Checks whether the host name is an IP address."""
+    if not hostname:
+        # guard: indexing an empty string below would raise IndexError
+        return False
+
+    is_ipv6 = False
+    right_bracket_index = hostname.find(']')
+    if hostname[0] == '[' and right_bracket_index > 0:
+        loc = hostname[1:right_bracket_index]
+        is_ipv6 = True
+    else:
+        loc = hostname.split(':')[0]
+
+    try:
+        if is_ipv6:
+            socket.inet_pton(socket.AF_INET6, loc)  # IPv6
+        else:
+            socket.inet_aton(loc)  # only IPv4
+    except socket.error:
+        return False
+
+    return True
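
A quick sketch of what these helpers produce (assuming DEFAULT_ENDPOINT_SCHEME is https, which the https-unless-disable_ssl branches above imply):

    # sketch: endpoint construction for a few endpoint types
    from alibabacloud_oss_v2 import endpoints

    endpoints.from_region('cn-hangzhou', False, '')           # https://oss-cn-hangzhou.aliyuncs.com
    endpoints.from_region('cn-hangzhou', True, 'internal')    # http://oss-cn-hangzhou-internal.aliyuncs.com
    endpoints.from_region('cn-hangzhou', False, 'dualstack')  # https://cn-hangzhou.oss.aliyuncs.com
    endpoints.add_scheme('oss-cn-hangzhou.aliyuncs.com', False)  # https://oss-cn-hangzhou.aliyuncs.com
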
diff --git a/alibabacloud_oss_v2/exceptions.py b/alibabacloud_oss_v2/exceptions.py
new file mode 100644
index 0000000..643da66
--- /dev/null
+++ b/alibabacloud_oss_v2/exceptions.py
@@ -0,0 +1,225 @@
+"""Exception types raised by the oss sdk."""
+
+
+def _exception_from_packed_args(exception_cls, args=None, kwargs=None):
+    if args is None:
+        args = ()
+    if kwargs is None:
+        kwargs = {}
+    return exception_cls(*args, **kwargs)
+
+
+class BaseError(Exception):
+    """
+    The base exception class for oss sdk exceptions.
+
+    :ivar msg: The descriptive message associated with the error.
+    """
+
+    fmt = 'An unspecified error occurred'
+
+    def __init__(self, **kwargs):
+        msg = self.fmt.format(**kwargs)
+        Exception.__init__(self, msg)
+        self.kwargs = kwargs
+
+    def __reduce__(self):
+        return _exception_from_packed_args, (self.__class__, None, self.kwargs)
+
+
+class CredentialsBaseError(BaseError):
+    """
+    The base exception class for credentials errors.
+    """
+
+
+class CredentialsEmptyError(CredentialsBaseError):
+    """
+    The access key or access key secret associated with the credentials does not exist.
+    """
+
+    fmt = 'Credentials is null or empty'
+
+
+class CredentialsFetchError(CredentialsBaseError):
+    """
+    Fetching the credentials failed.
+    """
+    fmt = 'Fetch Credentials raised an exception: {error}'
+
+
+class StreamConsumedError(BaseError):
+    """
+    Stream Consumed Error.
+    """
+    fmt = 'You have likely already consumed this stream, so it can not be accessed anymore.'
+
+
+class StreamClosedError(BaseError):
+    """
+    Stream Closed Error.
+    """
+    fmt = 'The content for response can no longer be read or streamed.'
+
+
+class ResponseNotReadError(BaseError):
+    """
+    Response Not Read Error.
+    """
+    fmt = 'You have not read in the bytes for the response. Call .read() on the response first.'
+
+
+class RequestError(BaseError):
+    """An error occurred while attempting to make a request to the service.
+    No request was sent.
+    """
+    fmt = 'request error: {error}.'
+
+    def __init__(self, **kwargs):
+        BaseError.__init__(self, **kwargs)
+        self._error = kwargs.get("error", None)
+
+    def unwrap(self) -> Exception:
+        """returns the detail error"""
+        return self._error
+
+
+class ResponseError(BaseError):
+    """The request was sent, but the client failed to understand the response.
+    The connection may have timed out. These errors can be retried for idempotent or safe operations.
+    """
+    fmt = 'response error: {error}.'
+
+    def __init__(self, **kwargs):
+        BaseError.__init__(self, **kwargs)
+        self._error = kwargs.get("error", None)
+
+    def unwrap(self) -> Exception:
+        """returns the detail error"""
+        return self._error
+
+class ServiceError(BaseError):
+    """
+    The exception class for errors returned by the oss service.
+    """
+    fmt = 'Error returned by Service.\n\
+           Http Status Code: {status_code}.\n\
+           Error Code: {code}.\n\
+           Request Id: {request_id}.\n\
+           Message: {message}.\n\
+           EC: {ec}.\n\
+           Timestamp: {timestamp}.\n\
+           Request Endpoint: {request_target}.'
+
+    def __init__(self, **kwargs):
+        BaseError.__init__(self, **kwargs)
+        self.status_code = kwargs.get("status_code", 0)
+        self.code = kwargs.get("code", None)
+        self.message = kwargs.get("message", None)
+        self.request_id = kwargs.get("request_id", None)
+        self.ec = kwargs.get("ec", None)
+        self.timestamp = kwargs.get("timestamp", None)
+        self.request_target = kwargs.get("request_target", None)
+        self.snapshot = kwargs.get("snapshot", None)
+        self.headers = kwargs.get("headers", None)
+        self.error_fileds = kwargs.get("error_fileds", None)
+
+
+class ParamInvalidError(BaseError):
+    """
+    Param Invalid Error.
+    """
+    fmt = 'invalid field, {field}.'
+
+
+class ParamNullError(BaseError):
+    """
+    Param Null Error.
+    """
+    fmt = 'null field, {field}.'
+
+
+class ParamNullOrEmptyError(BaseError):
+    """
+    Param Null or Empty Error.
+    """
+    fmt = 'null or empty field, {field}.'
+
+
+class ParamRequiredError(BaseError):
+    """
+    Param Required Error.
+    """
+    fmt = 'missing required field, {field}.'
+
+
+class OperationError(BaseError):
+    """
+    Operation Error.
+    """
+    fmt = 'operation error {name}: {error}.'
+
+    def __init__(self, **kwargs):
+        BaseError.__init__(self, **kwargs)
+        self._error = kwargs.get("error", None)
+
+    def unwrap(self) -> Exception:
+        """returns the detail error"""
+        return self._error
+
+
+class MD5UnavailableError(BaseError):
+    """
+    MD5 Unavailable Error.
+    """
+    fmt = "This system does not support MD5 generation."
+
+
+class SerializationError(BaseError):
+    """Raised if an error is encountered during serialization."""
+    fmt = 'Serialization raised an exception: {error}'
+
+
+class DeserializationError(BaseError):
+    """Raised if an error is encountered during deserialization."""
+    fmt = 'Deserialization raised an exception: {error}'
+
+
+class BucketNameInvalidError(BaseError):
+    """
+    Bucket Name Invalid Error.
+    """
+    fmt = 'Bucket name is invalid, got {name}.'
+
+
+class ObjectNameInvalidError(BaseError):
+    """
+    Object Name Invalid Error.
+    """
+    fmt = 'Object name is invalid.'
+
+
+class InconsistentError(BaseError):
+    """
+    CRC check error.
+    """
+    fmt = 'crc is inconsistent, client {client_crc}, server {server_crc}'
+
+class PresignExpirationError(BaseError):
+    """
+    Presign Expiration Error.
+    """
+    fmt = 'expires should be not greater than 604800 (seven days)'
+
+class FileNotExist(BaseError):
+    """
+    File does not exist.
+    """
+    fmt = 'File does not exist, {filepath}'
+
+class FileNotReadable(BaseError):
+    """
+    File is not readable.
+    """
+    fmt = 'File is not readable, {filepath}'
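
These wrappers carry the underlying error rather than replacing it; the pattern used throughout the SDK (and in filelike.py below) looks like this:

    # sketch: unwrapping an OperationError down to the service response
    from alibabacloud_oss_v2 import exceptions

    try:
        result = client.head_object(request)     # any operation; client assumed in scope
    except exceptions.OperationError as err:
        serr = err.unwrap()
        if isinstance(serr, exceptions.ServiceError) and serr.status_code == 404:
            result = None                        # object does not exist
        else:
            raise
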
diff --git a/alibabacloud_oss_v2/filelike.py b/alibabacloud_oss_v2/filelike.py
new file mode 100644
index 0000000..2a6ac54
--- /dev/null
+++ b/alibabacloud_oss_v2/filelike.py
@@ -0,0 +1,908 @@
+# pylint: disable=line-too-long
+import abc
+import queue
+import threading
+from typing import Optional, Iterator, List, Generator
+from concurrent.futures import ThreadPoolExecutor, Future
+from .types import StreamBody, BodyType
+from . import models
+from . import exceptions
+from . import utils
+
+
+DEFAULT_BUFFER_SIZE = 8 * 1024
+
+class PathError(exceptions.BaseError):
+    """
+    PathError records an error and the operation and file path that caused it.
+    """
+    fmt = 'path error {op} {path}: {error}.'
+
+    def __init__(self, **kwargs):
+        exceptions.BaseError.__init__(self, **kwargs)
+        self._error = kwargs.get("error", None)
+
+    def unwrap(self) -> Exception:
+        """returns the detail error"""
+        return self._error
+
+
+class AppendFileAPIClient(abc.ABC):
+    """Abstract base class for the append file client."""
+
+    @abc.abstractmethod
+    def head_object(self, request: models.HeadObjectRequest, **kwargs) -> models.HeadObjectResult:
+        """Queries information about the object in a bucket."""
+
+    @abc.abstractmethod
+    def append_object(self, request: models.AppendObjectRequest, **kwargs) -> models.AppendObjectResult:
+        """
+        Uploads an object by appending the object to an existing object.
+        Objects created by using the AppendObject operation are appendable objects.
+        """
+
+
+class AppendOnlyFile:
+    """AppendOnlyFile opens or creates the named file for appending."""
+
+    def __init__(
+        self,
+        client: AppendFileAPIClient,
+        bucket: str,
+        key: str,
+        request_payer: Optional[str] = None,
+        create_parameter: Optional[models.AppendObjectRequest] = None,
+    ) -> None:
+        """
+        client (AppendFileAPIClient, required): An agent that sends the request.
+        bucket (str, required): The name of the bucket.
+        key (str, required): The name of the object.
+        request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs.
+        create_parameter (AppendObjectRequest, optional): The parameters used when the object is first created; supports
+            CacheControl, ContentDisposition, ContentEncoding, Expires, ContentType, Metadata, the SSE parameters, Acl, StorageClass and Tagging.
+            If the object already exists, these parameters are ignored.
+        """
+        self._client = client
+
+        # object info
+        self._bucket = bucket
+        self._key = key
+        self._request_payer = request_payer
+        self._create_parameter = create_parameter
+
+        self._created = False
+
+        # current write position
+        self._offset = 0
+        self._hash_crc64 = None
+
+        self._try_open_object(bucket, key, request_payer)
+
+        self._closed = False
+
+    @property
+    def mode(self) -> str:
+        """String giving the file mode"""
+        return 'ab'
+
+    @property
+    def name(self) -> str:
+        """String giving the file in oss path"""
+        return f'oss://{self._bucket}/{self._key}'
+
+    @property
+    def closed(self) -> bool:
+        """True if the file descriptor will be closed by close()."""
+        return self._closed
+
+    ### Context manager ###
+    def __enter__(self) -> 'AppendOnlyFile':
+        self._check_closed('enter')
+        return self
+
+    def __exit__(self, type, value, traceback) -> None:
+        self.close()
+
+    ### io apis ###
+    def close(self) -> None:
+        """Close the file."""
+        self._closed = True
+
+    def flush(self) -> None:
+        """Flush write buffers.
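+
+        If nothing has been written yet, this creates the (empty) appendable
+        object using create_parameter.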
+ """ + self._check_closed('flush') + if not self._created: + self._write_bytes(b'') + + def tell(self) -> int: + """Return an int indicating the current stream position.""" + self._check_closed('tell') + return self._offset + + def writable(self) -> bool: + """True if file was opened in a write mode.""" + self._check_closed('writable') + return True + + def write(self, b): + """Write bytes b to file, return number written. + """ + self._check_closed('write') + + if b is None: + return 0 + + if not isinstance(b, bytes): + raise self._wrap_error('write', TypeError(f'Not a bytes type, got {type(b)}')) + + return self._write_bytes(b) + + + def write_from(self, b:BodyType): + """Write any data to file, return number written. + """ + self._check_closed('write') + + if b is None: + return 0 + + return self._write_any(b) + + + def _try_open_object(self, bucket: str, key: str, request_payer:Optional[str]) -> None: + try: + result = self._client.head_object(models.HeadObjectRequest( + bucket=bucket, + key=key, + request_payer=request_payer + )) + except exceptions.OperationError as err: + serr = err.unwrap() + if isinstance(serr, exceptions.ServiceError): + if serr.status_code == 404: + # not found + return + raise err + + if utils.safety_str(result.object_type).lower() != "appendable": + raise self._wrap_error('open', ValueError('Not a appendable file')) + + self._created = True + self._offset = result.content_length + self._hash_crc64 = result.hash_crc64 + + def _check_closed(self, op): + """Internal: raise a ValueError if file is closed + """ + if self.closed: + raise self._wrap_error(op, ValueError("I/O operation on closed file.")) + + def _apply_create_param_if_need(self, request: models.AppendObjectRequest): + if self._created or self._create_parameter is None: + return + + request.acl = self._create_parameter.acl + request.storage_class = self._create_parameter.storage_class + + request.cache_control = self._create_parameter.cache_control + request.content_disposition = self._create_parameter.content_disposition + request.content_encoding = self._create_parameter.content_encoding + request.expires = self._create_parameter.expires + request.content_type = self._create_parameter.content_type + request.server_side_encryption = self._create_parameter.server_side_encryption + request.server_side_data_encryption = self._create_parameter.server_side_data_encryption + request.sse_kms_key_id = self._create_parameter.sse_kms_key_id + request.metadata = self._create_parameter.metadata + request.tagging = self._create_parameter.tagging + + def _write_bytes(self, b): + offset = self._offset + hash_crc64 = self._hash_crc64 + error: Exception = None + request = models.AppendObjectRequest( + bucket=self._bucket, + key=self._key, + position=offset, + request_payer=self._request_payer, + body=b + ) + + self._apply_create_param_if_need(request) + + try: + result = self._client.append_object(request) + offset = result.next_position + hash_crc64 = result.hash_crc64 + except Exception as err: + error = err + if isinstance(err, exceptions.OperationError): + serr = err.unwrap() + if isinstance(serr, exceptions.ServiceError): + if serr.code == 'PositionNotEqualToLength': + next_append = self._next_append_stat() + if next_append[0] >= 0 and offset + len(b) == next_append[0]: + error = None + offset = next_append[0] + hash_crc64 = next_append[1] + + if error: + raise self._wrap_error('write', error) + + writern = offset - self._offset + self._created = True + self._offset = offset + self._hash_crc64 = hash_crc64 + + 
return writern + + def _write_any(self, b): + offset = self._offset + hash_crc64 = self._hash_crc64 + error: Exception = None + request = models.AppendObjectRequest( + bucket=self._bucket, + key=self._key, + position=offset, + request_payer=self._request_payer, + body=b + ) + + blen = utils.guess_content_length(b) + + self._apply_create_param_if_need(request) + + try: + result = self._client.append_object(request) + offset = result.next_position + hash_crc64 = result.hash_crc64 + except Exception as err: + error = err + if isinstance(err, exceptions.OperationError): + serr = err.unwrap() + if isinstance(serr, exceptions.ServiceError): + if serr.code == 'PositionNotEqualToLength' and blen is not None: + next_append = self._next_append_stat() + if next_append[0] >= 0 and offset + blen == next_append[0]: + error = None + offset = next_append[0] + hash_crc64 = next_append[1] + + if error: + raise self._wrap_error('write', error) + + writern = offset - self._offset + self._created = True + self._offset = offset + self._hash_crc64 = hash_crc64 + + return writern + + + def _next_append_stat(self): + try: + result = self._client.head_object(models.HeadObjectRequest( + bucket=self._bucket, + key=self._key, + request_payer=self._request_payer + )) + return result.content_length, result.hash_crc64 + except Exception: + pass + return -1, None + + def _wrap_error(self, op: str, error: Exception) -> Exception: + return PathError( + op = op, + path=f'oss://{self._bucket}/{self._key}', + error=error + ) + + + +class OpenFileAPIClient(abc.ABC): + """Abstract base class for open file client.""" + + @abc.abstractmethod + def head_object(self, request: models.HeadObjectRequest, **kwargs) -> models.HeadObjectResult: + """ Queries information about the object in a bucket.""" + + @abc.abstractmethod + def get_object(self, request: models.GetObjectRequest, **kwargs) -> models.GetObjectResult: + """ + Queries an object. To call this operation, you must have read permissions on the object. + """ + +class ReadOnlyFile: + """ReadOnlyFile opens the named file for reading.""" + + def __init__( + self, + client: OpenFileAPIClient, + bucket: str, + key: str, + version_id: Optional[str] = None, + request_payer: Optional[str] = None, + **kwargs + ) -> None: + """ + client (OpenFileAPIClient, required): A agent that sends the request. + bucket (str, required): The name of the bucket. + key (str, required): The name of the object. + version_id (str, optional): The version ID of the object. 
+        request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs.
+        """
+        self._client = client
+
+        # object info
+        self._bucket = bucket
+        self._key = key
+        self._version_id = version_id
+        self._request_payer = request_payer
+
+        # source info
+        self._size_in_bytes = None
+        self._modtime = None
+        self._etag = None
+        self._headers = None
+        self._stat_object()
+
+        # current read position
+        self._offset = 0
+
+        # chunk remains buffer
+        self._read_buf = None
+        self._read_buf_offset = 0
+
+        # stream reader
+        self._stream_reader: StreamBody = None
+        self._stream_iter: Iterator = None
+
+        # prefetch parameters
+        self._enable_prefetch = kwargs.get('enable_prefetch', False)
+        self._prefetch_num = kwargs.get('prefetch_num', 3)
+        self._chunk_size = kwargs.get('chunk_size', 6 * 1024 * 1024)
+        self._prefetch_threshold = kwargs.get('prefetch_threshold', 20 * 1024 * 1024)
+        self._block_size = kwargs.get('block_size', None)
+
+        # async readers for prefetch
+        self._executor: ThreadPoolExecutor = None
+        self._generator: Generator = None
+        self._prefetch_readers: List['_PrefetchDelegate'] = []
+
+        # number of sequential reads
+        self._seq_read_amount = 0
+        # number of out-of-order reads
+        self._num_ooo_read = 0
+
+        self._closed = False
+        self._readable = True
+        self._seekable = True
+
+
+    @property
+    def mode(self) -> str:
+        """String giving the file mode"""
+        return 'rb'
+
+    @property
+    def name(self) -> str:
+        """String giving the file in oss path"""
+        if self._version_id:
+            return f'oss://{self._bucket}/{self._key}?versionId={self._version_id}'
+        return f'oss://{self._bucket}/{self._key}'
+
+    @property
+    def closed(self) -> bool:
+        """True if the file descriptor will be closed by close()."""
+        return self._closed
+
+    ### Context manager ###
+    def __enter__(self) -> 'ReadOnlyFile':
+        self._check_closed('enter')
+        return self
+
+    def __exit__(self, type, value, traceback) -> None:
+        self.close()
+
+    ### io apis ###
+    def read(self, n=None):
+        """Read and return up to n bytes, where n is an int.
+        Return an empty bytes object at EOF.
+        """
+        self._check_closed('read')
+        self._check_readable('read')
+        d = self._read_at(self._offset, n)
+        self._offset += len(d)
+        return d
+
+    def readall(self):
+        """Read until EOF"""
+        return self.read()
+
+    def readinto(self, b):
+        """Read bytes into a pre-allocated bytes-like object b.
+        Returns an int representing the number of bytes read (0 for EOF)
+        """
+        self._check_closed('read')
+        self._check_readable('read')
+        n = self._read_at_into(self._offset, b)
+        self._offset += n
+        return n
+
+    def seek(self, pos, whence=0):
+        """Move to new file position.
+
+        Argument offset is a byte count. Optional argument whence defaults to
+        SEEK_SET or 0 (offset from start of file, offset should be >= 0); other values
+        are SEEK_CUR or 1 (move relative to current position, positive or negative),
+        and SEEK_END or 2 (move relative to end of file, usually negative, although
+        many platforms allow seeking beyond the end of a file).
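+
+        Returns the new absolute position.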
+
+        """
+        self._check_closed('seek')
+        try:
+            pos_index = pos.__index__
+        except AttributeError as exc:
+            raise self._wrap_error('seek', TypeError(f"{pos!r} is not an integer")) from exc
+        else:
+            pos = pos_index()
+
+        off = 0
+        if whence == 0:
+            off = pos
+        elif whence == 1:
+            off = self._offset + pos
+        elif whence == 2:
+            off = self._size_in_bytes + pos
+        else:
+            raise self._wrap_error('seek', ValueError("unsupported whence value"))
+
+        if off < 0:
+            raise self._wrap_error('seek', ValueError(f"negative seek position {off}"))
+
+        if off > self._size_in_bytes:
+            raise self._wrap_error('seek', ValueError(f"offset is unavailable {off}"))
+
+        self._offset = off
+
+        return off
+
+
+    def tell(self):
+        """Return an int indicating the current stream position."""
+        self._check_closed('tell')
+        return self._offset
+
+    def close(self) -> None:
+        """Close the file."""
+        if self._closed:
+            return
+
+        self._close_readers()
+
+        if self._executor:
+            self._executor.shutdown()
+
+        self._prefetch_readers = None
+        self._executor = None
+        self._closed = True
+
+    def seekable(self):
+        """True if file supports random-access."""
+        self._check_closed('seekable')
+        return self._seekable
+
+    def readable(self):
+        """True if file was opened in a read mode."""
+        self._check_closed('readable')
+        return self._readable
+
+    def _check_readable(self, op):
+        if not self._readable:
+            raise self._wrap_error(op, ValueError("File not open for reading."))
+
+    def _check_closed(self, op):
+        """Internal: raise a ValueError if file is closed
+        """
+        if self.closed:
+            raise self._wrap_error(op, ValueError("I/O operation on closed file."))
+
+    def _close_readers(self):
+        if self._generator:
+            self._generator.close()
+            self._generator = None
+
+        self._close_readers1()
+
+
+    def _close_readers1(self):
+        # inner buffer
+        self._read_buf = None
+
+        if self._stream_reader:
+            self._stream_reader.close()
+            self._stream_iter = None
+            self._stream_reader = None
+
+        for r in self._prefetch_readers:
+            r.close()
+
+        self._prefetch_readers = []
+
+
+    def _stat_object(self) -> None:
+        try:
+            result = self._client.head_object(models.HeadObjectRequest(
+                bucket=self._bucket,
+                key=self._key,
+                version_id=self._version_id,
+                request_payer=self._request_payer
+            ))
+        except Exception as err:
+            raise self._wrap_error('stat_object', err)
+
+        self._size_in_bytes = result.content_length
+        self._modtime = result.last_modified
+        self._etag = result.etag
+        self._headers = result.headers
+
+    def _read_at(self, offset, n):
+        nodata_val = b""
+        empty_values = (b"",)  # a 1-tuple; without the comma this would just be b""
+
+        if offset >= self._size_in_bytes:
+            return nodata_val
+
+        # Special case for when the number of bytes to read is unspecified.
+        if n is None or n < 0:
+            current_size = 0
+            chunks = []
+            while True:
+                chunk = self._next_chunk(offset + current_size)
+                if chunk is None:
+                    continue
+                if chunk in empty_values:
+                    nodata_val = chunk
+                    break
+                current_size += len(chunk)
+                chunks.append(chunk)
+            return b"".join(chunks) or nodata_val
+
+        # The number of bytes to read is specified, return at most n bytes.
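+        # Read into a pre-sized buffer, then trim it to the byte count actually
+        # obtained; _read_at_into buffers any chunk overrun for the next call.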
+ b = bytearray(n.__index__()) + got = self._read_at_into(offset, b) + if got is None: + return None + del b[got:] + return bytes(b) + + def _read_at_into(self, offset, buf): + """Read data into *buf*""" + if offset >= self._size_in_bytes: + return 0 + + if not isinstance(buf, memoryview): + buf = memoryview(buf) + if buf.nbytes == 0: + return 0 + buf = buf.cast('B') + written = 0 + while written < len(buf): + chunk = self._next_chunk(offset + written) + if chunk is None: + continue + # eof + if chunk == b'': + break + remains = len(buf) - written + n = min(remains, len(chunk)) + buf[written:written + n] = chunk[:n] + + # Save the extra data in the buffer. + if n < len(chunk): + self._read_buf = chunk[n:] + rn = len(self._read_buf) + self._read_buf_offset -= rn + self._seq_read_amount -= rn + + written += n + return written + + def _next_chunk_direct(self, offset): + if offset >= self._size_in_bytes: + return b'' + + if not self._stream_reader: + result = self._client.get_object(models.GetObjectRequest( + bucket=self._bucket, + key=self._key, + version_id=self._version_id, + request_payer=self._request_payer, + range_header=f'bytes={offset}-', + range_behavior='standard' + )) + + self._assert_same(offset, result) + self._stream_reader = result.body + self._stream_iter = result.body.iter_bytes() + + ret = None + try: + ret = next(self._stream_iter) + except StopIteration: + ret = b'' + except Exception: + #returns None and try again + self._stream_reader.close() + self._stream_reader = None + self._stream_iter = None + + return ret + + def _prefetch_generator(self, offset): + if not self._executor: + self._executor = ThreadPoolExecutor(self._prefetch_num) + + self._close_readers1() + prefetch_num = max(1, self._prefetch_num) + + for start in range(offset, self._size_in_bytes, self._chunk_size): + self._prefetch_readers.append(_PrefetchDelegate(self, start)) + if len(self._prefetch_readers) < prefetch_num: + continue + + # read data from first reader + reader = self._prefetch_readers[0] + curr_iter = iter(reader) + for d in curr_iter: + if reader.failed: + raise ValueError("Meets error, fall back to read serially") + yield d + + reader.close() + del self._prefetch_readers[0] + + # remians + for reader in self._prefetch_readers: + curr_iter = iter(reader) + for d in curr_iter: + if reader.failed: + raise ValueError("Meets error, fall back to read serially") + yield d + reader.close() + + self._prefetch_readers = [] + + + def _next_chunk(self, offset): + if self._read_buf_offset != offset: + self._read_buf_offset = offset + self._seq_read_amount = 0 + + if self._generator: + self._num_ooo_read += 1 + + self._close_readers() + + if self._read_buf: + data = self._read_buf + self._read_buf = None + else: + + # switch to prefetch reader + if (self._enable_prefetch and + self._seq_read_amount >= self._prefetch_threshold and + self._num_ooo_read < 3): + + if not self._generator: + self._generator = self._prefetch_generator(offset) + + try: + data = next(self._generator) + except StopIteration: + data = b'' + except Exception: + # fall back to read serially + self._seq_read_amount = 0 + self._close_readers() + data = self._next_chunk_direct(offset) + else: + data = self._next_chunk_direct(offset) + + if data is not None: + cn = len(data) + self._read_buf_offset += cn + self._seq_read_amount += cn + + return data + + def _assert_same(self, offset: int, result: models.GetObjectResult): + err = _check_object_same(self._modtime, self._etag, offset, result) + if err: + raise self._wrap_error('get_object', 
err)
+
+    def _wrap_error(self, op: str, error: Exception) -> Exception:
+        return PathError(
+            op=op,
+            path=f'oss://{self._bucket}/{self._key}',
+            error=error
+        )
+
+class CancelTask(Exception):
+    """Exception raised to cancel a prefetch task."""
+
+class _PrefetchDelegate:
+
+    def __init__(
+        self,
+        base: ReadOnlyFile,
+        offset: int,
+    ) -> None:
+        self._base = base
+        self._offset = offset
+        self._block_size = base._block_size
+
+        self._data_queue = queue.Queue()
+        self._get_timeout = 0.1
+
+        self._canceling = False
+        self._closed = False
+        self._stream_reader: StreamBody = None
+        self._stream_iter: Iterator = None
+        self._condition = threading.Condition()
+
+        # source info
+        self._modtime = base._modtime
+        self._etag = base._etag
+
+        # task info
+        size = min(base._size_in_bytes - offset, base._chunk_size)
+        self._request = models.GetObjectRequest(
+            bucket=base._bucket,
+            key=base._key,
+            version_id=base._version_id,
+            request_payer=base._request_payer
+        )
+        self._failed = False
+        self._task = self._base._executor.submit(self._download_part, (offset, size))
+
+    @property
+    def failed(self) -> bool:
+        """True if the delegate meets an error."""
+        return self._failed
+
+    @property
+    def closed(self) -> bool:
+        """True if the delegate will be closed."""
+        return self._closed
+
+    def __iter__(self):
+        return self
+
+    def __next__(self):
+        try:
+            d = self._data_queue.get(timeout=self._get_timeout)
+            if d == b'':
+                raise StopIteration
+            return d
+        except queue.Empty:
+            # no data yet; callers treat None as "try again"
+            return None
+
+    def get_task(self) -> Future:
+        """Returns the Future that tracks the background download task.
+
+        Returns:
+            Future: The task submitted to the executor.
+        """
+        return self._task
+
+    def close(self):
+        """Cancels the background task and releases the stream and the queue."""
+        if self._closed:
+            return
+
+        if not self._task.cancel():
+            # running or done
+            with self._condition:
+                self._canceling = True
+
+            # wait task done
+            try:
+                self._task.result()
+            except Exception:
+                pass
+
+        # release all
+        if self._stream_reader:
+            self._stream_reader.close()
+
+        if self._data_queue:
+            self._data_queue.queue.clear()
+
+        self._stream_reader = None
+        self._stream_iter = None
+        self._data_queue = None
+        self._closed = True
+
+
+    def _download_part(self, part):
+        start = part[0]
+        size = part[1]
+        try:
+            self._download_part_check_cancel(start, size)
+        except Exception:
+            # surfaced to the reader through the failed flag
+            self._failed = True
+
+
+    def _download_part_check_cancel(self, start, size):
+        got = 0
+        error: Exception = None
+        request = self._request
+
+        while True:
+            if self._canceling:
+                error = CancelTask()
+                break
+
+            request.range_header = f'bytes={start + got}-{start + size - 1}'
+            request.range_behavior = 'standard'
+            result = self._base._client.get_object(request)
+
+            # compare against the offset actually requested (start + got)
+            error = _check_object_same(self._modtime, self._etag, start + got, result)
+            if error:
+                break
+
+            try:
+                kwargs = {}
+                if self._block_size:
+                    kwargs['block_size'] = self._block_size
+                with self._condition:
+                    self._stream_reader = result.body
+                    self._stream_iter = result.body.iter_bytes(**kwargs)
+                    if self._canceling:
+                        error = CancelTask()
+                        break
+
+                for d in self._stream_iter:
+                    if self._canceling:
+                        error = CancelTask()
+                        break
+                    got += len(d)
+                    self._data_queue.put(d)
+                break
+            except Exception:
+                pass
+            finally:
+                if self._stream_reader:
+                    self._stream_reader.close()
+                self._stream_reader = None
+                self._stream_iter = None
+
+        if error:
+            raise error
+
+        if got != size:
+            raise ValueError(f"expect size {size}, but got {got}")
+
+        self._data_queue.put(b'')
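+
+# Usage sketch (reader construction assumed): these delegates are created by
+# ReadOnlyFile._prefetch_generator, prefetch_num at a time, once a caller has
+# read prefetch_threshold bytes strictly in order, e.g.
+#
+#     f = ReadOnlyFile(client, bucket='b', key='k', enable_prefetch=True,
+#                      prefetch_num=3, chunk_size=6 * 1024 * 1024)
+#     with f:
+#         data = f.readall()   # long sequential runs fan out ranged GETs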
+
+def _check_object_same(src_modtime, src_etag, offset: int, result: models.GetObjectResult):
+    modtime = result.last_modified
+    etag = result.etag
+    got_offset = 0
+    if (crange := result.headers.get("Content-Range", None)):
+        content_range = utils.parse_content_range(crange)
+        got_offset = content_range[0]
+
+    if got_offset != offset:
+        return ValueError(f"Range get fail, expect offset:{offset}, got offset:{got_offset}")
+
+    if ((modtime and src_modtime and modtime != src_modtime) or
+        (etag and src_etag and etag != src_etag)):
+        return ValueError(f"Source file is changed, origin info[{src_modtime},{src_etag}], new info [{modtime},{etag}]")
+
+    return None
diff --git a/alibabacloud_oss_v2/io_utils.py b/alibabacloud_oss_v2/io_utils.py
new file mode 100644
index 0000000..19f48b0
--- /dev/null
+++ b/alibabacloud_oss_v2/io_utils.py
@@ -0,0 +1,594 @@
+"""utils for stream wrappers"""
+import os
+import sys
+import errno
+import threading
+from typing import Optional, IO, List, Iterable, Any, AnyStr, Iterator
+from . import utils
+from .types import StreamBody, HttpResponse
+
+# pylint: disable=no-member
+# pylint: disable=protected-access
+
+
+class TeeIterator:
+    """An iterator that writes what it reads from source to the writers w."""
+
+    def __iter__(self):
+        return self.iter_bytes()
+
+    def __next__(self):
+        d = self.next()
+        if self._writers is not None:
+            for w in self._writers:
+                w.write(d)
+        return d
+
+    def seekable(self):
+        """True if the underlying source supports rewinding."""
+        return self._seekable
+
+    def reset(self) -> None:
+        """Resets every writer that supports reset()."""
+        if self._writers is not None:
+            for w in self._writers:
+                if hasattr(w, 'reset'):
+                    w.reset()
+
+    @staticmethod
+    def from_source(source: Any, writers: List[Any], **kwargs: Any) -> "TeeIterator":
+        """Converts source to TeeIterator
+
+        Args:
+            source (Any): what it reads from
+            writers (List[Any]): what it writes to
+
+        Raises:
+            TypeError: If the type of source is not supported, raises error.
+
+        Returns:
+            TeeIterator: An iterator that writes what it reads from source to w
+        """
+
+        block_size = kwargs.get("block_size", 32 * 1024)
+
+        if isinstance(source, str):
+            return _TeeIteratorStr(source, writers, block_size)
+
+        if isinstance(source, bytes):
+            return _TeeIteratorBytes(source, writers, block_size)
+
+        # file-like object
+        if hasattr(source, 'seek') and hasattr(source, 'read'):
+            data_len = utils.guess_content_length(source)
+            if data_len is not None:
+                return _TeeIteratorIOLen(source, data_len, writers, block_size)
+            return _TeeIteratorIO(source, writers, block_size)
+
+        if isinstance(source, Iterable):
+            return _TeeIteratorIter(source, writers)
+
+        raise TypeError(
+            f'Invalid type for body. 
Expected str, bytes, file-like object, got {type(source)}') + + +class _TeeIteratorStr(TeeIterator): + """_summary_ + """ + + def __init__( + self, + data: str, + writers: List[Any], + block_size: Optional[int] = None + ) -> None: + self._data = data + self._writers = writers + self._block_size = block_size + self._offset = 0 + self._total = 0 + self._seekable = True + self._content = None + + def __len__(self): + return len(self._data) + + def iter_bytes(self): + """_summary_ + """ + self._content = self._data.encode() + self._total = len(self._content) + self._offset = 0 + return self + + def next(self): + """_summary_ + """ + if self._offset >= self._total: + raise StopIteration + + remains = self._total - self._offset + remains = min(self._block_size, remains) + + ret = self._content[self._offset: self._offset + remains] + self._offset += remains + + return ret + + +class _TeeIteratorBytes(TeeIterator): + """_summary_ + """ + + def __init__( + self, + data: bytes, + writers: List[Any], + block_size: Optional[int] = None + ) -> None: + self._data = data + self._writers = writers + self._block_size = block_size + self._offset = 0 + self._total = 0 + self._seekable = True + self._content = None + + def __len__(self): + return len(self._data) + + def iter_bytes(self): + """_summary_ + """ + self._content = self._data + self._total = len(self._content) + self._offset = 0 + return self + + def next(self): + """_summary_ + """ + if self._offset >= self._total: + raise StopIteration + + remains = self._total - self._offset + remains = min(self._block_size, remains) + + ret = self._content[self._offset: self._offset + remains] + self._offset += remains + + return ret + +class _TeeIteratorIOLen(TeeIterator): + """_summary_ + """ + + def __init__( + self, + data: IO, + total: int, + writers: List[Any], + block_size: Optional[int] = None + ) -> None: + self._data = data + self._total = total + self._writers = writers + self._block_size = block_size + seekable = is_seekable_io(data) + self._start_offset = 0 if not seekable else data.seek(0, os.SEEK_CUR) + self._seekable = seekable + + def __len__(self): + return self._total + + def iter_bytes(self): + """_summary_ + """ + if self._seekable: + self._data.seek(self._start_offset, os.SEEK_SET) + + return self + + def next(self): + """_summary_ + """ + d = self._data.read(self._block_size) + + if d: + return d + + raise StopIteration + +class _TeeIteratorIO(TeeIterator): + """_summary_ + """ + + def __init__( + self, + data: IO, + writers: List[Any], + block_size: Optional[int] = None + ) -> None: + self._data = data + self._writers = writers + self._block_size = block_size + + seekable = is_seekable_io(data) + self._start_offset = 0 if not seekable else data.seek(0, os.SEEK_CUR) + self._total = utils.guess_content_length(data) + self._seekable = seekable + + if self._total is not None: + setattr(self, '__len__', lambda x: x._total) + + def iter_bytes(self): + """_summary_ + """ + if self._seekable: + self._data.seek(self._start_offset, os.SEEK_SET) + + return self + + def next(self): + """_summary_ + """ + d = self._data.read(self._block_size) + + if d: + return d + + raise StopIteration + + +class _TeeIteratorIter(TeeIterator): + """_summary_ + """ + + def __init__( + self, + data: Iterable[bytes], + writers: List[Any], + ) -> None: + self._data = data + self._writers = writers + self._seekable = True + self._iter = None + + def iter_bytes(self): + """_summary_ + """ + self._iter = iter(self._data) + return self + + def next(self): + """_summary_ + 
""" + return next(self._iter) + + +def is_seekable_io(fileobj): + """_summary_ + """ + if hasattr(fileobj, 'seekable'): + return fileobj.seekable() + + if hasattr(fileobj, 'seek') and hasattr(fileobj, 'tell'): + try: + fileobj.seek(0, os.SEEK_CUR) + return True + except OSError: + return False + + return False + + +if sys.platform.startswith('win'): + def rename_file(current_filename, new_filename): + try: + os.remove(new_filename) + except OSError as e: + if e.errno != errno.ENOENT: + raise + os.rename(current_filename, new_filename) +else: + rename_file = os.rename + + +class ReadAtReader: + """A IO that implements read_at + """ + def __init__( + self, + reader: IO[bytes], + ) -> None: + self._reader = reader + self._readat_lock = threading.Lock() + + @property + def mode(self) -> str: + """_summary_ + """ + return self._reader.mode + + @property + def name(self) -> str: + """_summary_ + """ + return f'{self._reader.name} with read_at' + + def close(self) -> None: + """_summary_ + """ + self._reader.close() + + @property + def closed(self) -> bool: + """_summary_ + """ + return self._reader.closed + + def fileno(self) -> int: + """_summary_ + """ + return self._reader.fileno() + + def flush(self) -> None: + """_summary_ + """ + self._reader.flush() + + def isatty(self) -> bool: + """_summary_ + """ + return self._reader.isatty() + + def read(self, n: int = -1) -> AnyStr: + """_summary_ + """ + return self._reader.read(n) + + def read_at(self, off: int, n: int = -1) -> AnyStr: + """_summary_ + """ + with self._readat_lock: + self._reader.seek(off) + return self._reader.read(n) + + def readable(self) -> bool: + """_summary_ + """ + return self._reader.readable() + + def readline(self, limit: int = -1) -> AnyStr: + """_summary_ + """ + return self._reader.readline(limit) + + def readlines(self, hint: int = -1) -> List[AnyStr]: + """_summary_ + """ + return self._reader.readlines(hint) + + def seek(self, offset: int, whence: int = 0) -> int: + """_summary_ + """ + return self._reader.seek(offset, whence) + + def seekable(self) -> bool: + """_summary_ + """ + return self._reader.seekable() + + def tell(self) -> int: + """_summary_ + """ + return self._reader.tell() + + def __enter__(self) -> 'IO[AnyStr]': + self._reader.__enter__() + return self + + def __exit__(self, type_, value, traceback) -> None: + self._reader.__exit__(type_, value, traceback) + +class SectionReader: + """ + A SectionReader that reads from r starting at offset off and stops with EOF after n bytes + """ + def __init__( + self, + reader: ReadAtReader, + off: int, + n: int, + ) -> None: + self._reader = reader + if off <= sys.maxsize-n: + remaining = n + off + else: + remaining = sys.maxsize + self._base = off + self._off = off + self._limit = remaining + + def read(self, n: int = -1) -> AnyStr: + """_summary_ + """ + if self._off >= self._limit: + return b'' + + max_size = self._limit - self._off + if n < 0 or n > max_size: + n = max_size + + d = self._reader.read_at(self._off, n) + self._off += len(d) + return d + + def read_at(self, off: int, n: int = -1) -> AnyStr: + """_summary_ + """ + if off < 0 or off >= self._limit - self._base: + return b'' + + off += self._base + max_size = self._limit - off + if n < 0 or n > max_size: + n = max_size + + return self._reader.read_at(self._off, n) + + def readable(self) -> bool: + """_summary_ + """ + return self._reader.readable() + + def seek(self, offset: int, whence: int = 0) -> int: + """_summary_ + """ + if whence == os.SEEK_SET: + offset += self._base + elif whence == 
os.SEEK_CUR: + offset += self._off + elif whence == os.SEEK_END: + offset += self._limit + else: + raise ValueError(f'invalid whence {whence}') + + if offset < self._base: + raise OSError("seek() returned an invalid position") + + self._off = offset + + return offset - self._base + + def seekable(self) -> bool: + """_summary_ + """ + return self._reader.seekable() + + def tell(self) -> int: + """_summary_ + """ + return self._off - self._base + + def __len__(self): + return self._limit - self._base + +class StreamBodyReader(StreamBody): + """ + A StreamBodyReader that convert HttpResponse type to StreamBody type. + """ + def __init__( + self, + response: HttpResponse, + ) -> None: + self._response = response + + def __enter__(self) -> "StreamBodyReader": + self._response.__enter__() + return self + + def __exit__(self, *args: Any) -> None: + self._response.__exit__(*args) + + @property + def is_closed(self) -> bool: + return self._response.is_closed + + @property + def is_stream_consumed(self) -> bool: + return self._response.is_stream_consumed + + @property + def content(self) -> bytes: + if not self._response.is_stream_consumed: + self._response.read() + return self._response.content + + def read(self) -> bytes: + return self._response.read() + + def close(self) -> None: + self._response.close() + + def iter_bytes(self, **kwargs: Any) -> Iterator[bytes]: + return self._response.iter_bytes(**kwargs) + +class StreamBodyDiscarder(StreamBody): + """_summary_ + """ + def __init__( + self, + stream: StreamBody, + discard: int + ) -> None: + self._stream = stream + self._discard = discard + + def __enter__(self) -> "StreamBodyDiscarder": + self._stream.__enter__() + return self + + def __exit__(self, *args: Any) -> None: + self._stream.__exit__(*args) + + @property + def is_closed(self) -> bool: + return self._stream.is_closed + + @property + def is_stream_consumed(self) -> bool: + return self._stream.is_stream_consumed + + @property + def content(self) -> bytes: + if not self._stream.is_stream_consumed: + self._stream.read() + return self._stream.content[self._discard:] + + def read(self) -> bytes: + data = self._stream.read() + return data[self._discard:] + + def close(self) -> None: + self._stream.close() + + def iter_bytes(self, **kwargs: Any) -> Iterator[bytes]: + discard = self._discard + for d in self._stream.iter_bytes(**kwargs): + if discard > 0: + if discard > len(d): + discard -= len(d) + else: + yield d[discard:] + discard = 0 + else: + yield d + + +class LimitReader: + """_summary_ + """ + def __init__( + self, + reader: IO[bytes], + n: int, + ) -> None: + self._reader = reader + self._n = n + + def read(self, n: int = -1) -> AnyStr: + """_summary_ + """ + if self._n <= 0: + return b'' + + if n < 0 or n > self._n: + n = self._n + + d = self._reader.read(n) + self._n -= len(d) + return d diff --git a/alibabacloud_oss_v2/models/__init__.py b/alibabacloud_oss_v2/models/__init__.py new file mode 100644 index 0000000..aa86a72 --- /dev/null +++ b/alibabacloud_oss_v2/models/__init__.py @@ -0,0 +1,6 @@ + +from .enums import * +from .service import * +from .region import * +from .bucket_basic import * +from .object_basic import * diff --git a/alibabacloud_oss_v2/models/bucket_basic.py b/alibabacloud_oss_v2/models/bucket_basic.py new file mode 100644 index 0000000..0da3833 --- /dev/null +++ b/alibabacloud_oss_v2/models/bucket_basic.py @@ -0,0 +1,1258 @@ +"""Models for bucket basic operation APIs""" +# pylint: disable=too-few-public-methods, too-many-instance-attributes, unnecessary-lambda +# 
pylint: disable=super-init-not-called, too-many-lines, line-too-long, too-many-arguments
+
+import datetime
+from typing import Optional, List, Any
+from .. import serde
+
+
+class CreateBucketConfiguration(serde.Model):
+    """The configuration information for the bucket."""
+
+    def __init__(
+        self,
+        storage_class: Optional[str] = None,
+        data_redundancy_type: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        storage_class (str, optional): The storage class of the bucket.
+        data_redundancy_type (str, optional): The redundancy type of the bucket.
+        """
+        super().__init__(**kwargs)
+        self.storage_class = storage_class
+        self.data_redundancy_type = data_redundancy_type
+
+    _attribute_map = {
+        "storage_class": {"tag": "xml", "rename": "StorageClass"},
+        "data_redundancy_type": {"tag": "xml", "rename": "DataRedundancyType"},
+    }
+    _xml_map = {
+        "name": "CreateBucketConfiguration"
+    }
+
+
+class PutBucketRequest(serde.RequestModel):
+    """The request for the PutBucket operation."""
+
+    _attribute_map = {
+        "bucket": {"tag": "input", "position": "host", "required": True},
+        "acl": {"tag": "input", "position": "header", "rename": "x-oss-acl"},
+        "resource_group_id": {"tag": "input", "position": "header", "rename": "x-oss-resource-group-id"},
+        "create_bucket_configuration": {"tag": "input", "position": "body", "rename": "CreateBucketConfiguration", "type": "xml"},
+    }
+
+    def __init__(
+        self,
+        bucket: str = None,
+        acl: Optional[str] = None,
+        resource_group_id: Optional[str] = None,
+        create_bucket_configuration: Optional["CreateBucketConfiguration"] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        bucket (str, required): The name of the bucket to create.
+        acl (str, optional): The access control list (ACL) of the bucket.
+        resource_group_id (str, optional): The ID of the resource group.
+        create_bucket_configuration (CreateBucketConfiguration, optional):
+            The configuration information for the bucket.
+        """
+        super().__init__(**kwargs)
+        self.bucket = bucket
+        self.acl = acl
+        self.resource_group_id = resource_group_id
+        self.create_bucket_configuration = create_bucket_configuration
+
+
+class PutBucketResult(serde.ResultModel):
+    """The result for the PutBucket operation."""
+
+
+class DeleteBucketRequest(serde.RequestModel):
+    """The request for the DeleteBucket operation."""
+
+    _attribute_map = {
+        "bucket": {"tag": "input", "position": "host", "required": True},
+    }
+
+    def __init__(
+        self,
+        bucket: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        bucket (str, required): The name of the bucket to delete.
+        """
+        super().__init__(**kwargs)
+        self.bucket = bucket
+
+
+class DeleteBucketResult(serde.ResultModel):
+    """The result for the DeleteBucket operation."""
+
+
+class Owner(serde.Model):
+    """Stores information about the bucket or object owner."""
+
+    _attribute_map = {
+        "id": {"tag": "xml", "rename": "ID"},
+        "display_name": {"tag": "xml", "rename": "DisplayName"},
+    }
+
+    _xml_map = {
+        "name": "Owner"
+    }
+
+    def __init__(
+        self,
+        id: Optional[str] = None,  # pylint: disable=redefined-builtin
+        display_name: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        id (str, optional): The ID of the owner.
+        display_name (str, optional): The display name of the owner.
+        """
+        super().__init__(**kwargs)
+        self.id = id
+        self.display_name = display_name
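+
+
+# A hedged note on the maps above (the machinery itself lives in serde.py, not
+# here): "rename" ties each field to its XML tag, so an Owner deserialized from
+#
+#     <Owner><ID>1234</ID><DisplayName>user</DisplayName></Owner>
+#
+# is expected to carry owner.id == '1234' and owner.display_name == 'user'.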
+
+
+class ObjectProperties(serde.Model):
+    """Stores the metadata of the object."""
+
+    _attribute_map = {
+        "key": {"tag": "xml", "rename": "Key"},
+        "object_type": {"tag": "xml", "rename": "Type"},
+        "size": {"tag": "xml", "rename": "Size", "type": "int"},
+        "etag": {"tag": "xml", "rename": "ETag"},
+        "last_modified": {"tag": "xml", "rename": "LastModified", "type": "datetime"},
+        "storage_class": {"tag": "xml", "rename": "StorageClass"},
+        "owner": {"tag": "xml", "rename": "Owner", "type": "Owner"},
+        "restore_info": {"tag": "xml", "rename": "RestoreInfo"},
+    }
+
+    _dependency_map = {
+        "Owner": {"new": lambda: Owner()},
+    }
+
+    _xml_map = {
+        "name": "ObjectProperties"
+    }
+
+    def __init__(
+        self,
+        key: Optional[str] = None,
+        object_type: Optional[str] = None,
+        size: Optional[int] = None,
+        etag: Optional[str] = None,
+        last_modified: Optional[datetime.datetime] = None,
+        storage_class: Optional[str] = None,
+        owner: Optional[Owner] = None,
+        restore_info: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        key (str, optional): The name of the object.
+        object_type (str, optional): The type of the object.
+            Valid values: Normal, Multipart and Appendable
+        size (int, optional): The size of the returned object. Unit: bytes.
+        etag (str, optional): The entity tag (ETag). An ETag is created when an object is created to
+            identify the content of the object.
+        last_modified (datetime, optional): The time when the returned objects were last modified.
+        storage_class (str, optional): The storage class of the object.
+        owner (Owner, optional): The container that stores information about the object owner.
+        restore_info (str, optional): The restoration status of the object.
+        """
+        super().__init__(**kwargs)
+        self.key = key
+        self.object_type = object_type
+        self.size = size
+        self.etag = etag
+        self.last_modified = last_modified
+        self.storage_class = storage_class
+        self.owner = owner
+        self.restore_info = restore_info
+
+
+class CommonPrefix(serde.Model):
+    """
+    If the Delimiter parameter is specified in the request,
+    the response contains the CommonPrefixes parameter.
+    The objects whose names contain the same string from the prefix
+    to the next occurrence of the delimiter are grouped as
+    a single result element in the CommonPrefixes parameter.
+    """
+
+    _attribute_map = {
+        "prefix": {"tag": "xml", "rename": "Prefix"},
+    }
+
+    _xml_map = {
+        "name": "CommonPrefix"
+    }
+
+    def __init__(
+        self,
+        prefix: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        prefix (str, optional): The prefix contained in the returned object names.
+        """
+        super().__init__(**kwargs)
+        self.prefix = prefix
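+
+
+# Usage sketch (client assumed): a delimiter groups keys into CommonPrefixes,
+# e.g.
+#
+#     result = client.list_objects(ListObjectsRequest(
+#         bucket='b', delimiter='/', prefix='photos/'))
+#     for cp in result.common_prefixes or []:
+#         print(cp.prefix)            # e.g. photos/2024/
+#     for obj in result.contents or []:
+#         print(obj.key, obj.size)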
+        """
+        super().__init__(**kwargs)
+        self.prefix = prefix
+
+
+class ListObjectsRequest(serde.RequestModel):
+    """The request for the ListObjects operation."""
+
+    _attribute_map = {
+        "bucket": {"tag": "input", "position": "host", "required": True},
+        "delimiter": {"tag": "input", "position": "query", "rename": "delimiter"},
+        "encoding_type": {"tag": "input", "position": "query", "rename": "encoding-type"},
+        "marker": {"tag": "input", "position": "query", "rename": "marker"},
+        "max_keys": {"tag": "input", "position": "query", "rename": "max-keys", "type": "int"},
+        "prefix": {"tag": "input", "position": "query", "rename": "prefix"},
+        "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"},
+    }
+
+    def __init__(
+        self,
+        bucket: str = None,
+        delimiter: Optional[str] = None,
+        encoding_type: Optional[str] = None,
+        marker: Optional[str] = None,
+        max_keys: Optional[int] = None,
+        prefix: Optional[str] = None,
+        request_payer: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        bucket (str, required): The name of the bucket.
+        delimiter (str, optional): The character that is used to group objects by name.
+            If you specify the delimiter parameter in the request, the response contains
+            the CommonPrefixes parameter. The objects whose names contain the same string
+            from the prefix to the next occurrence of the delimiter are grouped
+            as a single result element in CommonPrefixes.
+        encoding_type (str, optional): The encoding type of the content in the response. Valid value: url
+        marker (str, optional): The name of the object after which the ListObjects (GetBucket) operation starts.
+            If this parameter is specified, objects whose names are alphabetically
+            greater than the marker value are returned.
+        max_keys (int, optional): The maximum number of objects that you want to return.
+            If the list operation cannot be completed in one response because the max-keys
+            parameter is specified, the NextMarker element is included in the response
+            as the marker for the next list operation.
+        prefix (str, optional): The prefix that the names of the returned objects must contain.
+        request_payer (str, optional): To indicate that the requester is aware that the request
+            and data download will incur costs.
+        """
+        super().__init__(**kwargs)
+        self.bucket = bucket
+        self.delimiter = delimiter
+        self.encoding_type = encoding_type
+        self.marker = marker
+        self.max_keys = max_keys
+        self.prefix = prefix
+        self.request_payer = request_payer
+
+
+class ListObjectsResult(serde.ResultModel):
+    """The result for the ListObjects operation."""
+
+    _attribute_map = {
+        "name": {"tag": "xml", "rename": "Name"},
+        "prefix": {"tag": "xml", "rename": "Prefix"},
+        "marker": {"tag": "xml", "rename": "Marker"},
+        "max_keys": {"tag": "xml", "rename": "MaxKeys", "type": "int"},
+        "delimiter": {"tag": "xml", "rename": "Delimiter"},
+        "is_truncated": {"tag": "xml", "rename": "IsTruncated", "type": "bool"},
+        "next_marker": {"tag": "xml", "rename": "NextMarker"},
+        "encoding_type": {"tag": "xml", "rename": "EncodingType"},
+        "contents": {"tag": "xml", "rename": "Contents", "type": "[ObjectProperties]"},
+        "common_prefixes": {"tag": "xml", "rename": "CommonPrefixes", "type": "[CommonPrefix]"},
+    }
+
+    _dependency_map = {
+        "ObjectProperties": {"new": lambda: ObjectProperties()},
+        "CommonPrefix": {"new": lambda: CommonPrefix()},
+    }
+
+    def __init__(
+        self,
+        name: Optional[str] = None,
+        prefix: Optional[str] = None,
+        marker: Optional[str] = None,
+        max_keys: Optional[int] = None,
+        delimiter: Optional[str] = None,
+        is_truncated: Optional[bool] = None,
+        next_marker: Optional[str] = None,
+        encoding_type: Optional[str] = None,
+        contents: Optional[List[ObjectProperties]] = None,
+        common_prefixes: Optional[List[CommonPrefix]] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        name (str, optional): The name of the bucket.
+        prefix (str, optional): The prefix contained in the returned object names.
+        marker (str, optional): The name of the object after which the list operation begins.
+        max_keys (int, optional): The maximum number of returned objects in the response.
+        delimiter (str, optional): The character that is used to group objects by name.
+        is_truncated (bool, optional): Indicates whether the returned results are truncated.
+            true indicates that not all results are returned this time.
+            false indicates that all results are returned this time.
+        next_marker (str, optional): The position from which the next list operation starts.
+        encoding_type (str, optional): The encoding type of the content in the response.
+        contents ([ObjectProperties], optional): The container that stores the metadata of the returned objects.
+        common_prefixes ([CommonPrefix], optional): If the Delimiter parameter is specified in the request,
+            the response contains the CommonPrefixes element.
+        """
+        super().__init__(**kwargs)
+        self.name = name
+        self.prefix = prefix
+        self.marker = marker
+        self.max_keys = max_keys
+        self.delimiter = delimiter
+        self.is_truncated = is_truncated
+        self.next_marker = next_marker
+        self.encoding_type = encoding_type
+        self.contents = contents
+        self.common_prefixes = common_prefixes
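+
+
+# Usage sketch (editor's illustration): marker-based pagination with
+# ListObjects. When `is_truncated` is true, `next_marker` seeds the `marker`
+# of the next request. Assumes a configured `client` with a `list_objects()`
+# method (see sample/list_objects.py).
+#
+#     marker = None
+#     while True:
+#         result = client.list_objects(ListObjectsRequest(
+#             bucket='example-bucket', prefix='logs/', marker=marker))
+#         for obj in result.contents or []:
+#             print(obj.key, obj.size)
+#         if not result.is_truncated:
+#             break
+#         marker = result.next_marker
+
+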
+class PutBucketAclRequest(serde.RequestModel):
+    """The request for the PutBucketAcl operation."""
+
+    _attribute_map = {
+        "bucket": {"tag": "input", "position": "host", "required": True},
+        "acl": {"tag": "input", "position": "header", "rename": "x-oss-acl"},
+    }
+
+    def __init__(
+        self,
+        bucket: str = None,
+        acl: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        bucket (str, required): The name of the bucket.
+        acl (str, optional): The access control list (ACL) of the bucket.
+        """
+        super().__init__(**kwargs)
+        self.bucket = bucket
+        self.acl = acl
+
+
+class PutBucketAclResult(serde.ResultModel):
+    """The result for the PutBucketAcl operation."""
+
+
+class GetBucketAclRequest(serde.RequestModel):
+    """The request for the GetBucketAcl operation."""
+
+    _attribute_map = {
+        "bucket": {"tag": "input", "position": "host", "required": True},
+    }
+
+    def __init__(
+        self,
+        bucket: str = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        bucket (str, required): The name of the bucket.
+        """
+        super().__init__(**kwargs)
+        self.bucket = bucket
+
+
+class AccessControlList(serde.Model):
+    """Stores ACL information."""
+
+    _attribute_map = {
+        "acl": {"tag": "xml", "rename": "Grant"},
+    }
+
+    _xml_map = {
+        "name": "AccessControlList"
+    }
+
+    def __init__(
+        self,
+        acl: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        acl (str, optional): The access control list (ACL) of the object.
+        """
+        super().__init__(**kwargs)
+        self.acl = acl
+
+
+class GetBucketAclResult(serde.ResultModel):
+    """The result for the GetBucketAcl operation."""
+
+    _attribute_map = {
+        "owner": {"tag": "xml", "rename": "Owner", "type": "Owner"},
+        "acl": {"tag": "xml", "rename": "AccessControlList/Grant"},
+    }
+
+    _dependency_map = {
+        "Owner": {"new": lambda: Owner()},
+    }
+
+    _xml_map = {
+        "name": "AccessControlPolicy"
+    }
+
+    def __init__(
+        self,
+        owner: Optional[Owner] = None,
+        acl: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        owner (Owner, optional): The container that stores information about the bucket owner.
+        acl (str, optional): The access control list (ACL) of the bucket.
+        """
+        super().__init__(**kwargs)
+        self.owner = owner
+        self.acl = acl
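+
+
+# Usage sketch (editor's illustration): setting and reading a bucket ACL.
+# Assumes a configured `client` exposing `put_bucket_acl()` and
+# `get_bucket_acl()`; the method names mirror the operation models above.
+#
+#     client.put_bucket_acl(PutBucketAclRequest(
+#         bucket='example-bucket', acl='public-read'))
+#     result = client.get_bucket_acl(GetBucketAclRequest(bucket='example-bucket'))
+#     print(result.acl, result.owner.id if result.owner else None)
+
+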
+class ListObjectsV2Request(serde.RequestModel):
+    """The request for the ListObjectsV2 operation."""
+
+    _attribute_map = {
+        "bucket": {"tag": "input", "position": "host", "required": True},
+        "delimiter": {"tag": "input", "position": "query", "rename": "delimiter"},
+        "start_after": {"tag": "input", "position": "query", "rename": "start-after"},
+        "encoding_type": {"tag": "input", "position": "query", "rename": "encoding-type"},
+        "continuation_token": {"tag": "input", "position": "query", "rename": "continuation-token"},
+        "max_keys": {"tag": "input", "position": "query", "rename": "max-keys", "type": "int"},
+        "prefix": {"tag": "input", "position": "query", "rename": "prefix"},
+        "fetch_owner": {"tag": "input", "position": "query", "rename": "fetch-owner", "type": "bool"},
+        "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"},
+    }
+
+    def __init__(
+        self,
+        bucket: str = None,
+        delimiter: Optional[str] = None,
+        start_after: Optional[str] = None,
+        encoding_type: Optional[str] = None,
+        continuation_token: Optional[str] = None,
+        max_keys: Optional[int] = None,
+        prefix: Optional[str] = None,
+        fetch_owner: Optional[bool] = None,
+        request_payer: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        bucket (str, required): The name of the bucket.
+        delimiter (str, optional): The character that is used to group objects by name.
+            If you specify the delimiter parameter in the request, the response contains
+            the CommonPrefixes parameter. The objects whose names contain the same string
+            from the prefix to the next occurrence of the delimiter are grouped
+            as a single result element in CommonPrefixes.
+        start_after (str, optional): The position from which the list operation starts.
+            Objects whose names are alphabetically greater than the value of start-after are returned.
+        encoding_type (str, optional): The encoding type of the content in the response. Valid value: url
+        continuation_token (str, optional): The token from which the list operation starts.
+            You can obtain this token from NextContinuationToken in the previous
+            ListObjectsV2 (GetBucketV2) result.
+        max_keys (int, optional): The maximum number of objects that you want to return.
+            If the list operation cannot be completed in one response because the max-keys
+            parameter is specified, the NextContinuationToken element is included in the
+            response as the token for the next list operation.
+        prefix (str, optional): The prefix that the names of the returned objects must contain.
+        fetch_owner (bool, optional): Specifies whether to include the owner information in the response.
+            Valid values: true, false
+        request_payer (str, optional): To indicate that the requester is aware that the request
+            and data download will incur costs.
+        """
+        super().__init__(**kwargs)
+        self.bucket = bucket
+        self.delimiter = delimiter
+        self.start_after = start_after
+        self.encoding_type = encoding_type
+        self.continuation_token = continuation_token
+        self.max_keys = max_keys
+        self.prefix = prefix
+        self.fetch_owner = fetch_owner
+        self.request_payer = request_payer
+
+
+class ListObjectsV2Result(serde.ResultModel):
+    """The result for the ListObjectsV2 operation."""
+
+    _attribute_map = {
+        "name": {"tag": "xml", "rename": "Name"},
+        "prefix": {"tag": "xml", "rename": "Prefix"},
+        "continuation_token": {"tag": "xml", "rename": "ContinuationToken"},
+        "max_keys": {"tag": "xml", "rename": "MaxKeys", "type": "int"},
+        "delimiter": {"tag": "xml", "rename": "Delimiter"},
+        "is_truncated": {"tag": "xml", "rename": "IsTruncated", "type": "bool"},
+        "next_continuation_token": {"tag": "xml", "rename": "NextContinuationToken"},
+        "encoding_type": {"tag": "xml", "rename": "EncodingType"},
+        "contents": {"tag": "xml", "rename": "Contents", "type": "[ObjectProperties]"},
+        "common_prefixes": {"tag": "xml", "rename": "CommonPrefixes", "type": "[CommonPrefix]"},
+        "start_after": {"tag": "xml", "rename": "StartAfter"},
+        "key_count": {"tag": "xml", "rename": "KeyCount", "type": "int"},
+    }
+
+    _dependency_map = {
+        "ObjectProperties": {"new": lambda: ObjectProperties()},
+        "CommonPrefix": {"new": lambda: CommonPrefix()},
+    }
+
+    def __init__(
+        self,
+        name: Optional[str] = None,
+        prefix: Optional[str] = None,
+        continuation_token: Optional[str] = None,
+        max_keys: Optional[int] = None,
+        delimiter: Optional[str] = None,
+        is_truncated: Optional[bool] = None,
+        next_continuation_token: Optional[str] = None,
+        encoding_type: Optional[str] = None,
+        contents: Optional[List[ObjectProperties]] = None,
+        common_prefixes: Optional[List[CommonPrefix]] = None,
+        start_after: Optional[str] = None,
+        key_count: Optional[int] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        name (str, optional): The name of the bucket.
+        prefix (str, optional): The prefix contained in the returned object names.
+        continuation_token (str, optional): The token from which the list operation starts.
+        max_keys (int, optional): The maximum number of returned objects in the response.
+        delimiter (str, optional): The character that is used to group objects by name.
+        is_truncated (bool, optional): Indicates whether the returned results are truncated.
+            true indicates that not all results are returned this time.
+            false indicates that all results are returned this time.
+        next_continuation_token (str, optional): The position from which the next list operation starts.
+        encoding_type (str, optional): The encoding type of the content in the response.
+        contents ([ObjectProperties], optional): The container that stores the metadata of the returned objects.
+        common_prefixes ([CommonPrefix], optional): If the Delimiter parameter is specified in the request,
+            the response contains the CommonPrefixes element.
+        start_after (str, optional): If the StartAfter parameter is specified in the request,
+            the StartAfter element will be included in the returned response.
+        key_count (int, optional): The number of keys returned in this request. If a delimiter is specified,
+            KeyCount is the sum of the numbers of Key and CommonPrefixes elements.
+        """
+        super().__init__(**kwargs)
+        self.name = name
+        self.prefix = prefix
+        self.continuation_token = continuation_token
+        self.max_keys = max_keys
+        self.delimiter = delimiter
+        self.is_truncated = is_truncated
+        self.next_continuation_token = next_continuation_token
+        self.encoding_type = encoding_type
+        self.contents = contents
+        self.common_prefixes = common_prefixes
+        self.start_after = start_after
+        self.key_count = key_count
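+
+
+# Usage sketch (editor's illustration): ListObjectsV2 paginates with a
+# continuation token instead of a marker. Assumes a configured `client` with
+# a `list_objects_v2()` method (see sample/list_objects_v2.py).
+#
+#     token = None
+#     while True:
+#         result = client.list_objects_v2(ListObjectsV2Request(
+#             bucket='example-bucket', delimiter='/', fetch_owner=True,
+#             continuation_token=token))
+#         for prefix in result.common_prefixes or []:
+#             print('dir:', prefix.prefix)
+#         for obj in result.contents or []:
+#             print('obj:', obj.key)
+#         if not result.is_truncated:
+#             break
+#         token = result.next_continuation_token
+
+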
"int"}, + } + + _xml_map = { + "name": "BucketStat" + } + + def __init__( + self, + storage: Optional[int] = None, + object_count: Optional[int] = None, + multi_part_upload_count: Optional[int] = None, + live_channel_count: Optional[int] = None, + last_modified_time: Optional[int] = None, + standard_storage: Optional[int] = None, + standard_object_count: Optional[int] = None, + infrequent_access_storage: Optional[int] = None, + infrequent_access_real_storage: Optional[int] = None, + infrequent_access_object_count: Optional[int] = None, + archive_storage: Optional[int] = None, + archive_real_storage: Optional[int] = None, + archive_object_count: Optional[int] = None, + cold_archive_storage: Optional[int] = None, + cold_archive_real_storage: Optional[int] = None, + cold_archive_object_count: Optional[int] = None, + deep_cold_archive_storage: Optional[int] = None, + deep_cold_archive_real_storage: Optional[int] = None, + deep_cold_archive_object_count: Optional[int] = None, + delete_marker_count: Optional[int] = None, + + **kwargs: Any + ) -> None: + """ + storage (int, optional): The total actual storage capacity of the bucket, measured in bytes. + object_count (int, optional): The total number of objects in the bucket. + multi_part_upload_count (int, optional): The number of Multipart Uploads in the Bucket that have been initialized but not yet completed (Complete) or aborted (Abort). + live_channel_count (int, optional): The number of live channels in the bucket. + last_modified_time (int, optional): The time point at which the stored information is obtained, in the format of a timestamp and in seconds. + standard_storage (int, optional): The storage capacity of standard storage types, measured in bytes. + standard_object_count (int, optional): The number of standard storage type objects. + infrequent_access_storage (int, optional): The billing storage capacity of low-frequency storage type, in bytes. + infrequent_access_real_storage (int, optional): The actual storage capacity of low-frequency storage types, in bytes. + infrequent_access_object_count (int, optional): The number of low-frequency storage type objects. + archive_storage (int, optional): The billing storage capacity of archive storage type, in bytes. + archive_object_count (int, optional): The actual storage capacity of the archive storage type, in bytes. + cold_archive_storage (int, optional): The number of objects of archive storage type. + cold_archive_real_storage (int, optional): The billing storage capacity of cold archive storage type, in bytes. + cold_archive_object_count (int, optional): The actual storage capacity of the cold archive storage type, in bytes. + deep_cold_archive_storage (int, optional): The billing storage capacity of deep cold archive storage type, in bytes. + deep_cold_archive_real_storage (int, optional): The actual storage capacity of the deep cold archive storage type, in bytes. + deep_cold_archive_object_count (int, optional): The number of objects of the deep cold archive storage type. 
+        delete_marker_count (int, optional): The number of delete markers in the bucket.
+        """
+        super().__init__(**kwargs)
+        self.storage = storage
+        self.object_count = object_count
+        self.multi_part_upload_count = multi_part_upload_count
+        self.live_channel_count = live_channel_count
+        self.last_modified_time = last_modified_time
+        self.standard_storage = standard_storage
+        self.standard_object_count = standard_object_count
+        self.infrequent_access_storage = infrequent_access_storage
+        self.infrequent_access_real_storage = infrequent_access_real_storage
+        self.infrequent_access_object_count = infrequent_access_object_count
+        self.archive_storage = archive_storage
+        self.archive_real_storage = archive_real_storage
+        self.archive_object_count = archive_object_count
+        self.cold_archive_storage = cold_archive_storage
+        self.cold_archive_real_storage = cold_archive_real_storage
+        self.cold_archive_object_count = cold_archive_object_count
+        self.deep_cold_archive_storage = deep_cold_archive_storage
+        self.deep_cold_archive_real_storage = deep_cold_archive_real_storage
+        self.deep_cold_archive_object_count = deep_cold_archive_object_count
+        self.delete_marker_count = delete_marker_count
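+
+
+# Usage sketch (editor's illustration): reading bucket statistics. Assumes a
+# configured `client` with a `get_bucket_stat()` method
+# (see sample/get_bucket_stat.py).
+#
+#     stat = client.get_bucket_stat(GetBucketStatRequest(bucket='example-bucket'))
+#     print(f'{stat.object_count} objects, {stat.storage} bytes in total')
+
+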
+class GetBucketLocationRequest(serde.RequestModel):
+    """The request for the GetBucketLocation operation."""
+
+    _attribute_map = {
+        "bucket": {"tag": "input", "position": "host", "required": True},
+    }
+
+    def __init__(
+        self,
+        bucket: str = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        bucket (str, required): The name of the bucket.
+        """
+        super().__init__(**kwargs)
+        self.bucket = bucket
+
+
+class GetBucketLocationResult(serde.ResultModel):
+    """The result for the GetBucketLocation operation."""
+
+    _attribute_map = {
+        "location": {"tag": "xml", "rename": '.'},
+    }
+
+    _xml_map = {
+        "name": "LocationConstraint"
+    }
+
+    def __init__(
+        self,
+        location: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        location (str, optional): The region in which the bucket is located.
+        """
+        super().__init__(**kwargs)
+        self.location = location
+
+
+class SSERule(serde.Model):
+    """Information about the server-side encryption method."""
+
+    _attribute_map = {
+        "kms_master_key_id": {"tag": "xml", "rename": "KMSMasterKeyID"},
+        "sse_algorithm": {"tag": "xml", "rename": "SSEAlgorithm"},
+        "kms_data_encryption": {"tag": "xml", "rename": "KMSDataEncryption"},
+    }
+
+    _xml_map = {
+        "name": "ServerSideEncryptionRule"
+    }
+
+    def __init__(
+        self,
+        kms_master_key_id: Optional[str] = None,
+        sse_algorithm: Optional[str] = None,
+        kms_data_encryption: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        kms_master_key_id (str, optional): The customer master key (CMK) ID in use. A valid value is returned
+            only if you set SSEAlgorithm to KMS and specify the CMK ID.
+            In other cases, an empty value is returned.
+        sse_algorithm (str, optional): The server-side encryption method that is used by default.
+        kms_data_encryption (str, optional): The data encryption algorithm of objects. If this element is not
+            included in the response, the objects are encrypted by using AES256.
+            This element is valid only when the value of SSEAlgorithm is KMS.
+        """
+        super().__init__(**kwargs)
+        self.kms_master_key_id = kms_master_key_id
+        self.sse_algorithm = sse_algorithm
+        self.kms_data_encryption = kms_data_encryption
+
+
+class BucketPolicy(serde.Model):
+    """The container that stores the logging configuration."""
+
+    _attribute_map = {
+        "log_bucket": {"tag": "xml", "rename": "LogBucket"},
+        "log_prefix": {"tag": "xml", "rename": "LogPrefix"},
+    }
+
+    _xml_map = {
+        "name": "BucketPolicy"
+    }
+
+    def __init__(
+        self,
+        log_bucket: Optional[str] = None,
+        log_prefix: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        log_bucket (str, optional): The name of the bucket that stores the logs.
+        log_prefix (str, optional): The directory in which logs are stored.
+        """
+        super().__init__(**kwargs)
+        self.log_bucket = log_bucket
+        self.log_prefix = log_prefix
+
+
+class BucketInfo(serde.Model):
+    """BucketInfo defines Bucket information."""
+
+    _attribute_map = {
+        "name": {"tag": "xml", "rename": "Name"},
+        "access_monitor": {"tag": "xml", "rename": "AccessMonitor"},
+        "location": {"tag": "xml", "rename": "Location"},
+        "creation_date": {"tag": "xml", "rename": "CreationDate", "type": "datetime"},
+        "extranet_endpoint": {"tag": "xml", "rename": "ExtranetEndpoint"},
+        "intranet_endpoint": {"tag": "xml", "rename": "IntranetEndpoint"},
+        "acl": {"tag": "xml", "rename": "AccessControlList/Grant"},
+        "data_redundancy_type": {"tag": "xml", "rename": "DataRedundancyType"},
+        "owner": {"tag": "xml", "rename": "Owner", "type": "Owner"},
+        "storage_class": {"tag": "xml", "rename": "StorageClass"},
+        "resource_group_id": {"tag": "xml", "rename": "ResourceGroupId"},
+        "sse_rule": {"tag": "xml", "rename": "ServerSideEncryptionRule", "type": "SSERule"},
+        "versioning": {"tag": "xml", "rename": "Versioning"},
+        "transfer_acceleration": {"tag": "xml", "rename": "TransferAcceleration"},
+        "cross_region_replication": {"tag": "xml", "rename": "CrossRegionReplication"},
+        "bucket_policy": {"tag": "xml", "rename": "BucketPolicy", "type": "BucketPolicy"},
+        "comment": {"tag": "xml", "rename": "Comment"},
+        "block_public_access": {"tag": "xml", "rename": "BlockPublicAccess", "type": "bool"},
+    }
+
+    _dependency_map = {
+        "Owner": {"new": lambda: Owner()},
+        "SSERule": {"new": lambda: SSERule()},
+        "BucketPolicy": {"new": lambda: BucketPolicy()},
+    }
+
+    _xml_map = {
+        "name": "Bucket"
+    }
+
+    def __init__(
+        self,
+        name: Optional[str] = None,
+        access_monitor: Optional[str] = None,
+        location: Optional[str] = None,
+        creation_date: Optional[datetime.datetime] = None,
+        extranet_endpoint: Optional[str] = None,
+        intranet_endpoint: Optional[str] = None,
+        acl: Optional[str] = None,
+        data_redundancy_type: Optional[str] = None,
+        owner: Optional[Owner] = None,
+        storage_class: Optional[str] = None,
+        resource_group_id: Optional[str] = None,
+        sse_rule: Optional[SSERule] = None,
+        versioning: Optional[str] = None,
+        transfer_acceleration: Optional[str] = None,
+        cross_region_replication: Optional[str] = None,
+        bucket_policy: Optional[BucketPolicy] = None,
+        comment: Optional[str] = None,
+        block_public_access: Optional[bool] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        name (str, optional): The name of the bucket.
+        access_monitor (str, optional): Indicates whether access tracking is enabled for the bucket.
+        location (str, optional): The region in which the bucket is located.
+        creation_date (datetime, optional): The time when the bucket is created. The time is in UTC.
+        extranet_endpoint (str, optional): The public endpoint that is used to access the bucket over the Internet.
+        intranet_endpoint (str, optional): The internal endpoint that is used to access the bucket from
+            Elastic Compute Service (ECS) instances in the same region.
+        acl (str, optional): The container that stores the access control list (ACL) information about the bucket.
+        data_redundancy_type (str, optional): The disaster recovery type of the bucket.
+        owner (Owner, optional): The container that stores information about the bucket owner.
+        storage_class (str, optional): The storage class of the bucket.
+        resource_group_id (str, optional): The ID of the resource group to which the bucket belongs.
+        sse_rule (SSERule, optional): The container that stores the server-side encryption method.
+        versioning (str, optional): Indicates whether versioning is enabled for the bucket.
+        transfer_acceleration (str, optional): Indicates whether transfer acceleration is enabled for the bucket.
+        cross_region_replication (str, optional): Indicates whether cross-region replication (CRR) is enabled for the bucket.
+        bucket_policy (BucketPolicy, optional): The container that stores the logging configuration.
+        comment (str, optional): The comments of the bucket.
+        block_public_access (bool, optional): Indicates whether Block Public Access is enabled for the bucket.
+        """
+        super().__init__(**kwargs)
+        self.name = name
+        self.access_monitor = access_monitor
+        self.location = location
+        self.creation_date = creation_date
+        self.extranet_endpoint = extranet_endpoint
+        self.intranet_endpoint = intranet_endpoint
+        self.acl = acl
+        self.data_redundancy_type = data_redundancy_type
+        self.owner = owner
+        self.storage_class = storage_class
+        self.resource_group_id = resource_group_id
+        self.sse_rule = sse_rule
+        self.versioning = versioning
+        self.transfer_acceleration = transfer_acceleration
+        self.cross_region_replication = cross_region_replication
+        self.bucket_policy = bucket_policy
+        self.comment = comment
+        self.block_public_access = block_public_access
+
+
+class GetBucketInfoRequest(serde.RequestModel):
+    """The request for the GetBucketInfo operation."""
+
+    _attribute_map = {
+        "bucket": {"tag": "input", "position": "host", "required": True},
+    }
+
+    def __init__(
+        self,
+        bucket: str = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        bucket (str, required): The name of the bucket.
+        """
+        super().__init__(**kwargs)
+        self.bucket = bucket
+
+
+class GetBucketInfoResult(serde.ResultModel):
+    """The result for the GetBucketInfo operation."""
+
+    _attribute_map = {
+        "bucket_info": {"tag": "xml", "rename": 'Bucket', "type": "BucketInfo"},
+    }
+
+    _dependency_map = {
+        "BucketInfo": {"new": lambda: BucketInfo()},
+    }
+
+    _xml_map = {
+        "name": "BucketInfo"
+    }
+
+    def __init__(
+        self,
+        bucket_info: Optional[BucketInfo] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        bucket_info (BucketInfo, optional): BucketInfo defines Bucket information.
+        """
+        super().__init__(**kwargs)
+        self.bucket_info = bucket_info
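+
+
+# Usage sketch (editor's illustration): fetching bucket metadata. Assumes a
+# configured `client` with a `get_bucket_info()` method
+# (see sample/get_bucket_info.py).
+#
+#     info = client.get_bucket_info(GetBucketInfoRequest(bucket='example-bucket'))
+#     bucket = info.bucket_info
+#     print(bucket.location, bucket.storage_class, bucket.versioning)
+
+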
+class VersioningConfiguration(serde.Model):
+    """The versioning state of the bucket. Valid values: Enabled, Suspended."""
+
+    _attribute_map = {
+        "status": {"tag": "xml", "rename": "Status"},
+    }
+
+    _xml_map = {
+        "name": "VersioningConfiguration"
+    }
+
+    def __init__(
+        self,
+        status: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        status (str, optional): The versioning state of the bucket.
+            Valid values: Enabled, Suspended
+        """
+        super().__init__(**kwargs)
+        self.status = status
+
+
+class PutBucketVersioningRequest(serde.RequestModel):
+    """The request for the PutBucketVersioning operation."""
+
+    _attribute_map = {
+        "bucket": {"tag": "input", "position": "host", "required": True},
+        "versioning_configuration": {"tag": "input", "position": "body", "rename": "VersioningConfiguration", "type": "xml"},
+    }
+
+    _xml_map = {
+        "name": "VersioningConfiguration"
+    }
+
+    def __init__(
+        self,
+        bucket: str = None,
+        versioning_configuration: Optional["VersioningConfiguration"] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        bucket (str, required): The name of the bucket.
+        versioning_configuration (VersioningConfiguration, optional): The container that stores the versioning state.
+        """
+        super().__init__(**kwargs)
+        self.bucket = bucket
+        self.versioning_configuration = versioning_configuration
+
+
+class PutBucketVersioningResult(serde.ResultModel):
+    """The result for the PutBucketVersioning operation."""
+
+
+class GetBucketVersioningRequest(serde.RequestModel):
+    """The request for the GetBucketVersioning operation."""
+
+    _attribute_map = {
+        "bucket": {"tag": "input", "position": "host", "required": True},
+    }
+
+    def __init__(
+        self,
+        bucket: str = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        bucket (str, required): The name of the bucket.
+        """
+        super().__init__(**kwargs)
+        self.bucket = bucket
+
+
+class GetBucketVersioningResult(serde.ResultModel):
+    """The result for the GetBucketVersioning operation."""
+
+    _attribute_map = {
+        "version_status": {"tag": "xml", "rename": "Status"},
+    }
+
+    _xml_map = {
+        "name": "VersioningConfiguration"
+    }
+
+    def __init__(
+        self,
+        version_status: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        version_status (str, optional): The versioning state of the bucket. Valid values: Enabled, Suspended
+        """
+        super().__init__(**kwargs)
+        self.version_status = version_status
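+
+
+# Usage sketch (editor's illustration): enabling and inspecting versioning.
+# Assumes a configured `client` exposing `put_bucket_versioning()` and
+# `get_bucket_versioning()` (see sample/put_bucket_version.py and
+# sample/get_bucket_version.py).
+#
+#     client.put_bucket_versioning(PutBucketVersioningRequest(
+#         bucket='example-bucket',
+#         versioning_configuration=VersioningConfiguration(status='Enabled')))
+#     result = client.get_bucket_versioning(
+#         GetBucketVersioningRequest(bucket='example-bucket'))
+#     print(result.version_status)  # 'Enabled' or 'Suspended'
+
+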
+class ListObjectVersionsRequest(serde.RequestModel):
+    """The request for the ListObjectVersions operation."""
+
+    _attribute_map = {
+        "bucket": {"tag": "input", "position": "host", "required": True},
+        "delimiter": {"tag": "input", "position": "query", "rename": "delimiter"},
+        "key_marker": {"tag": "input", "position": "query", "rename": "key-marker"},
+        "version_id_marker": {"tag": "input", "position": "query", "rename": "version-id-marker"},
+        "max_keys": {"tag": "input", "position": "query", "rename": "max-keys", "type": "int"},
+        "prefix": {"tag": "input", "position": "query", "rename": "prefix"},
+        "encoding_type": {"tag": "input", "position": "query", "rename": "encoding-type"},
+        "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"},
+    }
+
+    def __init__(
+        self,
+        bucket: str = None,
+        delimiter: Optional[str] = None,
+        key_marker: Optional[str] = None,
+        version_id_marker: Optional[str] = None,
+        max_keys: Optional[int] = None,
+        prefix: Optional[str] = None,
+        encoding_type: Optional[str] = None,
+        request_payer: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        bucket (str, required): The name of the bucket.
+        delimiter (str, optional): The character that is used to group objects by name.
+            If you specify the delimiter parameter in the request, the response contains
+            the CommonPrefixes parameter. The objects whose names contain the same string
+            from the prefix to the next occurrence of the delimiter are grouped
+            as a single result element in CommonPrefixes.
+        key_marker (str, optional): Specifies that objects whose names are alphabetically after the value of the key-marker parameter are returned.
+            This parameter can be specified together with version-id-marker.
+            By default, this parameter is left empty.
+        version_id_marker (str, optional): Specifies that the versions created before the version specified by version-id-marker for the object
+            whose name is specified by key-marker are returned by creation time in descending order.
+            By default, if this parameter is not specified, the results are returned from the latest
+            version of the object whose name is alphabetically after the value of key-marker.
+        max_keys (int, optional): The maximum number of objects that you want to return.
+            If the list operation cannot be completed in one response because the max-keys
+            parameter is specified, the NextKeyMarker and NextVersionIdMarker elements are
+            included in the response as the markers for the next list operation.
+        prefix (str, optional): The prefix that the names of the returned objects must contain.
+        encoding_type (str, optional): The encoding type of the content in the response. Valid value: url
+        request_payer (str, optional): To indicate that the requester is aware that the request
+            and data download will incur costs.
+        """
+        super().__init__(**kwargs)
+        self.bucket = bucket
+        self.delimiter = delimiter
+        self.key_marker = key_marker
+        self.version_id_marker = version_id_marker
+        self.max_keys = max_keys
+        self.prefix = prefix
+        self.encoding_type = encoding_type
+        self.request_payer = request_payer
+
+
+class ObjectVersionProperties(serde.Model):
+    """Stores the metadata of the object version."""
+
+    _attribute_map = {
+        "key": {"tag": "xml", "rename": "Key"},
+        "version_id": {"tag": "xml", "rename": "VersionId"},
+        "is_latest": {"tag": "xml", "rename": "IsLatest", "type": "bool"},
+        "object_type": {"tag": "xml", "rename": "Type"},
+        "size": {"tag": "xml", "rename": "Size", "type": "int"},
+        "etag": {"tag": "xml", "rename": "ETag"},
+        "last_modified": {"tag": "xml", "rename": "LastModified", "type": "datetime"},
+        "storage_class": {"tag": "xml", "rename": "StorageClass"},
+        "owner": {"tag": "xml", "rename": "Owner", "type": "Owner"},
+        "restore_info": {"tag": "xml", "rename": "RestoreInfo"},
+    }
+
+    _dependency_map = {
+        "Owner": {"new": lambda: Owner()},
+    }
+
+    _xml_map = {
+        "name": "Version"
+    }
+
+    def __init__(
+        self,
+        key: Optional[str] = None,
+        version_id: Optional[str] = None,
+        is_latest: Optional[bool] = None,
+        object_type: Optional[str] = None,
+        size: Optional[int] = None,
+        etag: Optional[str] = None,
+        last_modified: Optional[datetime.datetime] = None,
+        storage_class: Optional[str] = None,
+        owner: Optional[Owner] = None,
+        restore_info: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        key (str, optional): The name of the object.
+        version_id (str, optional): The version ID of the object.
+        is_latest (bool, optional): Indicates whether the version is the current version.
+        object_type (str, optional): The type of the object.
+            Valid values: Normal, Multipart and Appendable
+        size (int, optional): The size of the returned object. Unit: bytes.
+        etag (str, optional): The entity tag (ETag). An ETag is created when an object is created to
+            identify the content of the object.
+        last_modified (datetime, optional): The time when the returned objects were last modified.
+        storage_class (str, optional): The storage class of the object.
+        owner (Owner, optional): The container that stores information about the bucket owner.
+        restore_info (str, optional): The restoration status of the object.
+        """
+        super().__init__(**kwargs)
+        self.key = key
+        self.version_id = version_id
+        self.is_latest = is_latest
+        self.object_type = object_type
+        self.size = size
+        self.etag = etag
+        self.last_modified = last_modified
+        self.storage_class = storage_class
+        self.owner = owner
+        self.restore_info = restore_info
+
+
+class DeleteMarkerProperties(serde.Model):
+    """The container that stores delete markers."""
+
+    _attribute_map = {
+        "key": {"tag": "xml", "rename": "Key"},
+        "version_id": {"tag": "xml", "rename": "VersionId"},
+        "is_latest": {"tag": "xml", "rename": "IsLatest", "type": "bool"},
+        "last_modified": {"tag": "xml", "rename": "LastModified", "type": "datetime"},
+        "owner": {"tag": "xml", "rename": "Owner", "type": "Owner"},
+    }
+
+    _dependency_map = {
+        "Owner": {"new": lambda: Owner()},
+    }
+
+    _xml_map = {
+        "name": "DeleteMarker"
+    }
+
+    def __init__(
+        self,
+        key: Optional[str] = None,
+        version_id: Optional[str] = None,
+        is_latest: Optional[bool] = None,
+        last_modified: Optional[datetime.datetime] = None,
+        owner: Optional[Owner] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        key (str, optional): The name of the object.
+        version_id (str, optional): The version ID of the object.
+        is_latest (bool, optional): Indicates whether the version is the current version.
+        last_modified (datetime, optional): The time when the returned objects were last modified.
+        owner (Owner, optional): The container that stores information about the bucket owner.
+        """
+        super().__init__(**kwargs)
+        self.key = key
+        self.version_id = version_id
+        self.is_latest = is_latest
+        self.last_modified = last_modified
+        self.owner = owner
+
+
+class ListObjectVersionsResult(serde.ResultModel):
+    """The result for the ListObjectVersions operation."""
+
+    _attribute_map = {
+        "name": {"tag": "xml", "rename": "Name"},
+        "key_marker": {"tag": "xml", "rename": "KeyMarker"},
+        "next_key_marker": {"tag": "xml", "rename": "NextKeyMarker"},
+        "version_id_marker": {"tag": "xml", "rename": "VersionIdMarker"},
+        "next_version_id_marker": {"tag": "xml", "rename": "NextVersionIdMarker"},
+        "prefix": {"tag": "xml", "rename": "Prefix"},
+        "max_keys": {"tag": "xml", "rename": "MaxKeys", "type": "int"},
+        "delimiter": {"tag": "xml", "rename": "Delimiter"},
+        "is_truncated": {"tag": "xml", "rename": "IsTruncated", "type": "bool"},
+        "encoding_type": {"tag": "xml", "rename": "EncodingType"},
+        "version": {"tag": "xml", "rename": "Version", "type": "[ObjectVersionProperties]"},
+        "delete_marker": {"tag": "xml", "rename": "DeleteMarker", "type": "[DeleteMarkerProperties]"},
+        "common_prefixes": {"tag": "xml", "rename": "CommonPrefixes", "type": "[CommonPrefix]"},
+    }
+
+    _dependency_map = {
+        "ObjectVersionProperties": {"new": lambda: ObjectVersionProperties()},
+        "DeleteMarkerProperties": {"new": lambda: DeleteMarkerProperties()},
+        "CommonPrefix": {"new": lambda: CommonPrefix()},
+    }
+
+    _xml_map = {
+        "name": "ListVersionsResult"
+    }
+
+    def __init__(
+        self,
+        name: Optional[str] = None,
+        key_marker: Optional[str] = None,
+        next_key_marker: Optional[str] = None,
+        version_id_marker: Optional[str] = None,
+        next_version_id_marker: Optional[str] = None,
+        prefix: Optional[str] = None,
+        max_keys: Optional[int] = None,
+        delimiter: Optional[str] = None,
+        is_truncated: Optional[bool] = None,
+        encoding_type: Optional[str] = None,
+        version: Optional[List[ObjectVersionProperties]] = None,
+        delete_marker: Optional[List[DeleteMarkerProperties]] = None,
+        common_prefixes: Optional[List[CommonPrefix]] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        name (str, optional): The name of the bucket.
+        key_marker (str, optional): Indicates the object from which the ListObjectVersions (GetBucketVersions) operation starts.
+        next_key_marker (str, optional): If not all results are returned for the request, the NextKeyMarker parameter is included
+            in the response to indicate the key-marker value of the next ListObjectVersions (GetBucketVersions) request.
+        version_id_marker (str, optional): The version from which the ListObjectVersions (GetBucketVersions) operation starts.
+        next_version_id_marker (str, optional): If not all results are returned for the request, the NextVersionIdMarker parameter is included in
+            the response to indicate the version-id-marker value of the next ListObjectVersions (GetBucketVersions) request.
+        prefix (str, optional): The prefix contained in the returned object names.
+        max_keys (int, optional): The maximum number of returned objects in the response.
+        delimiter (str, optional): The character that is used to group objects by name.
+        is_truncated (bool, optional): Indicates whether the returned results are truncated.
+            true indicates that not all results are returned this time.
+            false indicates that all results are returned this time.
+        encoding_type (str, optional): The encoding type of the content in the response.
+        version ([ObjectVersionProperties], optional): The container that stores the versions of objects, excluding delete markers.
+        delete_marker ([DeleteMarkerProperties], optional): The container that stores delete markers.
+        common_prefixes ([CommonPrefix], optional): If the Delimiter parameter is specified in the request,
+            the response contains the CommonPrefixes element.
+        """
+        super().__init__(**kwargs)
+        self.name = name
+        self.key_marker = key_marker
+        self.next_key_marker = next_key_marker
+        self.version_id_marker = version_id_marker
+        self.next_version_id_marker = next_version_id_marker
+        self.prefix = prefix
+        self.max_keys = max_keys
+        self.delimiter = delimiter
+        self.is_truncated = is_truncated
+        self.encoding_type = encoding_type
+        self.version = version
+        self.delete_marker = delete_marker
+        self.common_prefixes = common_prefixes
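+
+
+# Usage sketch (editor's illustration): walking all versions and delete
+# markers under a prefix. Assumes a configured `client` with a
+# `list_object_versions()` method (see sample/list_object_versions.py).
+#
+#     key_marker, version_marker = None, None
+#     while True:
+#         result = client.list_object_versions(ListObjectVersionsRequest(
+#             bucket='example-bucket', prefix='logs/',
+#             key_marker=key_marker, version_id_marker=version_marker))
+#         for v in result.version or []:
+#             print('version:', v.key, v.version_id, v.is_latest)
+#         for m in result.delete_marker or []:
+#             print('delete marker:', m.key, m.version_id)
+#         if not result.is_truncated:
+#             break
+#         key_marker = result.next_key_marker
+#         version_marker = result.next_version_id_marker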
diff --git a/alibabacloud_oss_v2/models/enums.py b/alibabacloud_oss_v2/models/enums.py
new file mode 100644
index 0000000..d1a3c6d
--- /dev/null
+++ b/alibabacloud_oss_v2/models/enums.py
@@ -0,0 +1,109 @@
+"""Enum for operation APIs"""
+from enum import Enum
+
+
+class BucketACLType(str, Enum):
+    """
+    The access control list (ACL) of the bucket.
+    """
+
+    PRIVATE = 'private'
+    """
+    Only the bucket owner can perform read and write operations on objects in the bucket.
+    Other users cannot access the objects in the bucket.
+    """
+
+    PUBLICREAD = 'public-read'
+    """
+    Only the bucket owner can write data to objects in the bucket.
+    Other users, including anonymous users, can only read objects in the bucket.
+    """
+
+    PUBLICREADWRITE = 'public-read-write'
+    """
+    All users, including anonymous users, can perform read and write operations on the bucket.
+    """
+
+
+class StorageClassType(str, Enum):
+    """
+    The storage class of the bucket.
+    """
+
+    STANDARD = 'Standard'
+    """
+    Standard provides highly reliable, highly available and high-performance object storage
+    for data that is frequently accessed.
+    """
+
+    IA = 'IA'
+    """
+    IA provides highly durable storage at lower prices compared with Standard.
+    It has a minimum billable size of 64 KB and a minimum billable storage duration of 30 days.
+    """
+
+    ARCHIVE = 'Archive'
+    """
+    Archive provides high-durability storage at lower prices compared with Standard and IA.
+    It has a minimum billable size of 64 KB and a minimum billable storage duration of 60 days.
+    """
+
+    COLDARCHIVE = 'ColdArchive'
+    """
+    Cold Archive provides highly durable storage at lower prices compared with Archive.
+    It has a minimum billable size of 64 KB and a minimum billable storage duration of 180 days.
+    """
+
+    DEEPCOLDARCHIVE = 'DeepColdArchive'
+    """
+    Deep Cold Archive provides highly durable storage at lower prices compared with Cold Archive.
+    It has a minimum billable size of 64 KB and a minimum billable storage duration of 180 days.
+    """
+
+
+class DataRedundancyType(str, Enum):
+    """
+    The redundancy type of the bucket.
+    """
+
+    LRS = 'LRS'
+    """
+    Locally redundant storage (LRS) stores copies of each object across different devices
+    in the same zone. This ensures data reliability and availability even if two storage devices
+    are damaged at the same time.
+    """
+
+    ZRS = 'ZRS'
+    """
+    Zone-redundant storage (ZRS) uses the multi-zone mechanism to distribute user data across
+    multiple zones in the same region. If one zone becomes unavailable, you can continue to
+    access the data that is stored in other zones.
+    """
+
+
+class ObjectACLType(str, Enum):
+    """
+    The access control list (ACL) of the object.
+    """
+
+    PRIVATE = 'private'
+    """
+    Only the object owner is allowed to perform read and write operations on the object.
+    Other users cannot access the object.
+    """
+
+    PUBLICREAD = 'public-read'
+    """
+    Only the object owner can write data to the object.
+    Other users, including anonymous users, can only read the object.
+    """
+
+    PUBLICREADWRITE = 'public-read-write'
+    """
+    All users, including anonymous users, can perform read and write operations on the object.
+    """
+
+    DEFAULT = 'default'
+    """
+    The ACL of the object is the same as that of the bucket in which the object is stored.
+    """
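+
+
+# Usage sketch (editor's illustration): the enums subclass `str`, so members
+# can be passed anywhere the request models expect a plain string.
+#
+#     from alibabacloud_oss_v2.models.enums import BucketACLType, StorageClassType
+#
+#     acl = BucketACLType.PRIVATE          # == 'private'
+#     storage = StorageClassType.STANDARD  # == 'Standard'
+#     assert isinstance(acl, str) and acl == 'private'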
diff --git a/alibabacloud_oss_v2/models/object_basic.py b/alibabacloud_oss_v2/models/object_basic.py
new file mode 100644
index 0000000..4476a69
--- /dev/null
+++ b/alibabacloud_oss_v2/models/object_basic.py
@@ -0,0 +1,2810 @@
+"""Models for object operation APIs"""
+# pylint: disable=too-few-public-methods, too-many-instance-attributes, unnecessary-lambda
+# pylint: disable=super-init-not-called, too-many-lines, line-too-long, too-many-arguments
+# pylint: disable=too-many-locals
+import datetime
+from typing import Optional, Dict, Any, MutableMapping, List
+from .. import serde
+from ..types import BodyType, StreamBody
+from .bucket_basic import Owner
+
+
+class PutObjectRequest(serde.RequestModel):
+    """The request for the PutObject operation."""
+
+    _attribute_map = {
+        "bucket": {"tag": "input", "position": "host", "required": True},
+        "key": {"tag": "input", "position": "path", "required": True},
+        "acl": {"tag": "input", "position": "header", "rename": "x-oss-object-acl"},
+        "storage_class": {"tag": "input", "position": "header", "rename": "x-oss-storage-class"},
+        "metadata": {"tag": "input", "position": "header", "rename": "x-oss-meta-", "type": "dict,usermeta"},
+        "cache_control": {"tag": "input", "position": "header", "rename": "Cache-Control"},
+        "content_disposition": {"tag": "input", "position": "header", "rename": "Content-Disposition"},
+        "content_encoding": {"tag": "input", "position": "header", "rename": "Content-Encoding"},
+        "content_length": {"tag": "input", "position": "header", "rename": "Content-Length", "type": "int"},
+        "content_md5": {"tag": "input", "position": "header", "rename": "Content-MD5"},
+        "content_type": {"tag": "input", "position": "header", "rename": "Content-Type"},
+        "expires": {"tag": "input", "position": "header", "rename": "Expires"},
+        "server_side_encryption": {"tag": "input", "position": "header", "rename": "x-oss-server-side-encryption"},
+        "server_side_data_encryption": {"tag": "input", "position": "header", "rename": "x-oss-server-side-data-encryption"},
+        "sse_kms_key_id": {"tag": "input", "position": "header", "rename": "x-oss-server-side-encryption-key-id"},
+        "tagging": {"tag": "input", "position": "header", "rename": "x-oss-tagging"},
+        "callback": {"tag": "input", "position": "header", "rename": "x-oss-callback"},
+        "callback_var": {"tag": "input", "position": "header", "rename": "x-oss-callback-var"},
+        "forbid_overwrite": {"tag": "input", "position": "header", "rename": "x-oss-forbid-overwrite", "type": "bool"},
+        "traffic_limit": {"tag": "input", "position": "header", "rename": "x-oss-traffic-limit", "type": "int"},
+        "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"},
+        "body": {"tag": "input", "position": "body"},
+        "progress_fn": {},
+    }
+
+    def __init__(
+        self,
+        bucket: str = None,
+        key: str = None,
+        acl: Optional[str] = None,
+        storage_class: Optional[str] = None,
+        metadata: Optional[MutableMapping] = None,
+        cache_control: Optional[str] = None,
+        content_disposition: Optional[str] = None,
+        content_encoding: Optional[str] = None,
+        content_length: Optional[int] = None,
+        content_md5: Optional[str] = None,
+        content_type: Optional[str] = None,
+        expires: Optional[str] = None,
+        server_side_encryption: Optional[str] = None,
+        server_side_data_encryption: Optional[str] = None,
+        sse_kms_key_id: Optional[str] = None,
+        tagging: Optional[str] = None,
+        callback: Optional[str] = None,
+        callback_var: Optional[str] = None,
+        forbid_overwrite: Optional[bool] = None,
+        traffic_limit: Optional[int] = None,
+        request_payer: Optional[str] = None,
+        body: Optional[BodyType] = None,
+        progress_fn: Optional[Any] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        Args:
+            bucket (str, required): The name of the bucket.
+            key (str, required): The name of the object.
+            acl (str, optional): The access control list (ACL) of the object.
+            storage_class (str, optional): The storage class of the object.
+            metadata (MutableMapping, optional): The metadata of the object that you want to upload.
+            cache_control (str, optional): The caching behavior of the web page when the object is downloaded.
+            content_disposition (str, optional): The method that is used to access the object.
+            content_encoding (str, optional): The method that is used to encode the object.
+            content_length (int, optional): The size of the data in the HTTP message body. Unit: bytes.
+            content_md5 (str, optional): The MD5 hash of the object that you want to upload.
+            content_type (str, optional): A standard MIME type describing the format of the contents.
+            expires (str, optional): The expiration time of the cache in UTC.
+            server_side_encryption (str, optional): The encryption method on the server side when an object is created.
+                Valid values: AES256 and KMS
+            server_side_data_encryption (str, optional): The encryption algorithm that is used to encrypt the object.
+                This header is valid only when the x-oss-server-side-encryption header is set to KMS.
+            sse_kms_key_id (str, optional): The ID of the customer master key (CMK) that is managed by Key Management Service (KMS).
+            tagging (str, optional): The tags that are specified for the object by using a key-value pair.
+                You can specify multiple tags for an object. Example: TagA=A&TagB=B.
+            callback (str, optional): A callback parameter is a Base64-encoded string that contains multiple fields in the JSON format.
+            callback_var (str, optional): Configure custom parameters by using the callback-var parameter.
+            forbid_overwrite (bool, optional): Specifies whether the object that is uploaded by calling the PutObject operation
+                overwrites an existing object that has the same name.
+            traffic_limit (int, optional): The limit on the upload speed. The speed limit value ranges
+                from 245760 to 838860800, in bit/s.
+            request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs.
+            body (BodyType, optional): Object data.
+            progress_fn (Any, optional): Progress callback function.
+        """
+        super().__init__(**kwargs)
+        self.bucket = bucket
+        self.key = key
+        self.acl = acl
+        self.storage_class = storage_class
+        self.metadata = metadata
+        self.cache_control = cache_control
+        self.content_disposition = content_disposition
+        self.content_encoding = content_encoding
+        self.content_length = content_length
+        self.content_md5 = content_md5
+        self.content_type = content_type
+        self.expires = expires
+        self.server_side_encryption = server_side_encryption
+        self.server_side_data_encryption = server_side_data_encryption
+        self.sse_kms_key_id = sse_kms_key_id
+        self.tagging = tagging
+        self.callback = callback
+        self.callback_var = callback_var
+        self.forbid_overwrite = forbid_overwrite
+        self.traffic_limit = traffic_limit
+        self.request_payer = request_payer
+        self.body = body
+        self.progress_fn = progress_fn
+
+
+class PutObjectResult(serde.ResultModel):
+    """The result for the PutObject operation."""
+
+    _attribute_map = {
+        "content_md5": {"tag": "output", "position": "header", "rename": "Content-MD5"},
+        "etag": {"tag": "output", "position": "header", "rename": "ETag"},
+        "hash_crc64": {"tag": "output", "position": "header", "rename": "x-oss-hash-crc64ecma"},
+        "version_id": {"tag": "output", "position": "header", "rename": "x-oss-version-id"},
+        "callback_result": {"tag": "output", "position": "body", "type": "dict,json"},
+    }
+
+    def __init__(
+        self,
+        content_md5: Optional[str] = None,
+        etag: Optional[str] = None,
+        hash_crc64: Optional[str] = None,
+        version_id: Optional[str] = None,
+        callback_result: Optional[Dict] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        Args:
+            content_md5 (str, optional): Content-Md5 for the uploaded object.
+            etag (str, optional): Entity tag for the uploaded object.
+            hash_crc64 (str, optional): The 64-bit CRC value of the object.
+                This value is calculated based on the ECMA-182 standard.
+            version_id (str, optional): Version of the object.
+            callback_result (dict, optional): Callback result,
+                it is valid only when the callback is set.
+        """
+        super().__init__(**kwargs)
+        self.content_md5 = content_md5
+        self.etag = etag
+        self.hash_crc64 = hash_crc64
+        self.version_id = version_id
+        self.callback_result = callback_result
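+
+
+# Usage sketch (editor's illustration): uploading an object. `body` accepts
+# bytes or a file-like object (see BodyType). Assumes a configured `client`
+# with a `put_object()` method (see sample/put_object.py).
+#
+#     result = client.put_object(PutObjectRequest(
+#         bucket='example-bucket',
+#         key='docs/readme.txt',
+#         body=b'hello oss',
+#         storage_class='Standard',
+#         metadata={'author': 'alice'},
+#     ))
+#     print(result.etag, result.hash_crc64)
+
+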
+class HeadObjectRequest(serde.RequestModel):
+    """The request for the HeadObject operation."""
+
+    _attribute_map = {
+        "bucket": {"tag": "input", "position": "host", "required": True},
+        "key": {"tag": "input", "position": "path", "required": True},
+        "version_id": {"tag": "input", "position": "query", "rename": "versionId"},
+        "if_match": {"tag": "input", "position": "header", "rename": "If-Match"},
+        "if_none_match": {"tag": "input", "position": "header", "rename": "If-None-Match"},
+        "if_modified_since": {"tag": "input", "position": "header", "rename": "If-Modified-Since"},
+        "if_unmodified_since": {"tag": "input", "position": "header", "rename": "If-Unmodified-Since"},
+        "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"},
+    }
+
+    def __init__(
+        self,
+        bucket: str = None,
+        key: str = None,
+        version_id: Optional[str] = None,
+        if_match: Optional[str] = None,
+        if_none_match: Optional[str] = None,
+        if_modified_since: Optional[str] = None,
+        if_unmodified_since: Optional[str] = None,
+        request_payer: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        Args:
+            bucket (str, required): The name of the bucket.
+            key (str, required): The name of the object.
+            version_id (str, optional): The version ID of the object.
+            if_match (str, optional): If the ETag specified in the request matches the ETag value of the object,
+                the object and 200 OK are returned. Otherwise, 412 Precondition Failed is returned.
+            if_none_match (str, optional): If the ETag specified in the request does not match the ETag value of the object,
+                the object and 200 OK are returned. Otherwise, 304 Not Modified is returned.
+            if_modified_since (str, optional): If the time specified in this header is earlier
+                than the object modified time or is invalid, the object and 200 OK are returned.
+                Otherwise, 304 Not Modified is returned. The time must be in GMT. Example: Fri, 13 Nov 2015 14:47:53 GMT.
+            if_unmodified_since (str, optional): If the time specified in this header is
+                the same as or later than the object modified time, the object and 200 OK are returned.
+                Otherwise, 412 Precondition Failed is returned. The time must be in GMT. Example: Fri, 13 Nov 2015 14:47:53 GMT.
+            request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs.
+        """
+        super().__init__(**kwargs)
+        self.bucket = bucket
+        self.key = key
+        self.version_id = version_id
+        self.if_match = if_match
+        self.if_none_match = if_none_match
+        self.if_modified_since = if_modified_since
+        self.if_unmodified_since = if_unmodified_since
+        self.request_payer = request_payer
+
+
+class HeadObjectResult(serde.ResultModel):
+    """The result for the HeadObject operation."""
+
+    _attribute_map = {
+        "content_length": {"tag": "output", "position": "header", "rename": "Content-Length", "type": "int"},
+        "content_type": {"tag": "output", "position": "header", "rename": "Content-Type"},
+        "etag": {"tag": "output", "position": "header", "rename": "ETag"},
+        "last_modified": {"tag": "output", "position": "header", "rename": "Last-Modified", "type": "datetime,httptime"},
+        "content_md5": {"tag": "output", "position": "header", "rename": "Content-MD5"},
+        "metadata": {"tag": "output", "position": "header", "rename": "x-oss-meta-", "type": "dict,usermeta"},
+        "cache_control": {"tag": "output", "position": "header", "rename": "Cache-Control"},
+        "content_disposition": {"tag": "output", "position": "header", "rename": "Content-Disposition"},
+        "content_encoding": {"tag": "output", "position": "header", "rename": "Content-Encoding"},
+        "expires": {"tag": "output", "position": "header", "rename": "Expires"},
+        "hash_crc64": {"tag": "output", "position": "header", "rename": "x-oss-hash-crc64ecma"},
+        "storage_class": {"tag": "output", "position": "header", "rename": "x-oss-storage-class"},
+        "object_type": {"tag": "output", "position": "header", "rename": "x-oss-object-type"},
+        "version_id": {"tag": "output", "position": "header", "rename": "x-oss-version-id"},
+        "tagging_count": {"tag": "output", "position": "header", "rename": "x-oss-tagging-count", "type": "int"},
+        "server_side_encryption": {"tag": "output", "position": "header", "rename": "x-oss-server-side-encryption"},
+        "server_side_data_encryption": {"tag": "output", "position": "header", "rename": "x-oss-server-side-data-encryption"},
+        "sse_kms_key_id": {"tag": "output", "position": "header", "rename": "x-oss-server-side-encryption-key-id"},
+        "next_append_position": {"tag": "output", "position": "header", "rename": "x-oss-next-append-position", "type": "int"},
+        "expiration": {"tag": "output", "position": "header", "rename": "x-oss-expiration"},
+        "restore": {"tag": "output", "position": "header", "rename": "x-oss-restore"},
+        "process_status": {"tag": "output", "position": "header", "rename": "x-oss-process-status"},
"header", "rename": "x-oss-process-status"}, + "request_charged": {"tag": "output", "position": "header", "rename": "x-oss-request-charged"}, + "allow_origin": {"tag": "output", "position": "header", "rename": "Access-Control-Allow-Origin"}, + "allow_methods": {"tag": "output", "position": "header", "rename": "Access-Control-Allow-Methods"}, + "allow_age": {"tag": "output", "position": "header", "rename": "Access-Control-Allow-Age"}, + "allow_headers": {"tag": "output", "position": "header", "rename": "Access-Control-Allow-Headers"}, + "expose_headers": {"tag": "output", "position": "header", "rename": "Access-Control-Expose-Headers"}, + } + + def __init__( + self, + content_length: Optional[int] = None, + content_type: Optional[str] = None, + etag: Optional[str] = None, + last_modified: Optional[datetime.datetime] = None, + content_md5: Optional[str] = None, + metadata: Optional[MutableMapping] = None, + cache_control: Optional[str] = None, + content_disposition: Optional[str] = None, + content_encoding: Optional[str] = None, + expires: Optional[str] = None, + hash_crc64: Optional[str] = None, + storage_class: Optional[str] = None, + object_type: Optional[str] = None, + version_id: Optional[str] = None, + tagging_count: Optional[int] = None, + server_side_encryption: Optional[str] = None, + server_side_data_encryption: Optional[str] = None, + sse_kms_key_id: Optional[str] = None, + next_append_position: Optional[str] = None, + expiration: Optional[str] = None, + restore: Optional[str] = None, + process_status: Optional[str] = None, + request_charged: Optional[str] = None, + allow_origin: Optional[str] = None, + allow_methods: Optional[str] = None, + allow_age: Optional[str] = None, + allow_headers: Optional[str] = None, + expose_headers: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + Args: + content_length (int, optional): Size of the body in bytes. + content_type (str, optional): A standard MIME type describing the format of the object data. + etag (str, optional): The entity tag (ETag). + An ETag is created when an object is created to identify the content of the object. + last_modified (datetime.datetime, optional): The time when the returned objects were last modified. + content_md5 (str, optional): Content-Md5 for the uploaded object. + metadata (MutableMapping, optional): A map of metadata to store with the object. + cache_control (str, optional): The caching behavior of the web page when the object is downloaded. + content_disposition (str, optional): The method that is used to access the object. + content_encoding (str, optional): The method that is used to encode the object. + expires (str, optional): The expiration time of the cache in UTC. + hash_crc64 (str, optional): The 64-bit CRC value of the object. + This value is calculated based on the ECMA-182 standard. + storage_class (str, optional): The storage class of the object. + object_type (str, optional): The type of the object. + version_id (str, optional): Version of the object. + tagging_count (int, optional): The number of tags added to the object. + This header is included in the response only when you have read permissions on tags. + server_side_encryption (str, optional): If the requested object is encrypted by + using a server-side encryption algorithm based on entropy encoding, OSS automatically decrypts + the object and returns the decrypted object after OSS receives the GetObject request. 
+ The x-oss-server-side-encryption header is included in the response to indicate the encryption algorithm + used to encrypt the object on the server. + server_side_data_encryption (str, optional): The server side data encryption algorithm. + This header is included in the response only when the x-oss-server-side-encryption header is set to KMS. + sse_kms_key_id (str, optional): The ID of the customer master key (CMK) that is managed by Key Management Service (KMS). + next_append_position (int, optional): The position for the next append operation. + If the type of the object is Appendable, this header is included in the response. + expiration (str, optional): The lifecycle information about the object. + If lifecycle rules are configured for the object, this header is included in the response. + This header contains the following parameters: expiry-date that indicates the expiration time of the object, + and rule-id that indicates the ID of the matched lifecycle rule. + restore (str, optional): The status of the object when you restore an object. + If the storage class of the object is Archive and a RestoreObject request has been submitted, + this header indicates the restoration status of the object. + process_status (str, optional): The result of an event notification that is triggered for the object. + request_charged (str, optional): The requester. This header is included in the response if the pay-by-requester mode + is enabled for the bucket and the requester is not the bucket owner. The value of this header is requester. + allow_origin (str, optional): The origins allowed for cross-origin resource sharing (CORS). + allow_methods (str, optional): The methods allowed for CORS. + allow_age (str, optional): The maximum caching period for CORS. + allow_headers (str, optional): The headers allowed for CORS. + expose_headers (str, optional): The headers that can be accessed by JavaScript applications on the client.
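+ Example (an illustrative sketch; it assumes a configured `client` built the same way
+ as in the sample/head_object.py script, and placeholder bucket/key names):
+ >>> import alibabacloud_oss_v2 as oss
+ >>> result = client.head_object(oss.HeadObjectRequest(
+ ... bucket='example-bucket',
+ ... key='example-key',
+ ... ))
+ >>> print(result.etag, result.content_length, result.storage_class)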
+ """ + super().__init__(**kwargs) + self.content_length = content_length + self.content_type = content_type + self.etag = etag + self.last_modified = last_modified + self.content_md5 = content_md5 + self.metadata = metadata + self.cache_control = cache_control + self.content_disposition = content_disposition + self.content_encoding = content_encoding + self.expires = expires + self.hash_crc64 = hash_crc64 + self.storage_class = storage_class + self.object_type = object_type + self.version_id = version_id + self.tagging_count = tagging_count + self.server_side_encryption = server_side_encryption + self.server_side_data_encryption = server_side_data_encryption + self.sse_kms_key_id = sse_kms_key_id + self.next_append_position = next_append_position + self.expiration = expiration + self.restore = restore + self.process_status = process_status + self.request_charged = request_charged + self.allow_origin = allow_origin + self.allow_methods = allow_methods + self.allow_age = allow_age + self.allow_headers = allow_headers + self.expose_headers = expose_headers + +class GetObjectRequest(serde.RequestModel): + """The request for the GetObject operation.""" + + _attribute_map = { + "bucket": {"tag": "input", "position": "host", "required": True}, + "key": {"tag": "input", "position": "path", "required": True}, + "if_match": {"tag": "input", "position": "header", "rename": "If-Match"}, + "if_none_match": {"tag": "input", "position": "header", "rename": "If-None-Match"}, + "if_modified_since": {"tag": "input", "position": "header", "rename": "If-Modified-Since"}, + "if_unmodified_since": {"tag": "input", "position": "header", "rename": "If-Unmodified-Since"}, + "range_header": {"tag": "input", "position": "header", "rename": "Range"}, + "range_behavior": {"tag": "input", "position": "header", "rename": "x-oss-range-behavior"}, + "response_cache_control": {"tag": "input", "position": "query", "rename": "response-cache-control"}, + "response_content_disposition": {"tag": "input", "position": "query", "rename": "response-content-disposition"}, + "response_content_encoding": {"tag": "input", "position": "query", "rename": "response-content-encoding"}, + "response_content_language": {"tag": "input", "position": "query", "rename": "response-content-language"}, + "response_content_type": {"tag": "input", "position": "query", "rename": "response-content-type"}, + "response_expires": {"tag": "input", "position": "query", "rename": "response-expires"}, + "version_id": {"tag": "input", "position": "query", "rename": "versionId"}, + "traffic_limit": {"tag": "input", "position": "header", "rename": "x-oss-traffic-limit", "type": "int"}, + "process": {"tag": "input", "position": "query", "rename": "x-oss-process"}, + "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"}, + "progress_fn": {}, + } + + def __init__( + self, + bucket: str = None, + key: str = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_modified_since: Optional[str] = None, + if_unmodified_since: Optional[str] = None, + range_header: Optional[str] = None, + range_behavior: Optional[str] = None, + response_cache_control: Optional[str] = None, + response_content_disposition: Optional[str] = None, + response_content_encoding: Optional[str] = None, + response_content_language: Optional[str] = None, + response_content_type: Optional[str] = None, + response_expires: Optional[str] = None, + version_id: Optional[str] = None, + traffic_limit: Optional[int] = None, + process: 
Optional[str] = None, + request_payer: Optional[str] = None, + progress_fn: Optional[Any] = None, + **kwargs: Any + ) -> None: + """ + Args: + bucket (str, required): The name of the bucket. + key (str, required): The name of the object. + if_match (str, optional): If the ETag specified in the request matches the ETag value of the object, + the object and 200 OK are returned. Otherwise, 412 Precondition Failed is returned. + if_none_match (str, optional): If the ETag specified in the request does not match the ETag value of the object, + the object and 200 OK are returned. Otherwise, 304 Not Modified is returned. + if_modified_since (str, optional): If the time specified in this header is earlier + than the object modified time or is invalid, the object and 200 OK are returned. + Otherwise, 304 Not Modified is returned. The time must be in GMT. Example: Fri, 13 Nov 2015 14:47:53 GMT. + if_unmodified_since (str, optional): If the time specified in this header is + the same as or later than the object modified time, the object and 200 OK are returned. + Otherwise, 412 Precondition Failed is returned. The time must be in GMT. Example: Fri, 13 Nov 2015 14:47:53 GMT. + range_header (str, optional): The content range of the object to be returned. + If the value of Range is valid, the total size of the object and the content range are returned. + For example, Content-Range: bytes 0-9/44 indicates that the total size of the object is 44 bytes, + and the range of data returned is the first 10 bytes. + However, if the value of Range is invalid, the entire object is returned, + and the response does not include the Content-Range parameter. + range_behavior (str, optional): Specify standard behaviors to download data by range. + If the value is "standard", the download behavior is modified when the specified range is not within the valid range. + For an object whose size is 1,000 bytes: + 1) If you set Range: bytes to 500-2000, the value at the end of the range is invalid. + In this case, OSS returns HTTP status code 206 and the data that is within the range of byte 500 to byte 999. + 2) If you set Range: bytes to 1000-2000, the value at the start of the range is invalid. + In this case, OSS returns HTTP status code 416 and the InvalidRange error code. + response_cache_control (str, optional): The cache-control header to be returned in the response. + response_content_disposition (str, optional): The content-disposition header to be returned in the response. + response_content_encoding (str, optional): The content-encoding header to be returned in the response. + response_content_language (str, optional): The content-language header to be returned in the response. + response_content_type (str, optional): The content-type header to be returned in the response. + response_expires (str, optional): The expires header to be returned in the response. + version_id (str, optional): VersionId used to reference a specific version of the object. + traffic_limit (int, optional): Specify the speed limit value. + The speed limit value ranges from 245760 to 838860800, with a unit of bit/s. + process (str, optional): Image processing parameters. + request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs. + progress_fn (Any, optional): Progress callback function.
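+ Example (an illustrative sketch of a range download; it assumes a configured `client`
+ as in the sample/get_object.py script, and placeholder names):
+ >>> import alibabacloud_oss_v2 as oss
+ >>> result = client.get_object(oss.GetObjectRequest(
+ ... bucket='example-bucket',
+ ... key='example-key',
+ ... range_header='bytes=0-9',
+ ... range_behavior='standard',
+ ... ))
+ >>> data = result.body.read()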
+ """ + super().__init__(**kwargs) + self.bucket = bucket + self.key = key + self.if_match = if_match + self.if_none_match = if_none_match + self.if_modified_since = if_modified_since + self.if_unmodified_since = if_unmodified_since + self.range_header = range_header + self.range_behavior = range_behavior + self.response_cache_control = response_cache_control + self.response_content_disposition = response_content_disposition + self.response_content_encoding = response_content_encoding + self.response_content_language = response_content_language + self.response_content_type = response_content_type + self.response_expires = response_expires + self.version_id = version_id + self.traffic_limit = traffic_limit + self.process = process + self.request_payer = request_payer + self.progress_fn = progress_fn + + +class GetObjectResult(serde.ResultModel): + """The result for the GetObject operation.""" + + _attribute_map = { + "content_length": {"tag": "output", "position": "header", "rename": "Content-Length", "type": "int"}, + "content_range": {"tag": "output", "position": "header", "rename": "Content-Range"}, + "content_type": {"tag": "output", "position": "header", "rename": "Content-Type"}, + "etag": {"tag": "output", "position": "header", "rename": "ETag"}, + "last_modified": {"tag": "output", "position": "header", "rename": "Last-Modified", "type": "datetime,httptime"}, + "content_md5": {"tag": "output", "position": "header", "rename": "Content-MD5"}, + "metadata": {"tag": "output", "position": "header", "rename": "x-oss-meta-", "type": "dict,usermeta"}, + "cache_control": {"tag": "output", "position": "header", "rename": "Cache-Control"}, + "content_disposition": {"tag": "output", "position": "header", "rename": "Content-Disposition"}, + "content_encoding": {"tag": "output", "position": "header", "rename": "Content-Encoding"}, + "expires": {"tag": "output", "position": "header", "rename": "Expires"}, + "hash_crc64": {"tag": "output", "position": "header", "rename": "x-oss-hash-crc64ecma"}, + "storage_class": {"tag": "output", "position": "header", "rename": "x-oss-storage-class"}, + "object_type": {"tag": "output", "position": "header", "rename": "x-oss-object-type"}, + "version_id": {"tag": "output", "position": "header", "rename": "x-oss-version-id"}, + "tagging_count": {"tag": "output", "position": "header", "rename": "x-oss-tagging-count", "type": "int"}, + "server_side_encryption": {"tag": "output", "position": "header", "rename": "x-oss-server-side-encryption"}, + "server_side_data_encryption": {"tag": "output", "position": "header", "rename": "x-oss-server-side-data-encryption"}, + "sse_kms_key_id": {"tag": "output", "position": "header", "rename": "x-oss-server-side-encryption-key-id"}, + "next_append_position": {"tag": "output", "position": "header", "rename": "x-oss-next-append-position", "type": "int"}, + "expiration": {"tag": "output", "position": "header", "rename": "x-oss-expiration"}, + "restore": {"tag": "output", "position": "header", "rename": "x-oss-restore"}, + "process_status": {"tag": "output", "position": "header", "rename": "x-oss-process-status"}, + "delete_marker": {"tag": "output", "position": "header", "rename": "x-oss-delete-marker", "type": "bool"}, + "body": {}, + } + + def __init__( + self, + content_length: Optional[int] = None, + content_range: Optional[str] = None, + content_type: Optional[str] = None, + etag: Optional[str] = None, + last_modified: Optional[datetime.datetime] = None, + content_md5: Optional[str] = None, + metadata: Optional[MutableMapping] = 
None, + cache_control: Optional[str] = None, + content_disposition: Optional[str] = None, + content_encoding: Optional[str] = None, + expires: Optional[str] = None, + hash_crc64: Optional[str] = None, + storage_class: Optional[str] = None, + object_type: Optional[str] = None, + version_id: Optional[str] = None, + tagging_count: Optional[int] = None, + server_side_encryption: Optional[str] = None, + server_side_data_encryption: Optional[str] = None, + sse_kms_key_id: Optional[str] = None, + next_append_position: Optional[int] = None, + expiration: Optional[str] = None, + restore: Optional[str] = None, + process_status: Optional[str] = None, + delete_marker: Optional[bool] = None, + body: Optional[StreamBody] = None, + **kwargs: Any + ) -> None: + """ + Args: + content_length (int, optional): Size of the body in bytes. + content_range (str, optional): The portion of the object returned in the response. + content_type (str, optional): A standard MIME type describing the format of the object data. + etag (str, optional): The entity tag (ETag). + An ETag is created when an object is created to identify the content of the object. + last_modified (datetime.datetime, optional): The time when the returned objects were last modified. + content_md5 (str, optional): Content-Md5 for the uploaded object. + metadata (MutableMapping, optional): A map of metadata to store with the object. + cache_control (str, optional): The caching behavior of the web page when the object is downloaded. + content_disposition (str, optional): The method that is used to access the object. + content_encoding (str, optional): The method that is used to encode the object. + expires (str, optional): The expiration time of the cache in UTC. + hash_crc64 (str, optional): The 64-bit CRC value of the object. + This value is calculated based on the ECMA-182 standard. + storage_class (str, optional): The storage class of the object. + object_type (str, optional): The type of the object. + version_id (str, optional): Version of the object. + tagging_count (int, optional): The number of tags added to the object. + This header is included in the response only when you have read permissions on tags. + server_side_encryption (str, optional): If the requested object is encrypted by + using a server-side encryption algorithm based on entropy encoding, OSS automatically decrypts + the object and returns the decrypted object after OSS receives the GetObject request. + The x-oss-server-side-encryption header is included in the response to indicate the encryption algorithm + used to encrypt the object on the server. + server_side_data_encryption (str, optional): The server side data encryption algorithm. + This header is included in the response only when the x-oss-server-side-encryption header is set to KMS. + sse_kms_key_id (str, optional): The ID of the customer master key (CMK) that is managed by Key Management Service (KMS). + next_append_position (int, optional): The position for the next append operation. + If the type of the object is Appendable, this header is included in the response. + expiration (str, optional): The lifecycle information about the object. + If lifecycle rules are configured for the object, this header is included in the response. + This header contains the following parameters: expiry-date that indicates the expiration time of the object, + and rule-id that indicates the ID of the matched lifecycle rule. + restore (str, optional): The status of the object when you restore an object.
+ If the storage class of the object is Archive and a RestoreObject request has been submitted, + this header indicates the restoration status of the object. + process_status (str, optional): The result of an event notification that is triggered for the object. + delete_marker (bool, optional): Specifies whether the object retrieved was (true) or was not (false) a Delete Marker. + body (StreamBody, optional): Object data. + """ + super().__init__(**kwargs) + self.content_length = content_length + self.content_range = content_range + self.content_type = content_type + self.etag = etag + self.last_modified = last_modified + self.content_md5 = content_md5 + self.metadata = metadata + self.cache_control = cache_control + self.content_disposition = content_disposition + self.content_encoding = content_encoding + self.expires = expires + self.hash_crc64 = hash_crc64 + self.storage_class = storage_class + self.object_type = object_type + self.version_id = version_id + self.tagging_count = tagging_count + self.server_side_encryption = server_side_encryption + self.server_side_data_encryption = server_side_data_encryption + self.sse_kms_key_id = sse_kms_key_id + self.next_append_position = next_append_position + self.expiration = expiration + self.restore = restore + self.process_status = process_status + self.delete_marker = delete_marker + self.body = body + + + +class AppendObjectRequest(serde.RequestModel): + """The request for the AppendObject operation.""" + + _attribute_map = { + "bucket": {"tag": "input", "position": "host", "required": True}, + "key": {"tag": "input", "position": "path", "required": True}, + "position": {"tag": "input", "position": "query", "rename": "position", "required": True}, + "acl": {"tag": "input", "position": "header", "rename": "x-oss-object-acl"}, + "storage_class": {"tag": "input", "position": "header", "rename": "x-oss-storage-class"}, + "metadata": {"tag": "input", "position": "header", "rename": "x-oss-meta-", "type": "dict,usermeta"}, + "cache_control": {"tag": "input", "position": "header", "rename": "Cache-Control"}, + "content_disposition": {"tag": "input", "position": "header", "rename": "Content-Disposition"}, + "content_encoding": {"tag": "input", "position": "header", "rename": "Content-Encoding"}, + "content_length": {"tag": "input", "position": "header", "rename": "Content-Length", "type": "int"}, + "content_md5": {"tag": "input", "position": "header", "rename": "Content-MD5"}, + "content_type": {"tag": "input", "position": "header", "rename": "Content-Type"}, + "expires": {"tag": "input", "position": "header", "rename": "Expires"}, + "server_side_encryption": {"tag": "input", "position": "header", "rename": "x-oss-server-side-encryption"}, + "server_side_data_encryption": {"tag": "input", "position": "header", "rename": "x-oss-server-side-data-encryption"}, + "sse_kms_key_id": {"tag": "input", "position": "header", "rename": "x-oss-server-side-encryption-key-id"}, + "tagging": {"tag": "input", "position": "header", "rename": "x-oss-tagging"}, + "forbid_overwrite": {"tag": "input", "position": "header", "rename": "x-oss-forbid-overwrite", "type": "bool"}, + "traffic_limit": {"tag": "input", "position": "header", "rename": "x-oss-traffic-limit", "type": "int"}, + "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"}, + "body": {"tag": "input", "position": "body"}, + "progress_fn": {}, + } + + def __init__( + self, + bucket: str = None, + key: str = None, + position: int = None, + acl: Optional[str] = None, + storage_class: Optional[str] = None, + metadata: Optional[MutableMapping] = None,
+ cache_control: Optional[str] = None, + content_disposition: Optional[str] = None, + content_encoding: Optional[str] = None, + content_length: Optional[int] = None, + content_md5: Optional[str] = None, + content_type: Optional[str] = None, + expires: Optional[str] = None, + server_side_encryption: Optional[str] = None, + server_side_data_encryption: Optional[str] = None, + sse_kms_key_id: Optional[str] = None, + tagging: Optional[str] = None, + forbid_overwrite: Optional[bool] = None, + traffic_limit: Optional[int] = None, + request_payer: Optional[str] = None, + body: Optional[BodyType] = None, + progress_fn: Optional[Any] = None, + **kwargs: Any + ) -> None: + """ + Args: + bucket (str, required): The name of the bucket. + key (str, required): The name of the object. + position (int, required): The position from which the AppendObject operation starts. + Each time an AppendObject operation succeeds, the x-oss-next-append-position header is included in + the response to specify the position from which the next AppendObject operation starts. + acl (str, optional): The access control list (ACL) of the object. + storage_class (str, optional): The storage class of the object. + metadata (MutableMapping, optional): The metadata of the object that you want to upload. + cache_control (str, optional): The caching behavior of the web page when the object is downloaded. + content_disposition (str, optional): The method that is used to access the object. + content_encoding (str, optional): The method that is used to encode the object. + content_length (int, optional): The size of the data in the HTTP message body. Unit: bytes. + content_md5 (str, optional): The MD5 hash of the object that you want to upload. + content_type (str, optional): A standard MIME type describing the format of the contents. + expires (str, optional): The expiration time of the cache in UTC. + server_side_encryption (str, optional): The encryption method on the server side when an object is created. + Valid values: AES256 and KMS + server_side_data_encryption (str, optional): The server side data encryption algorithm. + This header is valid only when the x-oss-server-side-encryption header is set to KMS. + sse_kms_key_id (str, optional): The ID of the customer master key (CMK) that is managed by Key Management Service (KMS). + tagging (str, optional): The tags that are specified for the object by using a key-value pair. + You can specify multiple tags for an object. Example: TagA=A&TagB=B. + forbid_overwrite (bool, optional): Specifies whether the object that is uploaded by calling the AppendObject operation + overwrites an existing object that has the same name. + traffic_limit (int, optional): Specify the speed limit value. + The speed limit value ranges from 245760 to 838860800, with a unit of bit/s. + request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs. + body (BodyType, optional): Object data. + progress_fn (Any, optional): Progress callback function.
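+ Example (an illustrative sketch of chained appends; it assumes a configured `client`
+ as in the sample/append_object.py script, and placeholder names):
+ >>> import alibabacloud_oss_v2 as oss
+ >>> result = client.append_object(oss.AppendObjectRequest(
+ ... bucket='example-bucket', key='example-key',
+ ... position=0, body=b'hello '))
+ >>> # continue from the position returned by the previous append
+ >>> client.append_object(oss.AppendObjectRequest(
+ ... bucket='example-bucket', key='example-key',
+ ... position=result.next_position, body=b'world'))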
+ """ + super().__init__(**kwargs) + self.bucket = bucket + self.key = key + self.position = position + self.acl = acl + self.storage_class = storage_class + self.metadata = metadata + self.cache_control = cache_control + self.content_disposition = content_disposition + self.content_encoding = content_encoding + self.content_length = content_length + self.content_md5 = content_md5 + self.content_type = content_type + self.expires = expires + self.server_side_encryption = server_side_encryption + self.server_side_data_encryption = server_side_data_encryption + self.sse_kms_key_id = sse_kms_key_id + self.tagging = tagging + self.forbid_overwrite = forbid_overwrite + self.traffic_limit = traffic_limit + self.request_payer = request_payer + self.body = body + self.progress_fn = progress_fn + + +class AppendObjectResult(serde.ResultModel): + """The result for the AppendObject operation.""" + + _attribute_map = { + "version_id": {"tag": "output", "position": "header", "rename": "x-oss-version-id"}, + "hash_crc64": {"tag": "output", "position": "header", "rename": "x-oss-hash-crc64ecma"}, + "next_position": {"tag": "output", "position": "header", "rename": "x-oss-next-append-position", "type": "int"}, + "server_side_encryption": {"tag": "output", "position": "header", "rename": "x-oss-server-side-encryption"}, + "server_side_data_encryption": {"tag": "output", "position": "header", "rename": "x-oss-server-side-data-encryption"}, + "sse_kms_key_id": {"tag": "output", "position": "header", "rename": "x-oss-server-side-encryption-key-id"}, + } + + def __init__( + self, + version_id: Optional[str] = None, + hash_crc64: Optional[str] = None, + next_position: Optional[int] = None, + server_side_encryption: Optional[str] = None, + server_side_data_encryption: Optional[str] = None, + sse_kms_key_id: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + Args: + version_id (str, optional): Version of the object. + hash_crc64 (str, optional): The 64-bit CRC value of the object. + This value is calculated based on the ECMA-182 standard. + next_position (str, optional): The position that must be provided in the next request, + which is the current length of the object. + server_side_encryption (str, optional): The encryption method on the server side when an object is created. + Valid values: AES256 and KMS + server_side_data_encryption (str, optional): The ID of the customer master key (CMK) + that is managed by Key Management Service (KMS). This header is valid only when the x-oss-server-side-encryption header + is set to KMS. + sse_kms_key_id (str, optional): The ID of the customer master key (CMK) that is managed by Key Management Service (KMS). 
+ """ + super().__init__(**kwargs) + self.version_id = version_id + self.hash_crc64 = hash_crc64 + self.next_position = next_position + self.server_side_encryption = server_side_encryption + self.server_side_data_encryption = server_side_data_encryption + self.sse_kms_key_id = sse_kms_key_id + + +class CopyObjectRequest(serde.RequestModel): + """The request for the CopyObject operation.""" + + _attribute_map = { + "bucket": {"tag": "input", "position": "host", "required": True}, + "key": {"tag": "input", "position": "path", "required": True}, + "source_bucket": {"tag": "input", "position": "nop"}, + "source_key": {"tag": "input", "position": "nop", "required": True}, + "source_version_id": {"tag": "input", "position": "nop"}, + "if_match": {"tag": "input", "position": "header", "rename": "x-oss-copy-source-if-match"}, + "if_none_match": {"tag": "input", "position": "header", "rename": "x-oss-copy-source-if-none-match"}, + "if_modified_since": {"tag": "input", "position": "header", "rename": "x-oss-copy-source-if-modified-since"}, + "if_unmodified_since": {"tag": "input", "position": "header", "rename": "x-oss-copy-source-if-unmodified-since"}, + "acl": {"tag": "input", "position": "header", "rename": "x-oss-object-acl"}, + "storage_class": {"tag": "input", "position": "header", "rename": "x-oss-storage-class"}, + "metadata": {"tag": "input", "position": "header", "rename": "x-oss-meta-", "type": "dict,usermeta"}, + "cache_control": {"tag": "input", "position": "header", "rename": "Cache-Control"}, + "content_disposition": {"tag": "input", "position": "header", "rename": "Content-Disposition"}, + "content_encoding": {"tag": "input", "position": "header", "rename": "Content-Encoding"}, + "content_length": {"tag": "input", "position": "header", "rename": "Content-Length", "type": "int"}, + "content_md5": {"tag": "input", "position": "header", "rename": "Content-MD5"}, + "content_type": {"tag": "input", "position": "header", "rename": "Content-Type"}, + "expires": {"tag": "input", "position": "header", "rename": "Expires"}, + "metadata_directive": {"tag": "input", "position": "header", "rename": "x-oss-metadata-directive"}, + "server_side_encryption": {"tag": "input", "position": "header", "rename": "x-oss-server-side-encryption"}, + "server_side_data_encryption": {"tag": "input", "position": "header", "rename": "x-oss-server-side-data-encryption"}, + "sse_kms_key_id": {"tag": "input", "position": "header", "rename": "x-oss-server-side-encryption-key-id"}, + "tagging": {"tag": "input", "position": "header", "rename": "x-oss-tagging"}, + "tagging_directive": {"tag": "input", "position": "header", "rename": "x-oss-tagging-directive"}, + "forbid_overwrite": {"tag": "input", "position": "header", "rename": "x-oss-forbid-overwrite", "type": "bool"}, + "traffic_limit": {"tag": "input", "position": "header", "rename": "x-oss-traffic-limit", "type": "int"}, + "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"}, + "progress_fn": {}, + } + + def __init__( + self, + bucket: str = None, + key: str = None, + source_bucket: Optional[str] = None, + source_key: Optional[str] = None, + source_version_id: Optional[str] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_modified_since: Optional[str] = None, + if_unmodified_since: Optional[str] = None, + acl: Optional[str] = None, + storage_class: Optional[str] = None, + metadata: Optional[MutableMapping] = None, + cache_control: Optional[str] = None, + content_disposition: Optional[str] = 
None, + content_encoding: Optional[str] = None, + content_length: Optional[int] = None, + content_md5: Optional[str] = None, + content_type: Optional[str] = None, + expires: Optional[str] = None, + metadata_directive: Optional[str] = None, + server_side_encryption: Optional[str] = None, + server_side_data_encryption: Optional[str] = None, + sse_kms_key_id: Optional[str] = None, + tagging: Optional[str] = None, + tagging_directive: Optional[str] = None, + forbid_overwrite: Optional[bool] = None, + traffic_limit: Optional[int] = None, + request_payer: Optional[str] = None, + progress_fn: Optional[Any] = None, + **kwargs: Any + ) -> None: + """ + Args: + bucket (str, required): The name of the bucket. + key (str, required): The name of the object. + source_bucket (str, optional): The name of the source bucket. + source_key (str, required): The name of the source object. + source_version_id (str, optional): The version ID of the source object. + if_match (str, optional): If the ETag specified in the request matches the ETag value of the source object, + OSS copies the object and returns 200 OK. Otherwise, 412 Precondition Failed is returned. + if_none_match (str, optional): If the ETag specified in the request does not match the ETag value of the object, + the object and 200 OK are returned. Otherwise, 304 Not Modified is returned. + if_modified_since (str, optional): If the time specified in this header is earlier than + the object modified time or is invalid, the object and 200 OK are returned. + Otherwise, 304 Not Modified is returned. The time must be in GMT. Example: Fri, 13 Nov 2015 14:47:53 GMT. + if_unmodified_since (str, optional): If the time specified in this header is the same as or later than + the object modified time, the object and 200 OK are returned. Otherwise, 412 Precondition Failed is returned. + The time must be in GMT. Example: Fri, 13 Nov 2015 14:47:53 GMT. + acl (str, optional): The access control list (ACL) of the object. + storage_class (str, optional): The storage class of the object. + metadata (MutableMapping, optional): The metadata of the object that you want to upload. + cache_control (str, optional): The caching behavior of the web page when the object is downloaded. + content_disposition (str, optional): The method that is used to access the object. + content_encoding (str, optional): The method that is used to encode the object. + content_length (int, optional): The size of the data in the HTTP message body. Unit: bytes. + content_md5 (str, optional): The MD5 hash of the object that you want to upload. + content_type (str, optional): A standard MIME type describing the format of the contents. + expires (str, optional): The expiration time of the cache in UTC. + metadata_directive (str, optional): The method that is used to configure the metadata of the destination object. + COPY (default): The metadata of the source object is copied to the destination object. + The configurations of the x-oss-server-side-encryption header of the source object + are not copied to the destination object. + The x-oss-server-side-encryption header in the CopyObject request specifies + the method used to encrypt the destination object. + REPLACE: The metadata specified in the request is used as the metadata of the destination object. + server_side_encryption (str, optional): The encryption method on the server side when an object is created.
+ Valid values: AES256 and KMS + server_side_data_encryption (str, optional): The server side data encryption algorithm. + This header is valid only when the x-oss-server-side-encryption header is set to KMS. + sse_kms_key_id (str, optional): The ID of the customer master key (CMK) that is managed by Key Management Service (KMS). + tagging (str, optional): The tags that are specified for the object by using a key-value pair. + You can specify multiple tags for an object. Example: TagA=A&TagB=B. + tagging_directive (str, optional): The method that is used to configure tags for the destination object. + Valid values: Copy (default): The tags of the source object are copied to the destination object. + Replace: The tags specified in the request are configured for the destination object. + forbid_overwrite (bool, optional): Specifies whether the object that is uploaded by calling the CopyObject operation + overwrites an existing object that has the same name. + traffic_limit (int, optional): Specify the speed limit value. + The speed limit value ranges from 245760 to 838860800, with a unit of bit/s. + request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs. + progress_fn (Any, optional): Progress callback function; it works in Copier.copy only. + """ + super().__init__(**kwargs) + self.bucket = bucket + self.key = key + self.source_bucket = source_bucket + self.source_key = source_key + self.source_version_id = source_version_id + self.if_match = if_match + self.if_none_match = if_none_match + self.if_modified_since = if_modified_since + self.if_unmodified_since = if_unmodified_since + self.acl = acl + self.storage_class = storage_class + self.metadata = metadata + self.cache_control = cache_control + self.content_disposition = content_disposition + self.content_encoding = content_encoding + self.content_length = content_length + self.content_md5 = content_md5 + self.content_type = content_type + self.expires = expires + self.metadata_directive = metadata_directive + self.server_side_encryption = server_side_encryption + self.server_side_data_encryption = server_side_data_encryption + self.sse_kms_key_id = sse_kms_key_id + self.tagging = tagging + self.tagging_directive = tagging_directive + self.forbid_overwrite = forbid_overwrite + self.traffic_limit = traffic_limit + self.request_payer = request_payer + self.progress_fn = progress_fn + + +class CopyObjectResult(serde.ResultModel): + """The result for the CopyObject operation.""" + + _attribute_map = { + "version_id": {"tag": "output", "position": "header", "rename": "x-oss-version-id"}, + "hash_crc64": {"tag": "output", "position": "header", "rename": "x-oss-hash-crc64ecma"}, + "source_version_id": {"tag": "output", "position": "header", "rename": "x-oss-copy-source-version-id"}, + "server_side_encryption": {"tag": "output", "position": "header", "rename": "x-oss-server-side-encryption"}, + "server_side_data_encryption": {"tag": "output", "position": "header", "rename": "x-oss-server-side-data-encryption"}, + "sse_kms_key_id": {"tag": "output", "position": "header", "rename": "x-oss-server-side-encryption-key-id"}, + "last_modified": {"tag": "xml", "rename": "LastModified", "type": "datetime"}, + "etag": {"tag": "xml", "rename": "ETag"}, + } + + def __init__( + self, + version_id: Optional[str] = None, + hash_crc64: Optional[str] = None, + source_version_id: Optional[str] = None, + server_side_encryption: Optional[str] = None, +
server_side_data_encryption: Optional[str] = None, + sse_kms_key_id: Optional[str] = None, + last_modified: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + Args: + version_id (str, optional): Version of the object. + hash_crc64 (str, optional): The 64-bit CRC value of the object. + This value is calculated based on the ECMA-182 standard. + source_version_id (str, optional): The version ID of the source object. + server_side_encryption (str, optional): The encryption method on the server side when an object is created. + Valid values: AES256 and KMS + server_side_data_encryption (str, optional): The server side data encryption algorithm. + This header is included in the response only when the x-oss-server-side-encryption header is set to KMS. + sse_kms_key_id (str, optional): The ID of the customer master key (CMK) that is managed by Key Management Service (KMS). + last_modified (datetime.datetime, optional): The time when the returned objects were last modified. + etag (str, optional): The entity tag (ETag). + An ETag is created when an object is created to identify the content of the object. + """ + super().__init__(**kwargs) + self.version_id = version_id + self.hash_crc64 = hash_crc64 + self.source_version_id = source_version_id + self.server_side_encryption = server_side_encryption + self.server_side_data_encryption = server_side_data_encryption + self.sse_kms_key_id = sse_kms_key_id + self.last_modified = last_modified + self.etag = etag + + +class DeleteObjectRequest(serde.RequestModel): + """The request for the DeleteObject operation.""" + + _attribute_map = { + "bucket": {"tag": "input", "position": "host", "required": True}, + "key": {"tag": "input", "position": "path", "required": True}, + "version_id": {"tag": "input", "position": "query", "rename": "versionId"}, + "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"}, + } + + def __init__( + self, + bucket: str = None, + key: str = None, + version_id: Optional[str] = None, + request_payer: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + Args: + bucket (str, required): The name of the bucket. + key (str, required): The name of the object. + version_id (str, optional): The version ID of the object. + request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs. + """ + super().__init__(**kwargs) + self.bucket = bucket + self.key = key + self.version_id = version_id + self.request_payer = request_payer + + +class DeleteObjectResult(serde.ResultModel): + """The result for the DeleteObject operation.""" + + _attribute_map = { + "version_id": {"tag": "output", "position": "header", "rename": "x-oss-version-id"}, + "delete_marker": {"tag": "output", "position": "header", "rename": "x-oss-delete-marker", "type": "bool"}, + } + + def __init__( + self, + version_id: Optional[str] = None, + delete_marker: Optional[bool] = None, + **kwargs: Any + ) -> None: + """ + Args: + version_id (str, optional): Version of the object. + delete_marker (bool, optional): Specifies whether the object retrieved was (true) or was not (false) a Delete Marker.
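+ Example (an illustrative sketch; it assumes a configured `client` as in the
+ sample/delete_object.py script, and placeholder names):
+ >>> import alibabacloud_oss_v2 as oss
+ >>> result = client.delete_object(oss.DeleteObjectRequest(
+ ... bucket='example-bucket', key='example-key'))
+ >>> print(result.version_id, result.delete_marker)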
+ """ + super().__init__(**kwargs) + self.version_id = version_id + self.delete_marker = delete_marker + + +class DeleteObject(serde.Model): + """The information about a delete object.""" + + def __init__( + self, + key: Optional[str] = None, + version_id: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + key (str, optional): The name of the object that you want to delete. + version_id (str, optional): The version ID of the object that you want to delete. + """ + super().__init__(**kwargs) + self.key = key + self.version_id = version_id + + _attribute_map = { + "key": {"tag": "xml", "rename": "Key"}, + "version_id": {"tag": "xml", "rename": "VersionId"}, + } + _xml_map = { + "name": "Object" + } + +class DeleteMultipleObjectsRequest(serde.RequestModel): + """The request for the DeleteMultipleObjects operation.""" + + _attribute_map = { + "bucket": {"tag": "input", "position": "host", "required": True}, + "encoding_type": {"tag": "input", "position": "query", "rename": "encoding-type"}, + "content_length": {"tag": "input", "position": "header", "rename": "Content-Length"}, + "objects": {"tag": "input", "position": "nop", "required": True}, + "quiet": {"tag": "input", "position": "nop"}, + "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"}, + } + + def __init__( + self, + bucket: str = None, + encoding_type: Optional[str] = None, + content_length: Optional[int] = None, + objects: Optional[List[DeleteObject]] = None, + quiet: Optional[bool] = None, + request_payer: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + Args: + bucket (str, required): The name of the bucket. + encoding_type (str, optional): The encoding type of the object names in the response. Valid value: url + content_length (int, optional): The size of the data in the HTTP message body. Unit: bytes. + objects ([DeleteObject], optional): The container that stores information about you want to delete objects. + quiet (bool, optional): Specifies whether to enable the Quiet return mode. + The DeleteMultipleObjects operation provides the following return modes: Valid value: true,false + request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs. + """ + super().__init__(**kwargs) + self.bucket = bucket + self.encoding_type = encoding_type + self.content_length = content_length + self.objects = objects + self.quiet = quiet + self.request_payer = request_payer + + +class DeletedInfo(serde.Model): + """The information about a delete object.""" + + def __init__( + self, + key: Optional[str] = None, + version_id: Optional[str] = None, + delete_marker: Optional[bool] = None, + delete_marker_version_id: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + key (str, optional): The name of the deleted object. + version_id (str, optional): The version ID of the object that you deleted. + delete_marker (bool, optional): Indicates whether the deleted version is a delete marker. + delete_marker_version_id (str, optional): The version ID of the delete marker. 
+ """ + super().__init__(**kwargs) + self.key = key + self.version_id = version_id + self.delete_marker = delete_marker + self.delete_marker_version_id = delete_marker_version_id + + _attribute_map = { + "key": {"tag": "xml", "rename": "Key"}, + "version_id": {"tag": "xml", "rename": "VersionId"}, + "delete_marker": {"tag": "xml", "rename": "DeleteMarker"}, + "delete_marker_version_id": {"tag": "xml", "rename": "DeleteMarkerVersionId"}, + } + _xml_map = { + "name": "Deleted" + } + + +class DeleteMultipleObjectsResult(serde.ResultModel): + """The result for the DeleteMultipleObjects operation.""" + + _attribute_map = { + "deleted_objects": {"tag": "xml", "rename": "Deleted", "type": "[DeletedInfo]"}, + "encoding_type": {"tag": "xml", "rename": "EncodingType"}, + } + + _dependency_map = { + "DeletedInfo": {"new": lambda: DeletedInfo()}, + } + + _xml_map = { + "name": "DeleteResult" + } + + def __init__( + self, + deleted_objects: Optional[List[DeletedInfo]] = None, + encoding_type: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + deleted_objects ([DeletedInfo], optional): The container that stores information about the deleted objects. + encoding_type (str, optional): The encoding type of the content in the response. + If encoding-type is specified in the request, the object name is encoded in the returned result. + """ + super().__init__(**kwargs) + self.deleted_objects = deleted_objects + self.encoding_type = encoding_type + +class GetObjectMetaRequest(serde.RequestModel): + """The request for the GetObjectMeta operation.""" + + _attribute_map = { + "bucket": {"tag": "input", "position": "host", "required": True}, + "key": {"tag": "input", "position": "path", "required": True}, + "version_id": {"tag": "input", "position": "query", "rename": "versionId"}, + "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"}, + } + + def __init__( + self, + bucket: str = None, + key: str = None, + version_id: Optional[str] = None, + request_payer: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + Args: + bucket (str, required): The name of the bucket. + key (str, required): The name of the object. + version_id (str, optional): The version ID of the source object. + request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs. 
+ """ + super().__init__(**kwargs) + self.bucket = bucket + self.key = key + self.version_id = version_id + self.request_payer = request_payer + + +class GetObjectMetaResult(serde.ResultModel): + """The result for the GetObjectMeta operation.""" + + _attribute_map = { + "content_length": {"tag": "output", "position": "header", "rename": "Content-Length", "type": "int"}, + "etag": {"tag": "output", "position": "header", "rename": "ETag"}, + "last_modified": {"tag": "output", "position": "header", "rename": "Last-Modified", "type": "datetime,httptime"}, + "last_access_time": {"tag": "output", "position": "header", "rename": "x-oss-last-access-time", "type": "datetime,httptime"}, + "version_id": {"tag": "output", "position": "header", "rename": "x-oss-version-id"}, + "hash_crc64": {"tag": "output", "position": "header", "rename": "x-oss-hash-crc64ecma"}, + } + + def __init__( + self, + content_length: Optional[int] = None, + etag: Optional[str] = None, + last_modified: Optional[datetime.datetime] = None, + last_access_time: Optional[datetime.datetime] = None, + version_id: Optional[str] = None, + hash_crc64: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + Args: + content_length (int, optional): Size of the body in bytes. + etag (str, optional): The entity tag (ETag). + An ETag is created when an object is created to identify the content of the object. + last_modified (datetime.datetime, optional): The time when the returned objects were last modified. + last_access_time (datetime.datetime, optional): The time when the object was last accessed. + version_id (str, optional): Version of the object. + hash_crc64 (str, optional): The 64-bit CRC value of the object. + This value is calculated based on the ECMA-182 standard. + """ + super().__init__(**kwargs) + self.content_length = content_length + self.etag = etag + self.last_modified = last_modified + self.last_access_time = last_access_time + self.version_id = version_id + self.hash_crc64 = hash_crc64 + + +class RestoreRequest(serde.Model): + """The configuration information about the RestoreObject request.""" + + def __init__( + self, + days: Optional[int] = None, + tier: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + days (int, optional): The duration within which the restored object remains in the restored state. + tier (str, optional): The restoration priority of Cold Archive or Deep Cold Archive objects. + Valid values:Expedited,Standard,Bulk + """ + super().__init__(**kwargs) + self.days = days + self.tier = tier + + _attribute_map = { + "days": {"tag": "xml", "rename": "Days"}, + "tier": {"tag": "xml", "rename": "JobParameters.Tier"}, + } + _xml_map = { + "name": "RestoreRequest" + } + + +class RestoreObjectRequest(serde.RequestModel): + """The request for the RestoreObject operation.""" + + _attribute_map = { + "bucket": {"tag": "input", "position": "host", "required": True}, + "key": {"tag": "input", "position": "path", "required": True}, + "version_id": {"tag": "input", "position": "query", "rename": "versionId"}, + "restore_request": {"tag": "input", "position": "body", "rename": "RestoreRequest", "type": "xml"}, + "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"}, + } + + def __init__( + self, + bucket: str = None, + key: str = None, + version_id: Optional[str] = None, + restore_request: Optional[RestoreRequest] = None, + request_payer: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + bucket (str, required): The name of the bucket. 
+ key (str, required): The name of the object. + version_id (str, optional): The version ID of the object. + restore_request (RestoreRequest, optional): The container that stores information about the RestoreObject request. + request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs. + """ + super().__init__(**kwargs) + self.bucket = bucket + self.key = key + self.version_id = version_id + self.restore_request = restore_request + self.request_payer = request_payer + + +class RestoreObjectResult(serde.ResultModel): + """The result for the RestoreObject operation.""" + + _attribute_map = { + "version_id": {"tag": "output", "position": "header", "rename": "x-oss-version-id"}, + "restore_priority": {"tag": "output", "position": "header", "rename": "x-oss-object-restore-priority"}, + } + + def __init__( + self, + version_id: Optional[str] = None, + restore_priority: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + Args: + version_id (str, optional): Version of the object. + restore_priority (str, optional): The restoration priority. + This header is displayed only for the Cold Archive or Deep Cold Archive object in the restored state. + """ + super().__init__(**kwargs) + self.version_id = version_id + self.restore_priority = restore_priority + + +class PutObjectAclRequest(serde.RequestModel): + """The request for the PutObjectAcl operation.""" + + _attribute_map = { + "bucket": {"tag": "input", "position": "host", "required": True}, + "key": {"tag": "input", "position": "path", "required": True}, + "acl": {"tag": "input", "position": "header", "rename": "x-oss-object-acl", "required": True}, + "version_id": {"tag": "input", "position": "query", "rename": "versionId"}, + "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"}, + } + + def __init__( + self, + bucket: str = None, + key: str = None, + acl: str = None, + version_id: Optional[str] = None, + request_payer: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + Args: + bucket (str, required): The name of the bucket. + key (str, required): The name of the object. + acl (str, required): The access control list (ACL) of the object. + version_id (str, optional): The version ID of the object. + request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs. + """ + super().__init__(**kwargs) + self.bucket = bucket + self.key = key + self.acl = acl + self.version_id = version_id + self.request_payer = request_payer + + +class PutObjectAclResult(serde.ResultModel): + """The result for the PutObjectAcl operation.""" + + _attribute_map = { + "version_id": {"tag": "output", "position": "header", "rename": "x-oss-version-id"}, + } + + def __init__( + self, + version_id: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + Args: + version_id (str, optional): Version of the object.
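+ Example (an illustrative sketch; it assumes a configured `client` that exposes a
+ matching put_object_acl operation, and placeholder names):
+ >>> import alibabacloud_oss_v2 as oss
+ >>> result = client.put_object_acl(oss.PutObjectAclRequest(
+ ... bucket='example-bucket', key='example-key', acl='private'))
+ >>> print(result.version_id)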
+ """ + super().__init__(**kwargs) + self.version_id = version_id + + +class GetObjectAclRequest(serde.RequestModel): + """The request for the GetObjectAcl operation.""" + + _attribute_map = { + "bucket": {"tag": "input", "position": "host", "required": True}, + "key": {"tag": "input", "position": "path", "required": True}, + "version_id": {"tag": "input", "position": "query", "rename": "versionId"}, + "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"}, + } + + def __init__( + self, + bucket: str = None, + key: str = None, + version_id: Optional[str] = None, + request_payer: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + bucket (str, required): The name of the bucket. + key (str, required): The name of the object. + version_id (str, optional): The version ID of the source object. + request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs + """ + super().__init__(**kwargs) + self.bucket = bucket + self.key = key + self.version_id = version_id + self.request_payer = request_payer + + +class GetObjectAclResult(serde.ResultModel): + """The result for the GetObjectAcl operation.""" + + _attribute_map = { + "acl": {"tag": "xml", "rename": "AccessControlList/Grant"}, + "owner": {"tag": "xml", "rename": "Owner", "type": "Owner"}, + "version_id": {"tag": "output", "position": "header", "rename": "x-oss-version-id"}, + } + + _dependency_map = { + "Owner": {"new": lambda: Owner()}, + } + + _xml_map = { + "name": "AccessControlPolicy" + } + + def __init__( + self, + acl: Optional[str] = None, + owner: Optional[Owner] = None, + version_id: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + Args: + acl (str, optional): The ACL of the object. Default value: default. + owner (Owner, optional): The container that stores information about the object owner. + version_id (str, optional): Version of the object. 
+ """ + super().__init__(**kwargs) + self.acl = acl + self.owner = owner + self.version_id = version_id + +class InitiateMultipartUploadRequest(serde.RequestModel): + """The request for the InitiateMultipartUpload operation.""" + + _attribute_map = { + "bucket": {"tag": "input", "position": "host", "required": True}, + "key": {"tag": "input", "position": "path", "required": True}, + "encoding_type": {"tag": "input", "position": "query", "rename": "encoding-type"}, + "storage_class": {"tag": "input", "position": "header", "rename": "x-oss-storage-class"}, + "metadata": {"tag": "input", "position": "header", "rename": "x-oss-meta-", "type": "dict,usermeta"}, + "cache_control": {"tag": "input", "position": "header", "rename": "Cache-Control"}, + "content_disposition": {"tag": "input", "position": "header", "rename": "Content-Disposition"}, + "content_encoding": {"tag": "input", "position": "header", "rename": "Content-Encoding"}, + "content_length": {"tag": "input", "position": "header", "rename": "Content-Length", "type": "int"}, + "content_md5": {"tag": "input", "position": "header", "rename": "Content-MD5"}, + "content_type": {"tag": "input", "position": "header", "rename": "Content-Type"}, + "expires": {"tag": "input", "position": "header", "rename": "Expires"}, + "server_side_encryption": {"tag": "input", "position": "header", "rename": "x-oss-server-side-encryption"}, + "server_side_data_encryption": {"tag": "input", "position": "header", "rename": "x-oss-server-side-data-encryption"}, + "sse_kms_key_id": {"tag": "input", "position": "header", "rename": "x-oss-server-side-encryption-key-id"}, + "tagging": {"tag": "input", "position": "header", "rename": "x-oss-tagging"}, + "forbid_overwrite": {"tag": "input", "position": "header", "rename": "x-oss-forbid-overwrite", "type": "bool"}, + "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"}, + "cse_data_size": {}, + "cse_part_size": {}, + } + + def __init__( + self, + bucket: str = None, + key: str = None, + encoding_type: Optional[str] = None, + storage_class: Optional[str] = None, + metadata: Optional[MutableMapping] = None, + cache_control: Optional[str] = None, + content_disposition: Optional[str] = None, + content_encoding: Optional[str] = None, + content_length: Optional[int] = None, + content_md5: Optional[str] = None, + content_type: Optional[str] = None, + expires: Optional[str] = None, + server_side_encryption: Optional[str] = None, + server_side_data_encryption: Optional[str] = None, + sse_kms_key_id: Optional[str] = None, + tagging: Optional[str] = None, + forbid_overwrite: Optional[bool] = None, + request_payer: Optional[str] = None, + cse_data_size: Optional[int] = None, + cse_part_size: Optional[int] = None, + **kwargs: Any + ) -> None: + """ + Args: + bucket (str, required): The name of the bucket. + key (str, required): The name of the object. + encoding_type (str, optional): The encoding type of the object names in the response. Valid value: url + storage_class (str, optional): The storage class of the object. + metadata (MutableMapping,The metadata of the object that you want to upload. + cache_control (str, optional): The caching behavior of the web page when the object is downloaded. + content_disposition (str, optional): The method that is used to access the object. + content_encoding (str, optional): The method that is used to encode the object. + content_length (int, optional): The size of the data in the HTTP message body. Unit: bytes. 
+            content_md5 (str, optional): The MD5 hash of the object that you want to upload.
+            content_type (str, optional): A standard MIME type describing the format of the contents.
+            expires (str, optional): The expiration time of the cache in UTC.
+            server_side_encryption (str, optional): The encryption method on the server side when an object is created.
+                Valid values: AES256 and KMS
+            server_side_data_encryption (str, optional): The algorithm that is used to encrypt the object.
+                Valid value: SM4. This header is valid only
+                when the x-oss-server-side-encryption header is set to KMS.
+            sse_kms_key_id (str, optional): The ID of the customer master key (CMK) that is managed by Key Management Service (KMS).
+            tagging (str, optional): The tags that are specified for the object by using a key-value pair.
+                You can specify multiple tags for an object. Example: TagA=A&TagB=B.
+            forbid_overwrite (bool, optional): Specifies whether the object that is uploaded by calling the InitiateMultipartUpload operation
+                overwrites an existing object that has the same name.
+            request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs.
+            cse_data_size (int, optional): The total size when using client side encryption.
+                Only valid in EncryptionClient.
+            cse_part_size (int, optional): The part size when using client side encryption.
+                Only valid in EncryptionClient.
+                It must be aligned to the secret iv length.
+        """
+        super().__init__(**kwargs)
+        self.bucket = bucket
+        self.key = key
+        self.encoding_type = encoding_type
+        self.storage_class = storage_class
+        self.metadata = metadata
+        self.cache_control = cache_control
+        self.content_disposition = content_disposition
+        self.content_encoding = content_encoding
+        self.content_length = content_length
+        self.content_md5 = content_md5
+        self.content_type = content_type
+        self.expires = expires
+        self.server_side_encryption = server_side_encryption
+        self.server_side_data_encryption = server_side_data_encryption
+        self.sse_kms_key_id = sse_kms_key_id
+        self.tagging = tagging
+        self.forbid_overwrite = forbid_overwrite
+        self.request_payer = request_payer
+        self.cse_data_size = cse_data_size
+        self.cse_part_size = cse_part_size
+
+class InitiateMultipartUploadResult(serde.ResultModel):
+    """The result for the InitiateMultipartUpload operation."""
+
+    _attribute_map = {
+        "bucket": {"tag": "xml", "rename": "Bucket"},
+        "key": {"tag": "xml", "rename": "Key"},
+        "upload_id": {"tag": "xml", "rename": "UploadId"},
+        "encoding_type": {"tag": "xml", "rename": "EncodingType"},
+        "cse_multipart_context": {},
+    }
+
+    _xml_map = {
+        "name": "InitiateMultipartUploadResult"
+    }
+
+    def __init__(
+        self,
+        bucket: Optional[str] = None,
+        key: Optional[str] = None,
+        upload_id: Optional[str] = None,
+        encoding_type: Optional[str] = None,
+        cse_multipart_context: Optional[Any] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        Args:
+            bucket (str, optional): The name of the bucket to which the object is uploaded by the multipart upload task.
+            key (str, optional): The name of the object that is uploaded by the multipart upload task.
+            upload_id (str, optional): The upload ID that uniquely identifies the multipart upload task.
+            encoding_type (str, optional): The encoding type of the object names in the response.
+                Valid value: url
+            cse_multipart_context (Any, optional): The encryption context for multipart upload when using client side encryption,
+                only valid in EncryptionClient
+        """
+        super().__init__(**kwargs)
+        self.bucket = bucket
+        self.key = key
+        self.upload_id = upload_id
+        self.encoding_type = encoding_type
+        self.cse_multipart_context = cse_multipart_context
+
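This request/result pair is the entry point of the multipart flow: InitiateMultipartUpload returns the `upload_id` that every subsequent part operation must carry. A rough sketch of the first half of that flow, under the same assumptions as the earlier example (`client` configured as before; method names assumed to mirror the operation names):

```python
# Start the multipart upload; the returned upload_id ties all parts together.
init = client.initiate_multipart_upload(oss.InitiateMultipartUploadRequest(
    bucket="examplebucket",
    key="exampleobject",
))

part_size = 5 * 1024 * 1024  # every part except the last must be at least 100 KB
parts = []  # collected (part_number, etag) pairs for the completion step
with open("/path/to/local-file", "rb") as f:
    part_number = 1
    while True:
        chunk = f.read(part_size)
        if not chunk:
            break
        up = client.upload_part(oss.UploadPartRequest(
            bucket="examplebucket",
            key="exampleobject",
            upload_id=init.upload_id,
            part_number=part_number,
            body=chunk,
        ))
        parts.append((part_number, up.etag))
        part_number += 1
```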
+class UploadPartRequest(serde.RequestModel):
+    """The request for the UploadPart operation."""
+
+    _attribute_map = {
+        "bucket": {"tag": "input", "position": "host", "required": True},
+        "key": {"tag": "input", "position": "path", "required": True},
+        "part_number": {"tag": "input", "position": "query", "rename": "partNumber", "required": True},
+        "upload_id": {"tag": "input", "position": "query", "rename": "uploadId", "required": True},
+        "content_md5": {"tag": "input", "position": "header", "rename": "Content-MD5"},
+        "content_length": {"tag": "input", "position": "header", "rename": "Content-Length", "type": "int"},
+        "traffic_limit": {"tag": "input", "position": "header", "rename": "x-oss-traffic-limit", "type": "int"},
+        "body": {"tag": "input", "position": "body"},
+        "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"},
+        "progress_fn": {},
+        "cse_multipart_context": {},
+    }
+
+    def __init__(
+        self,
+        bucket: str = None,
+        key: str = None,
+        part_number: int = None,
+        upload_id: str = None,
+        content_md5: Optional[str] = None,
+        content_length: Optional[int] = None,
+        traffic_limit: Optional[int] = None,
+        body: Optional[BodyType] = None,
+        request_payer: Optional[str] = None,
+        progress_fn: Optional[Any] = None,
+        cse_multipart_context: Optional[Any] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        Args:
+            bucket (str, required): The name of the bucket.
+            key (str, required): The name of the object.
+            part_number (int, required): Each uploaded part is identified by a number. Valid values: 1 to 10000.
+                The size limit of a single part is between 100 KB and 5 GB.
+            upload_id (str, required): The ID of the multipart upload task.
+            content_md5 (str, optional): The MD5 hash of the object that you want to upload.
+            content_length (int, optional): The size of the data in the HTTP message body. Unit: bytes.
+            traffic_limit (int, optional): Specify the speed limit value. The speed limit value ranges from 245760 to 838860800, with a unit of bit/s.
+            body (BodyType, optional): Object data.
+            request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs.
+            progress_fn (Any, optional): Progress callback function.
+            cse_multipart_context (Any, optional): The encryption context for multipart upload when using client side encryption.
+ """ + super().__init__(**kwargs) + self.bucket = bucket + self.key = key + self.part_number = part_number + self.upload_id = upload_id + self.content_md5 = content_md5 + self.content_length = content_length + self.traffic_limit = traffic_limit + self.body = body + self.request_payer = request_payer + self.progress_fn = progress_fn + self.cse_multipart_context = cse_multipart_context + + +class UploadPartResult(serde.ResultModel): + """The result for the UploadPart operation.""" + + _attribute_map = { + "content_md5": {"tag": "output", "position": "header", "rename": "Content-MD5"}, + "etag": {"tag": "output", "position": "header", "rename": "ETag"}, + "hash_crc64": {"tag": "output", "position": "header", "rename": "x-oss-hash-crc64ecma"}, + } + + def __init__( + self, + content_md5: Optional[str] = None, + etag: Optional[str] = None, + hash_crc64: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + Args: + content_md5 (str, optional): Entity tag for the uploaded part. + etag (str, optional): The MD5 hash of the part that you want to upload. + hash_crc64 (str, optional): The 64-bit CRC value of the part. + This value is calculated based on the ECMA-182 standard. + """ + super().__init__(**kwargs) + self.content_md5 = content_md5 + self.etag = etag + self.hash_crc64 = hash_crc64 + +class UploadPartCopyRequest(serde.RequestModel): + """The request for the UploadPartCopy operation.""" + + _attribute_map = { + "bucket": {"tag": "input", "position": "host", "required": True}, + "key": {"tag": "input", "position": "path", "required": True}, + "part_number": {"tag": "input", "position": "query", "rename": "partNumber", "required": True}, + "upload_id": {"tag": "input", "position": "query", "rename": "uploadId", "required": True}, + "source_bucket": {"tag": "input", "position": "nop"}, + "source_key": {"tag": "input", "position": "nop", "required": True}, + "source_version_id": {"tag": "input", "position": "nop"}, + "source_range": {"tag": "input", "position": "header", "rename": "x-oss-copy-source-range"}, + "if_match": {"tag": "input", "position": "header", "rename": "x-oss-copy-source-if-match"}, + "if_none_match": {"tag": "input", "position": "header", "rename": "x-oss-copy-source-if-none-match"}, + "if_modified_since": {"tag": "input", "position": "header", "rename": "x-oss-copy-source-if-modified-since"}, + "if_unmodified_since": {"tag": "input", "position": "header", "rename": "x-oss-copy-source-if-unmodified-since"}, + "traffic_limit": {"tag": "input", "position": "header", "rename": "x-oss-traffic-limit", "type": "int"}, + "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"}, + } + + def __init__( + self, + bucket: str = None, + key: str = None, + part_number: Optional[int] = None, + upload_id: Optional[str] = None, + source_bucket: Optional[str] = None, + source_key: Optional[str] = None, + source_version_id: Optional[str] = None, + source_range: Optional[str] = None, + if_match: Optional[str] = None, + if_none_match: Optional[str] = None, + if_modified_since: Optional[str] = None, + if_unmodified_since: Optional[str] = None, + traffic_limit: Optional[int] = None, + request_payer: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + Args: + bucket (str, required): The name of the bucket. + key (str, required): The name of the object. + part_number (int, optional): Each uploaded part is identified by a number, Value: 1-10000. + The size limit of a single part is between 100 KB and 5 GB. 
+            upload_id (str, optional): The ID of the multipart upload task.
+            source_bucket (str, optional): The name of the source bucket.
+            source_key (str, required): The name of the source object.
+            source_version_id (str, optional): The version ID of the source object.
+            source_range (str, optional): The range of bytes to copy data from the source object.
+            if_match (str, optional): If the ETag specified in the request matches the ETag value of the source object,
+                the copy operation is performed. Otherwise, 412 Precondition Failed is returned.
+            if_none_match (str, optional): If the ETag specified in the request does not match the ETag value of the object,
+                the object and 200 OK are returned. Otherwise, 304 Not Modified is returned.
+            if_modified_since (str, optional): If the time specified in this header is earlier than
+                the object modified time or is invalid, the object and 200 OK are returned.
+                Otherwise, 304 Not Modified is returned. The time must be in GMT. Example: Fri, 13 Nov 2015 14:47:53 GMT.
+            if_unmodified_since (str, optional): If the time specified in this header is the same as or later than
+                the object modified time, the object and 200 OK are returned. Otherwise, 412 Precondition Failed is returned.
+                The time must be in GMT. Example: Fri, 13 Nov 2015 14:47:53 GMT.
+            traffic_limit (int, optional): Specify the speed limit value. The speed limit value ranges from 245760 to 838860800, with a unit of bit/s.
+            request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs.
+        """
+        super().__init__(**kwargs)
+        self.bucket = bucket
+        self.key = key
+        self.part_number = part_number
+        self.upload_id = upload_id
+        self.source_bucket = source_bucket
+        self.source_key = source_key
+        self.source_version_id = source_version_id
+        self.source_range = source_range
+        self.if_match = if_match
+        self.if_none_match = if_none_match
+        self.if_modified_since = if_modified_since
+        self.if_unmodified_since = if_unmodified_since
+        self.traffic_limit = traffic_limit
+        self.request_payer = request_payer
+
+
+class UploadPartCopyResult(serde.ResultModel):
+    """The result for the UploadPartCopy operation."""
+
+    _attribute_map = {
+        "last_modified": {"tag": "xml", "rename": "LastModified", "type": "datetime"},
+        "etag": {"tag": "xml", "rename": "ETag"},
+        "source_version_id": {"tag": "output", "position": "header", "rename": "x-oss-copy-source-version-id"},
+    }
+
+    _xml_map = {
+        "name": "CopyPartResult"
+    }
+
+    def __init__(
+        self,
+        last_modified: Optional[datetime.datetime] = None,
+        etag: Optional[str] = None,
+        source_version_id: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        Args:
+            last_modified (datetime, optional): The time when the copied part was last modified.
+            etag (str, optional): Entity tag for the copied part.
+            source_version_id (str, optional): The version ID of the source object.
+        """
+        super().__init__(**kwargs)
+        self.last_modified = last_modified
+        self.etag = etag
+        self.source_version_id = source_version_id
+
+
+class UploadPart(serde.Model):
+    """The information about the content of the Part."""
+
+    def __init__(
+        self,
+        part_number: Optional[int] = None,
+        etag: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        part_number (int, optional): The number that identifies the part.
+        etag (str, optional): The ETag values that are returned by OSS after parts are uploaded.
+ """ + super().__init__(**kwargs) + self.part_number = part_number + self.etag = etag + + _attribute_map = { + "part_number": {"tag": "xml", "rename": "PartNumber"}, + "etag": {"tag": "xml", "rename": "ETag"}, + } + _xml_map = { + "name": "Part" + } + + +class CompleteMultipartUpload(serde.Model): + """The container that stores the information about the uploaded parts.""" + + def __init__( + self, + parts: Optional[List[UploadPart]] = None, + **kwargs: Any + ) -> None: + """ + parts ([UploadPart], optional): The uploaded parts. + """ + super().__init__(**kwargs) + self.parts = parts + + _attribute_map = { + "parts": {"tag": "xml", "rename": "Part"}, + } + _xml_map = { + "name": "CompleteMultipartUpload" + } + + +class CompleteMultipartUploadRequest(serde.RequestModel): + """The request for the CompleteMultipartUpload operation.""" + + _attribute_map = { + "bucket": {"tag": "input", "position": "host", "required": True}, + "key": {"tag": "input", "position": "path", "required": True}, + "upload_id": {"tag": "input", "position": "query", "rename": "uploadId", "required": True}, + "acl": {"tag": "input", "position": "header", "rename": "x-oss-object-acl"}, + "complete_multipart_upload": {"tag": "input", "position": "body", "rename": "CompleteMultipartUpload", "type": "xml"}, + "complete_all": {"tag": "input", "position": "header", "rename": "x-oss-complete-all"}, + "callback": {"tag": "input", "position": "header", "rename": "x-oss-callback"}, + "callback_var": {"tag": "input", "position": "header", "rename": "x-oss-callback-var"}, + "forbid_overwrite": {"tag": "input", "position": "header", "rename": "x-oss-forbid-overwrite", "type": "bool"}, + "encoding_type": {"tag": "input", "position": "query", "rename": "encoding-type"}, + "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"}, + } + + def __init__( + self, + bucket: str = None, + key: str = None, + upload_id: str = None, + acl: Optional[str] = None, + complete_multipart_upload: Optional[CompleteMultipartUpload] = None, + complete_all: Optional[str] = None, + callback: Optional[str] = None, + callback_var: Optional[str] = None, + forbid_overwrite: Optional[bool] = None, + encoding_type: Optional[bool] = None, + request_payer: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + Args: + bucket (str, required): The name of the bucket. + key (str, required): The name of the object. + upload_id (str, optional): The ID of the multipart upload task. + acl (str, optional): The access control list (ACL) of the object. + complete_multipart_upload (CompleteMultipartUpload, optional): The container that stores the content of the CompleteMultipartUpload + complete_all (str, optional): Specifies whether to list all parts that are uploaded by using the current upload ID. + Valid value: yes + callback (str, optional): A callback parameter is a Base64-encoded string that contains multiple fields in the JSON format. + callback_var (str, optional): Configure custom parameters by using the callback-var parameter. + forbid_overwrite (bool, optional): Specifies whether the object that is uploaded by calling the PutObject operation + overwrites an existing object that has the same name. + encoding_type (str, optional): The encoding type of the object names in the response. Valid value: url. + request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs. 
+ """ + super().__init__(**kwargs) + self.bucket = bucket + self.key = key + self.upload_id = upload_id + self.acl = acl + self.complete_multipart_upload = complete_multipart_upload + self.complete_all = complete_all + self.callback = callback + self.callback_var = callback_var + self.forbid_overwrite = forbid_overwrite + self.encoding_type = encoding_type + self.request_payer = request_payer + + +class CompleteMultipartUploadResult(serde.ResultModel): + """The result for the CompleteMultipartUpload operation.""" + + _attribute_map = { + "bucket": {"tag": "xml", "rename": "Bucket"}, + "key": {"tag": "xml", "rename": "Key"}, + "location": {"tag": "xml", "rename": "Location"}, + "etag": {"tag": "xml", "rename": "ETag"}, + "encoding_type": {"tag": "xml", "rename": "EncodingType"}, + "hash_crc64": {"tag": "output", "position": "header", "rename": "x-oss-hash-crc64ecma"}, + "version_id": {"tag": "output", "position": "header", "rename": "x-oss-version-id"}, + "callback_result": {"tag": "output", "position": "body", "type": "dict,json"}, + } + + def __init__( + self, + bucket: Optional[str] = None, + key: Optional[str] = None, + location: Optional[str] = None, + etag: Optional[str] = None, + encoding_type: Optional[str] = None, + hash_crc64: Optional[str] = None, + version_id: Optional[str] = None, + callback_result: Optional[Dict] = None, + **kwargs: Any + ) -> None: + """ + Args: + bucket (str, optional): The name of the bucket. + key (str, optional): The name of the uploaded object. + location (str, optional): The URL that is used to access the uploaded object. + etag (str, optional): The ETag that is generated when an object is created. + ETags are used to identify the content of objects. + encoding_type (str, optional): The encoding type of the name of the deleted object in the response. + If encoding-type is specified in the request, the object name is encoded in the returned result. + hash_crc64 (str, optional): The 64-bit CRC value of the object. + This value is calculated based on the ECMA-182 standard. + version_id (str, optional): Version of the object. + callback_result (dict, optional): Callback result, + it is valid only when the callback is set. + """ + super().__init__(**kwargs) + self.bucket = bucket + self.key = key + self.location = location + self.etag = etag + self.encoding_type = encoding_type + self.hash_crc64 = hash_crc64 + self.version_id = version_id + self.callback_result = callback_result + + +class AbortMultipartUploadRequest(serde.RequestModel): + """The request for the AbortMultipartUpload operation.""" + + _attribute_map = { + "bucket": {"tag": "input", "position": "host", "required": True}, + "key": {"tag": "input", "position": "path", "required": True}, + "upload_id": {"tag": "input", "position": "query", "rename": "uploadId", "required": True}, + "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"}, + } + + def __init__( + self, + bucket: str = None, + key: str = None, + upload_id: str = None, + request_payer: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + Args: + bucket (str, required): The name of the bucket. + key (str, required): The name of the object. + upload_id (str, optional): The ID of the multipart upload task. + request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs. 
+ """ + super().__init__(**kwargs) + self.bucket = bucket + self.key = key + self.upload_id = upload_id + self.request_payer = request_payer + + +class AbortMultipartUploadResult(serde.ResultModel): + """The result for the AbortMultipartUpload operation.""" + + +class ListMultipartUploadsRequest(serde.RequestModel): + """The request for the ListMultipartUploads operation.""" + + _attribute_map = { + "bucket": {"tag": "input", "position": "host", "required": True}, + "delimiter": {"tag": "input", "position": "query", "rename": "delimiter"}, + "encoding_type": {"tag": "input", "position": "query", "rename": "encoding-type"}, + "key_marker": {"tag": "input", "position": "query", "rename": "key-marker"}, + "max_uploads": {"tag": "input", "position": "query", "rename": "max-uploads", "type": "int"}, + "prefix": {"tag": "input", "position": "query", "rename": "prefix"}, + "upload_id_marker": {"tag": "input", "position": "query", "rename": "upload-id-marker"}, + "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"}, + } + + def __init__( + self, + bucket: str = None, + delimiter: Optional[str] = None, + encoding_type: Optional[str] = None, + key_marker: Optional[str] = None, + max_uploads: Optional[int] = None, + prefix: Optional[str] = None, + upload_id_marker: Optional[str] = None, + request_payer: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + bucket (str, required): The name of the bucket to create. + delimiter (str, optional): The character that is used to group objects by name. + If you specify the delimiter parameter in the request, the response contains + the CommonPrefixes parameter. The objects whose names contain the same string + from the prefix to the next occurrence of the delimiter are grouped + as a single result element in CommonPrefixes. + encoding_type (str, optional): The encoding type of the content in the response. Valid value: url + key_marker (str, optional): This parameter is used together with the upload-id-marker parameter to specify + the position from which the next list begins. + max_uploads (int, optional): The maximum number of multipart upload tasks that can be returned for the current request. + Default value: 1000. Maximum value: 1000. + prefix (str, optional): The prefix that the names of the returned objects must contain. + upload_id_marker (str, optional): The upload ID of the multipart upload task after which the list begins. + This parameter is used together with the key-marker parameter. + request_payer (str, optional): To indicate that the requester is aware that the request + and data download will incur costs + """ + super().__init__(**kwargs) + self.bucket = bucket + self.delimiter = delimiter + self.encoding_type = encoding_type + self.key_marker = key_marker + self.max_uploads = max_uploads + self.prefix = prefix + self.upload_id_marker = upload_id_marker + self.request_payer = request_payer + + +class Upload(serde.Model): + """The inforamtion about the upload task was initiated.""" + + def __init__( + self, + key: Optional[str] = None, + upload_id: Optional[str] = None, + initiated: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """ + key (str, optional): The name of the object for which a multipart upload task was initiated. + upload_id (str, optional): The ID of the multipart upload task. + initiated (str, optional): The time when the multipart upload task was initialized. 
+ """ + super().__init__(**kwargs) + self.key = key + self.upload_id = upload_id + self.initiated = initiated + + _attribute_map = { + "key": {"tag": "xml", "rename": "Key"}, + "upload_id": {"tag": "xml", "rename": "UploadId"}, + "initiated": {"tag": "xml", "rename": "Initiated", "type": "datetime"}, + } + _xml_map = { + "name": "Upload" + } + + +class ListMultipartUploadsResult(serde.ResultModel): + """The result for the ListBuckets operation.""" + + _attribute_map = { + "encoding_type": {"tag": "xml", "rename": "EncodingType"}, + "bucket": {"tag": "xml", "rename": "Bucket"}, + "key_marker": {"tag": "xml", "rename": "KeyMarker"}, + "upload_id_marker": {"tag": "xml", "rename": "UploadIdMarker"}, + "next_key_marker": {"tag": "xml", "rename": "NextKeyMarker"}, + "next_upload_id_marker": {"tag": "xml", "rename": "NextUploadIdMarker"}, + "delimiter": {"tag": "xml", "rename": "Delimiter"}, + "prefix": {"tag": "xml", "rename": "Prefix"}, + "max_uploads": {"tag": "xml", "rename": "MaxUploads", "type": "int"}, + "is_truncated": {"tag": "xml", "rename": "IsTruncated", "type": "bool"}, + "uploads": {"tag": "xml", "rename": "Upload", "type": "[Upload]"}, + } + + _dependency_map = { + "Upload": {"new": lambda: Upload()}, + } + + _xml_map = {"name":"ListMultipartUploadsResult"} + + def __init__( + self, + encoding_type: Optional[str] = None, + bucket: Optional[str] = None, + key_marker: Optional[str] = None, + upload_id_marker: Optional[str] = None, + next_key_marker: Optional[str] = None, + next_upload_id_marker: Optional[str] = None, + delimiter: Optional[str] = None, + prefix: Optional[str] = None, + max_uploads: Optional[int] = None, + is_truncated: Optional[bool] = None, + uploads: Optional[List[Upload]] = None, + **kwargs: Any + ) -> None: + """ + encoding_type (str, optional): The method used to encode the object name in the response. + If encoding-type is specified in the request, values of those elements including + Delimiter, KeyMarker, Prefix, NextKeyMarker, and Key are encoded in the returned result. + bucket (str, optional): The name of the bucket. + key_marker (str, optional): The name of the object that corresponds to the multipart upload task after which the list begins. + upload_id_marker (str, optional): The upload ID of the multipart upload task after which the list begins. + next_key_marker (str, optional): The upload ID of the multipart upload task after which the list begins. + next_upload_id_marker (str, optional): The NextUploadMarker value that is used for the UploadMarker value in + the next request if the response does not contain all required results. + delimiter (str, optional): The character that is used to group objects by name. + prefix (str, optional): The prefix contained in the returned object names. + max_uploads (int, optional): The maximum number of multipart upload tasks returned by OSS. + is_truncated (bool, optional): Indicates whether the returned results are truncated. + true indicates that not all results are returned this time. + false indicates that all results are returned this time. + uploads ([Upload], optional): The container that stores information about upload task. 
+ """ + super().__init__(**kwargs) + self.encoding_type = encoding_type + self.bucket = bucket + self.key_marker = key_marker + self.upload_id_marker = upload_id_marker + self.next_key_marker = next_key_marker + self.next_upload_id_marker = next_upload_id_marker + self.delimiter = delimiter + self.prefix = prefix + self.max_uploads = max_uploads + self.is_truncated = is_truncated + self.uploads = uploads + +class ListPartsRequest(serde.RequestModel): + """The request for the ListParts operation.""" + + _attribute_map = { + "bucket": {"tag": "input", "position": "host", "required": True}, + "key": {"tag": "input", "position": "path", "required": True}, + "upload_id": {"tag": "input", "position": "query", "rename": "uploadId", "required": True}, + "encoding_type": {"tag": "input", "position": "query", "rename": "encoding-type"}, + "max_parts": {"tag": "input", "position": "query", "rename": "max-parts", "type": "int"}, + "part_mumber_marker": {"tag": "input", "position": "query", "rename": "part-number-marker"}, + "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"}, + } + + def __init__( + self, + bucket: str = None, + key: str = None, + upload_id: str = None, + encoding_type: Optional[str] = None, + max_parts: Optional[int] = None, + part_mumber_marker: Optional[str] = None, + request_payer: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + bucket (str, required): The name of the bucket. + key (str, required): The name of the object. + upload_id (str, required): The ID of the multipart upload task. + encoding_type (str, optional): The encoding type of the content in the response. Valid value: url + max_parts (int, optional): The maximum number of parts that can be returned by OSS. + Default value: 1000. Maximum value: 1000. + part_mumber_marker (str, optional): The position from which the list starts. + All parts whose part numbers are greater than the value of this parameter are listed. + request_payer (str, optional): To indicate that the requester is aware that the request + and data download will incur costs + """ + super().__init__(**kwargs) + self.bucket = bucket + self.key = key + self.upload_id = upload_id + self.encoding_type = encoding_type + self.max_parts = max_parts + self.part_mumber_marker = part_mumber_marker + self.request_payer = request_payer + + +class Part(serde.Model): + """The inforamtion about the uploaded part.""" + + def __init__( + self, + part_number: Optional[int] = None, + etag: Optional[str] = None, + last_modified: Optional[datetime.datetime] = None, + size: Optional[int] = None, + hash_crc64: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + part_number (int, optional): The number that identifies a part. + etag (str, optional): The ETag value of the content of the uploaded part. + last_modified (datetime, optional): The time when the part was uploaded. + size (int, optional): The size of the uploaded parts. + hash_crc64 (str, optional): The 64-bit CRC value of the object. + This value is calculated based on the ECMA-182 standard. 
+ """ + super().__init__(**kwargs) + self.part_number = part_number + self.etag = etag + self.last_modified = last_modified + self.size = size + self.hash_crc64 = hash_crc64 + + _attribute_map = { + "part_number": {"tag": "xml", "rename": "PartNumber", "type": "int"}, + "etag": {"tag": "xml", "rename": "ETag"}, + "last_modified": {"tag": "xml", "rename": "LastModified", "type": "datetime"}, + "size": {"tag": "xml", "rename": "Size", "type": "int"}, + "hash_crc64": {"tag": "xml", "rename": "HashCrc64ecma"}, + } + _xml_map = { + "name": "Part" + } + + +class ListPartsResult(serde.ResultModel): + """The result for the ListParts operation.""" + + _attribute_map = { + "encoding_type": {"tag": "xml", "rename": "EncodingType"}, + "bucket": {"tag": "xml", "rename": "Bucket"}, + "key": {"tag": "xml", "rename": "Key"}, + "upload_id": {"tag": "xml", "rename": "UploadId"}, + "part_number_marker": {"tag": "xml", "rename": "PartNumberMarker", "type": "int"}, + "next_part_number_marker": {"tag": "xml", "rename": "NextPartNumberMarker", "type": "int"}, + "max_parts": {"tag": "xml", "rename": "MaxParts", "type": "int"}, + "is_truncated": {"tag": "xml", "rename": "IsTruncated", "type": "bool"}, + "storage_class": {"tag": "xml", "rename": "StorageClass"}, + "client_encryption_key": {"tag": "xml", "rename": "ClientEncryptionKey"}, + "client_encryption_start": {"tag": "xml", "rename": "ClientEncryptionStart"}, + "client_encryption_cek_alg": {"tag": "xml", "rename": "ClientEncryptionCekAlg"}, + "client_encryption_wrap_alg": {"tag": "xml", "rename": "ClientEncryptionWrapAlg"}, + "client_encryption_data_size": {"tag": "xml", "rename": "ClientEncryptionDataSize", "type": "int"}, + "client_encryption_part_size": {"tag": "xml", "rename": "ClientEncryptionPartSize", "type": "int"}, + "parts": {"tag": "xml", "rename": "Part", "type": "[Part]"}, + } + + _dependency_map = { + "Part": {"new": lambda: Part()}, + } + + _xml_map = {"name":"ListPartsResult"} + + def __init__( + self, + encoding_type: Optional[str] = None, + bucket: Optional[str] = None, + key: Optional[str] = None, + upload_id: Optional[str] = None, + part_number_marker: Optional[int] = None, + next_part_number_marker: Optional[int] = None, + max_parts: Optional[int] = None, + is_truncated: Optional[bool] = None, + storage_class: Optional[str] = None, + client_encryption_key: Optional[str] = None, + client_encryption_start: Optional[str] = None, + client_encryption_cek_alg: Optional[str] = None, + client_encryption_wrap_alg: Optional[str] = None, + client_encryption_data_size: Optional[int] = None, + client_encryption_part_size: Optional[int] = None, + parts: Optional[List[Part]] = None, + **kwargs: Any + ) -> None: + """ + encoding_type (str, optional): The method used to encode the object name in the response. + If encoding-type is specified in the request, values of those elements including + Key are encoded in the returned result. + bucket (str, optional): The name of the bucket. + key (str, optional): The name of the object that corresponds to the multipart upload task after which the list begins. + upload_id (str, optional): The ID of the upload task. + part_number_marker (int, optional): The position from which the list starts. + All parts whose part numbers are greater than the value of this parameter are listed. + next_part_number_marker (int, optional): The NextPartNumberMarker value that is used for the PartNumberMarker value + in a subsequent request when the response does not contain all required results. 
+class PutSymlinkRequest(serde.RequestModel):
+    """The request for the PutSymlink operation."""
+
+    _attribute_map = {
+        "bucket": {"tag": "input", "position": "host", "required": True},
+        "key": {"tag": "input", "position": "path", "required": True},
+        "target": {"tag": "input", "position": "header", "rename": "x-oss-symlink-target", "required": True},
+        "acl": {"tag": "input", "position": "header", "rename": "x-oss-object-acl"},
+        "storage_class": {"tag": "input", "position": "header", "rename": "x-oss-storage-class"},
+        "metadata": {"tag": "input", "position": "header", "rename": "x-oss-meta-", "type": "dict,usermeta"},
+        "forbid_overwrite": {"tag": "input", "position": "header", "rename": "x-oss-forbid-overwrite", "type": "bool"},
+        "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"},
+    }
+
+    def __init__(
+        self,
+        bucket: str = None,
+        key: str = None,
+        target: str = None,
+        acl: Optional[str] = None,
+        storage_class: Optional[str] = None,
+        metadata: Optional[MutableMapping] = None,
+        forbid_overwrite: Optional[bool] = None,
+        request_payer: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        Args:
+            bucket (str, required): The name of the bucket.
+            key (str, required): The name of the object.
+            target (str, required): The destination object to which the symbolic link points.
+            acl (str, optional): The access control list (ACL) of the object.
+            storage_class (str, optional): The storage class of the object.
+            metadata (MutableMapping, optional): The metadata of the object that you want to upload.
+            forbid_overwrite (bool, optional): Specifies whether the object that is uploaded by calling the PutSymlink operation
+                overwrites an existing object that has the same name.
+            request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs.
+        """
+        super().__init__(**kwargs)
+        self.bucket = bucket
+        self.key = key
+        self.target = target
+        self.acl = acl
+        self.storage_class = storage_class
+        self.metadata = metadata
+        self.forbid_overwrite = forbid_overwrite
+        self.request_payer = request_payer
+
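A symlink round trip is the simplest way to see this request in context. Hedged as before: the `put_symlink`/`get_symlink` method names and object keys are assumptions.

```python
# Create a symbolic link, then resolve it back to its target.
client.put_symlink(oss.PutSymlinkRequest(
    bucket="examplebucket",
    key="exampleobject-link",
    target="exampleobject",
))
sym = client.get_symlink(oss.GetSymlinkRequest(
    bucket="examplebucket",
    key="exampleobject-link",
))
print(sym.target, sym.etag)
```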
+class PutSymlinkResult(serde.ResultModel):
+    """The result for the PutSymlink operation."""
+
+    _attribute_map = {
+        "version_id": {"tag": "output", "position": "header", "rename": "x-oss-version-id"},
+    }
+
+    def __init__(
+        self,
+        version_id: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        Args:
+            version_id (str, optional): Version of the object.
+        """
+        super().__init__(**kwargs)
+        self.version_id = version_id
+
+class GetSymlinkRequest(serde.RequestModel):
+    """The request for the GetSymlink operation."""
+
+    _attribute_map = {
+        "bucket": {"tag": "input", "position": "host", "required": True},
+        "key": {"tag": "input", "position": "path", "required": True},
+        "version_id": {"tag": "input", "position": "query", "rename": "versionId"},
+        "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"},
+    }
+
+    def __init__(
+        self,
+        bucket: str = None,
+        key: str = None,
+        version_id: Optional[str] = None,
+        request_payer: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        Args:
+            bucket (str, required): The name of the bucket.
+            key (str, required): The name of the object.
+            version_id (str, optional): VersionId used to reference a specific version of the object.
+            request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs.
+        """
+        super().__init__(**kwargs)
+        self.bucket = bucket
+        self.key = key
+        self.version_id = version_id
+        self.request_payer = request_payer
+
+
+class GetSymlinkResult(serde.ResultModel):
+    """The result for the GetSymlink operation."""
+
+    _attribute_map = {
+        "version_id": {"tag": "output", "position": "header", "rename": "x-oss-version-id"},
+        "target": {"tag": "output", "position": "header", "rename": "x-oss-symlink-target"},
+        "etag": {"tag": "output", "position": "header", "rename": "ETag"},
+        "metadata": {"tag": "output", "position": "header", "rename": "x-oss-meta-", "type": "dict,usermeta"},
+    }
+
+    def __init__(
+        self,
+        version_id: Optional[str] = None,
+        target: Optional[str] = None,
+        etag: Optional[str] = None,
+        metadata: Optional[MutableMapping] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        Args:
+            version_id (str, optional): Version of the object.
+            target (str, optional): Indicates the target object that the symbolic link points to.
+            etag (str, optional): The entity tag (ETag).
+                An ETag is created when an object is created to identify the content of the object.
+            metadata (MutableMapping, optional): A map of metadata to store with the object.
+ """ + super().__init__(**kwargs) + self.version_id = version_id + self.target = target + self.etag = etag + self.metadata = metadata + +class Tag(serde.Model): + """The inforamtion about the tag.""" + + def __init__( + self, + key: Optional[str] = None, + value: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + key (str, optional): The key of the tag. + value (str, optional): The value of the tag. + """ + super().__init__(**kwargs) + self.key = key + self.value = value + + _attribute_map = { + "key": {"tag": "xml", "rename": "Key"}, + "value": {"tag": "xml", "rename": "Value"}, + } + _xml_map = { + "name": "Tag" + } + +class TagSet(serde.Model): + """The collection of tags.""" + + def __init__( + self, + tags: Optional[List[Tag]] = None, + **kwargs: Any + ) -> None: + """ + tags ([Tag], optional): A list of tags. + """ + super().__init__(**kwargs) + self.tags = tags + + _attribute_map = { + "tags": {"tag": "xml", "rename": "Tag", "type": "[Tag]"}, + } + + _dependency_map = { + "Tag": {"new": lambda: Tag()}, + } + + _xml_map = { + "name": "TagSet" + } + +class Tagging(serde.Model): + """The container used to store the collection of tags.""" + + def __init__( + self, + tag_set: Optional[TagSet] = None, + **kwargs: Any + ) -> None: + """ + tag_set (TagSet, optional): The collection of tags. + """ + super().__init__(**kwargs) + self.tag_set = tag_set + + _attribute_map = { + "tag_set": {"tag": "xml", "rename": "TagSet", "type": "TagSet"}, + } + + _dependency_map = { + "TagSet": {"new": lambda: TagSet()}, + } + + _xml_map = { + "name": "Tagging" + } + + +class PutObjectTaggingRequest(serde.RequestModel): + """The request for the PutObjectTagging operation.""" + + _attribute_map = { + "bucket": {"tag": "input", "position": "host", "required": True}, + "key": {"tag": "input", "position": "path", "required": True}, + "tagging": {"tag": "input", "position": "body", "rename": "Tagging", "type": "xml", "required": True}, + "version_id": {"tag": "input", "position": "query", "rename": "versionId"}, + "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"}, + } + + def __init__( + self, + bucket: str = None, + key: str = None, + tagging: Tagging = None, + version_id: Optional[str] = None, + request_payer: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + Args: + bucket (str, required): The name of the bucket. + key (str, required): The name of the object. + tagging (Tagging, required): The container used to store the collection of tags. + version_id (str, optional): Version of the object. + request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs. + """ + super().__init__(**kwargs) + self.bucket = bucket + self.key = key + self.tagging = tagging + self.version_id = version_id + self.request_payer = request_payer + + +class PutObjectTaggingResult(serde.ResultModel): + """The result for the PutObjectTagging operation.""" + + _attribute_map = { + "version_id": {"tag": "output", "position": "header", "rename": "x-oss-version-id"}, + } + + def __init__( + self, + version_id: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + Args: + version_id (str, optional): Version of the object. 
+ """ + super().__init__(**kwargs) + self.version_id = version_id + + +class GetObjectTaggingRequest(serde.RequestModel): + """The request for the GetObjectTagging operation.""" + + _attribute_map = { + "bucket": {"tag": "input", "position": "host", "required": True}, + "key": {"tag": "input", "position": "path", "required": True}, + "version_id": {"tag": "input", "position": "query", "rename": "versionId"}, + "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"}, + } + + def __init__( + self, + bucket: str = None, + key: str = None, + version_id: Optional[str] = None, + request_payer: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + Args: + bucket (str, required): The name of the bucket. + key (str, required): The name of the object. + version_id (str, optional): VersionId used to reference a specific version of the object. + request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs. + """ + super().__init__(**kwargs) + self.bucket = bucket + self.key = key + self.version_id = version_id + self.request_payer = request_payer + + +class GetObjectTaggingResult(serde.ResultModel): + """The result for the GetObjectTagging operation.""" + + def __init__( + self, + version_id: Optional[str] = None, + tags: Optional[List[Tag]] = None, + **kwargs: Any + ) -> None: + """ + Args: + version_id (str, optional): Version of the object. + tags ([Tag], optional): A list of tags. + """ + super().__init__(**kwargs) + self.version_id = version_id + self.tags = tags + + _attribute_map = { + "version_id": {"tag": "output", "position": "header", "rename": "x-oss-version-id"}, + "tags": {"tag": "xml", "rename": "TagSet.Tag", "type": "[Tag]"}, + } + + _dependency_map = { + "Tag": {"new": lambda: Tag()}, + } + + _xml_map = {"name":"Tagging"} + +class DeleteObjectTaggingRequest(serde.RequestModel): + """The request for the DeleteObjectTagging operation.""" + + _attribute_map = { + "bucket": {"tag": "input", "position": "host", "required": True}, + "key": {"tag": "input", "position": "path", "required": True}, + "version_id": {"tag": "input", "position": "query", "rename": "versionId"}, + "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"}, + } + + def __init__( + self, + bucket: str = None, + key: str = None, + version_id: Optional[str] = None, + request_payer: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + Args: + bucket (str, required): The name of the bucket. + key (str, required): The name of the object. + version_id (str, optional): VersionId used to reference a specific version of the object. + request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs. + """ + super().__init__(**kwargs) + self.bucket = bucket + self.key = key + self.version_id = version_id + self.request_payer = request_payer + + +class DeleteObjectTaggingResult(serde.ResultModel): + """The result for the DeleteObjectTagging operation.""" + + def __init__( + self, + version_id: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + Args: + version_id (str, optional): Version of the object. 
+ """ + super().__init__(**kwargs) + self.version_id = version_id + + _attribute_map = { + "version_id": {"tag": "output", "position": "header", "rename": "x-oss-version-id"}, + } + + +class ProcessObjectRequest(serde.RequestModel): + """The request for the ProcessObject operation.""" + + _attribute_map = { + "bucket": {"tag": "input", "position": "host", "required": True}, + "key": {"tag": "input", "position": "path", "required": True}, + "process": {"tag": "input", "position": "nop", "rename": "x-oss-process", "required": True}, + "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"}, + } + + def __init__( + self, + bucket: str = None, + key: str = None, + process: Optional[str] = None, + request_payer: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + Args: + bucket (str, required): The name of the bucket. + key (str, required): The name of the object. + process (str, required): Image processing parameters. + request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs. + """ + super().__init__(**kwargs) + self.bucket = bucket + self.key = key + self.process = process + self.request_payer = request_payer + + +class ProcessObjectResult(serde.ResultModel): + """The result for the ProcessObject operation.""" + + def __init__( + self, + bucket: Optional[str] = None, + file_size: Optional[int] = None, + key: Optional[str] = None, + process_status: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + Args: + bucket (str, optional): The name of the bucket. + file_size (str, optional): The size of the proessed object. + key (str, optional): The name of the proessed object. + process_status (str, optional): The status. + """ + super().__init__(**kwargs) + self.bucket = bucket + self.file_size = file_size + self.key = key + self.process_status = process_status + + _attribute_map = { + "bucket": {"tag": "json", "rename": "bucket"}, + "file_size": {"tag": "json", "rename": "fileSize", "type": "int"}, + "key": {"tag": "json", "rename": "object"}, + "process_status": {"tag": "json", "rename": "status"}, + } + +class AsyncProcessObjectRequest(serde.RequestModel): + """The request for the AsyncProcessObject operation.""" + + _attribute_map = { + "bucket": {"tag": "input", "position": "host", "required": True}, + "key": {"tag": "input", "position": "path", "required": True}, + "process": {"tag": "input", "position": "nop", "rename": "x-async-oss-process", "required": True}, + "request_payer": {"tag": "input", "position": "header", "rename": "x-oss-request-payer"}, + } + + def __init__( + self, + bucket: str = None, + key: str = None, + process: Optional[str] = None, + request_payer: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + Args: + bucket (str, required): The name of the bucket. + key (str, required): The name of the object. + process (str, required): Image async processing parameters. + request_payer (str, optional): To indicate that the requester is aware that the request and data download will incur costs. + """ + super().__init__(**kwargs) + self.bucket = bucket + self.key = key + self.process = process + self.request_payer = request_payer + + +class AsyncProcessObjectResult(serde.ResultModel): + """The result for the AsyncProcessObject operation.""" + + def __init__( + self, + event_id: Optional[str] = None, + task_id: Optional[int] = None, + process_request_id: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + Args: + event_id (str, optional): The id of event. 
diff --git a/alibabacloud_oss_v2/models/region.py b/alibabacloud_oss_v2/models/region.py
new file mode 100644
index 0000000..6ced474
--- /dev/null
+++ b/alibabacloud_oss_v2/models/region.py
@@ -0,0 +1,84 @@
+"""Models for region operation APIs"""
+# pylint: disable=too-few-public-methods, too-many-instance-attributes, unnecessary-lambda
+# pylint: disable=super-init-not-called, too-many-lines, line-too-long, too-many-arguments
+# pylint: disable=too-many-locals
+from typing import Optional, Any, List
+from .. import serde
+
+class DescribeRegionsRequest(serde.RequestModel):
+    """The request for the DescribeRegions operation."""
+
+    _attribute_map = {
+        "regions": {"tag": "input", "position": "query", "rename": "regions"},
+    }
+
+    def __init__(
+        self,
+        regions: Optional[str] = '',
+        **kwargs: Any
+    ) -> None:
+        """
+        regions (str, optional): The ID of the region to query. If this parameter is not specified, information about all regions is returned.
+        """
+        super().__init__(**kwargs)
+        self.regions = regions
+
+class RegionInfo(serde.Model):
+    """Regional information."""
+
+    _attribute_map = {
+        "region": {"tag": "xml", "rename": "Region"},
+        "internet_endpoint": {"tag": "xml", "rename": "InternetEndpoint"},
+        "internal_endpoint": {"tag": "xml", "rename": "InternalEndpoint"},
+        "accelerate_endpoint": {"tag": "xml", "rename": "AccelerateEndpoint"},
+    }
+
+    _xml_map = {
+        "name": "RegionInfo"
+    }
+
+    def __init__(
+        self,
+        region: Optional[str] = None,
+        internet_endpoint: Optional[str] = None,
+        internal_endpoint: Optional[str] = None,
+        accelerate_endpoint: Optional[str] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        region (str, optional): OSS dedicated region ID.
+        internet_endpoint (str, optional): External endpoint.
+        internal_endpoint (str, optional): Internal network endpoint.
+        accelerate_endpoint (str, optional): Transfer acceleration endpoint.
+        """
+        super().__init__(**kwargs)
+        self.region = region
+        self.internet_endpoint = internet_endpoint
+        self.internal_endpoint = internal_endpoint
+        self.accelerate_endpoint = accelerate_endpoint
+
+class DescribeRegionsResult(serde.ResultModel):
+    """The result for the DescribeRegions operation."""
+
+    _attribute_map = {
+        "region_info": {"tag": "xml", "rename": "RegionInfo", "type": "[RegionInfo]"},
+    }
+
+    _dependency_map = {
+        "RegionInfo": {"new": lambda: RegionInfo()},
+    }
+
+    _xml_map = {
+        "name": "RegionInfoList"
+    }
+
+    def __init__(
+        self,
+        region_info: Optional[List[RegionInfo]] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        region_info ([RegionInfo], optional): The information about the regions.
+        """
+        super().__init__(**kwargs)
+        self.region_info = region_info
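A quick sketch of DescribeRegions under the same assumptions as the earlier examples; leaving `regions` unset queries every region:

```python
result = client.describe_regions(oss.DescribeRegionsRequest())
for info in result.region_info or []:
    print(info.region, info.internet_endpoint, info.internal_endpoint)
```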
+ """ + super().__init__(**kwargs) + self.region_info = region_info diff --git a/alibabacloud_oss_v2/models/service.py b/alibabacloud_oss_v2/models/service.py new file mode 100644 index 0000000..f0fd2c2 --- /dev/null +++ b/alibabacloud_oss_v2/models/service.py @@ -0,0 +1,143 @@ +"""Models for service operation APIs""" +# pylint: disable=too-few-public-methods, too-many-instance-attributes, unnecessary-lambda +# pylint: disable=super-init-not-called, too-many-lines, line-too-long, too-many-arguments +# pylint: disable=too-many-locals +import datetime +from typing import Optional, Any, List +from .. import serde +from .bucket_basic import Owner + + +class ListBucketsRequest(serde.RequestModel): + """The request for the ListBuckets operation.""" + + _attribute_map = { + "marker": {"tag": "input", "position": "query", "rename": "marker"}, + "max_keys": {"tag": "input", "position": "query", "rename": "max-keys", "type": "int"}, + "prefix": {"tag": "input", "position": "query", "rename": "prefix"}, + "resource_group_id": {"tag": "input", "position": "header", "rename": "x-oss-resource-group-id"}, + } + + def __init__( + self, + marker: Optional[str] = None, + max_keys: Optional[int] = None, + prefix: Optional[str] = None, + resource_group_id: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + marker (str, optional): The name of the bucket from which the list operation begins. + max_keys (int, optional): The maximum number of buckets that can be returned in the single query. + Valid values: 1 to 1000. + prefix (str, optional): The prefix that the names of returned buckets must contain. + Limits the response to keys that begin with the specified prefix + request_payer (str, optional): The ID of the resource group. + """ + super().__init__(**kwargs) + self.marker = marker + self.max_keys = max_keys + self.prefix = prefix + self.resource_group_id = resource_group_id + + +class BucketProperties(serde.Model): + """Stores the metadata of the bucket.""" + + _attribute_map = { + "name": {"tag": "xml", "rename": "Name"}, + "location": {"tag": "xml", "rename": "Location"}, + "creation_date": {"tag": "xml", "rename": "CreationDate", "type": "datetime"}, + "storage_class": {"tag": "xml", "rename": "StorageClass"}, + "extranet_endpoint": {"tag": "xml", "rename": "ExtranetEndpoint"}, + "intranet_endpoint": {"tag": "xml", "rename": "IntranetEndpoint"}, + "region": {"tag": "xml", "rename": "Region"}, + "resource_group_id": {"tag": "xml", "rename": "ResourceGroupId"}, + } + + def __init__( + self, + name: Optional[str] = None, + location: Optional[str] = None, + creation_date: Optional[datetime.datetime] = None, + storage_class: Optional[str] = None, + extranet_endpoint: Optional[str] = None, + intranet_endpoint: Optional[str] = None, + region: Optional[str] = None, + resource_group_id: Optional[str] = None, + **kwargs: Any + ) -> None: + """ + name (str, optional): The name of the bucket. + location (str, optional): The data center in which the bucket is located. + creation_date (datetime, optional): The time when the bucket was created. + storage_class (str, optional): The storage class of the bucket. + Valid values: Standard, IA, Archive, ColdArchive and DeepColdArchive. + extranet_endpoint (str, optional): The public endpoint used to access the bucket over the Internet. + intranet_endpoint (str, optional): The internal endpoint that is used to access the bucket from ECS instances + that reside in the same region as the bucket. + region (str, optional): The region in which the bucket is located. 
+        resource_group_id (str, optional): The ID of the resource group to which the bucket belongs.
+        """
+        super().__init__(**kwargs)
+        self.name = name
+        self.location = location
+        self.creation_date = creation_date
+        self.storage_class = storage_class
+        self.extranet_endpoint = extranet_endpoint
+        self.intranet_endpoint = intranet_endpoint
+        self.region = region
+        self.resource_group_id = resource_group_id
+
+class ListBucketsResult(serde.ResultModel):
+    """The result for the ListBuckets operation."""
+
+    _attribute_map = {
+        "prefix": {"tag": "xml", "rename": "Prefix"},
+        "marker": {"tag": "xml", "rename": "Marker"},
+        "max_keys": {"tag": "xml", "rename": "MaxKeys", "type": "int"},
+        "is_truncated": {"tag": "xml", "rename": "IsTruncated", "type": "bool"},
+        "next_marker": {"tag": "xml", "rename": "NextMarker"},
+        "owner": {"tag": "xml", "rename": "Owner", "type": "Owner"},
+        "buckets": {"tag": "xml", "rename": "Buckets/Bucket", "type": "[BucketProperties]"},
+    }
+
+    _dependency_map = {
+        "BucketProperties": {"new": lambda: BucketProperties()},
+        "Owner": {"new": lambda: Owner()},
+    }
+
+    _xml_map = {"name":"ListAllMyBucketsResult"}
+
+    def __init__(
+        self,
+        prefix: Optional[str] = None,
+        marker: Optional[str] = None,
+        max_keys: Optional[int] = None,
+        is_truncated: Optional[bool] = None,
+        next_marker: Optional[str] = None,
+        owner: Optional[Owner] = None,
+        buckets: Optional[List[BucketProperties]] = None,
+        **kwargs: Any
+    ) -> None:
+        """
+        prefix (str, optional): The prefix contained in the names of the returned buckets.
+        marker (str, optional): The name of the bucket after which the ListBuckets operation starts.
+        max_keys (int, optional): The maximum number of buckets that can be returned for the request.
+        is_truncated (bool, optional): Indicates whether the returned results are truncated.
+            true indicates that not all results are returned this time.
+            false indicates that all results are returned this time.
+        next_marker (str, optional): The marker for the next ListBuckets request, which can be used
+            to return the remaining results.
+        owner (Owner, optional): The container that stores information about the bucket owner.
+        buckets ([BucketProperties], optional): The container that stores information about buckets.
+        """
+        super().__init__(**kwargs)
+        self.prefix = prefix
+        self.marker = marker
+        self.max_keys = max_keys
+        self.is_truncated = is_truncated
+        self.next_marker = next_marker
+        self.owner = owner
+        self.buckets = buckets
+
diff --git a/alibabacloud_oss_v2/operations/__init__.py b/alibabacloud_oss_v2/operations/__init__.py
new file mode 100644
index 0000000..e7fe948
--- /dev/null
+++ b/alibabacloud_oss_v2/operations/__init__.py
@@ -0,0 +1,5 @@
+
+from .service import *
+from .region import *
+from .bucket_basic import *
+from .object_basic import *
\ No newline at end of file
diff --git a/alibabacloud_oss_v2/operations/bucket_basic.py b/alibabacloud_oss_v2/operations/bucket_basic.py
new file mode 100644
index 0000000..cac4f79
--- /dev/null
+++ b/alibabacloud_oss_v2/operations/bucket_basic.py
@@ -0,0 +1,478 @@
+"""APIs for bucket basic operation."""
+# pylint: disable=line-too-long
+
+from ..types import OperationInput, CaseInsensitiveDict
+from .. import serde
+from .. import serde_utils
+from .. import models
+from .._client import _SyncClientImpl
+
+
+def put_bucket(client: _SyncClientImpl, request: models.PutBucketRequest, **kwargs) -> models.PutBucketResult:
+    """
+    put bucket synchronously
+
+    Args:
+        client (_SyncClientImpl): An agent that sends the request.
+        request (PutBucketRequest): The request for the PutBucket operation.
+
+    Returns:
+        PutBucketResult: The result for the PutBucket operation.
+    """
+
+    op_input = serde.serialize_input(
+        request=request,
+        op_input=OperationInput(
+            op_name='PutBucket',
+            method='PUT',
+            headers=CaseInsensitiveDict({
+                'Content-Type': 'application/xml',
+            }),
+            bucket=request.bucket,
+        ),
+        custom_serializer=[
+            serde_utils.add_content_md5
+        ]
+    )
+
+    op_output = client.invoke_operation(op_input, **kwargs)
+
+    return serde.deserialize_output(
+        result=models.PutBucketResult(),
+        op_output=op_output,
+        custom_deserializer=[
+            serde.deserialize_output_discardbody
+        ],
+    )
+
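Callers are not expected to touch `_SyncClientImpl` directly; the public `Client` wraps these operation functions. A hedged sketch of the corresponding public call, with the `PutBucketRequest` field set and the `status_code` attribute treated as assumptions rather than verified API surface:

```python
# Create a bucket through the public client, which delegates to put_bucket above.
result = client.put_bucket(oss.PutBucketRequest(
    bucket="examplebucket",
    acl="private",  # assumed field mapped to the x-oss-acl header
))
print(result.status_code)  # status_code assumed to be exposed by ResultModel
```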
+ request (PutBucketRequest): The request for the PutBucket operation. + + Returns: + PutBucketResult: The result for the PutBucket operation. + """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='PutBucket', + method='PUT', + headers=CaseInsensitiveDict({ + 'Content-Type': 'application/xml', + }), + bucket=request.bucket, + ), + custom_serializer=[ + serde_utils.add_content_md5 + ] + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.PutBucketResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_discardbody + ], + ) + + +def delete_bucket(client: _SyncClientImpl, request: models.DeleteBucketRequest, **kwargs) -> models.DeleteBucketResult: + """ + delete bucket synchronously + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (DeleteBucketRequest): The request for the DeleteBucket operation. + + Returns: + DeleteBucketResult: The result for the DeleteBucket operation. + """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='DeleteBucket', + method='DELETE', + headers=CaseInsensitiveDict({ + 'Content-Type': 'application/xml', + }), + bucket=request.bucket, + ), + custom_serializer=[ + serde_utils.add_content_md5 + ] + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.DeleteBucketResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_discardbody + ], + ) + + +def list_objects(client: _SyncClientImpl, request: models.ListObjectsRequest, **kwargs) -> models.ListObjectsResult: + """ + list objects synchronously + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (ListObjectsRequest): The request for the ListObjects operation. + + Returns: + ListObjectsResult: The result for the ListObjects operation. + """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='ListObjects', + method='GET', + headers=CaseInsensitiveDict({ + 'Content-Type': 'application/octet-stream', + }), + parameters={ + 'encoding-type': 'url', + }, + bucket=request.bucket, + ), + custom_serializer=[ + serde_utils.add_content_md5 + ] + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.ListObjectsResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_xmlbody, + serde_utils.deserialize_encode_type + ], + ) + +def put_bucket_acl(client: _SyncClientImpl, request: models.PutBucketAclRequest, **kwargs) -> models.PutBucketAclResult: + """ + put bucket acl + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (PutBucketAclRequest): The request for the PutBucketAcl operation. + + Returns: + PutBucketAclResult: The result for the PutBucketAcl operation. 
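+
+    Example (an illustrative sketch; ``client`` is assumed to be an
+    initialized ``_SyncClientImpl``, and the ``acl`` field of
+    ``PutBucketAclRequest`` is an assumption about the model defined in
+    models/bucket_basic.py):
+
+        request = models.PutBucketAclRequest(
+            bucket='examplebucket',
+            acl='private',
+        )
+        result = put_bucket_acl(client, request)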
+ """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='PutBucketAcl', + method='PUT', + parameters={ + 'acl': '', + }, + bucket=request.bucket, + ), + custom_serializer=[ + serde_utils.add_content_md5 + ] + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.PutBucketAclResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_discardbody + ], + ) + + +def get_bucket_acl(client: _SyncClientImpl, request: models.GetBucketAclRequest, **kwargs) -> models.GetBucketAclResult: + """ + get bucket acl + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (GetBucketAclRequest): The request for the GetBucketAcl operation. + + Returns: + GetBucketAclResult: The result for the GetBucketAcl operation. + """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='GetBucketAcl', + method='GET', + parameters={ + 'acl': '', + }, + bucket=request.bucket, + ), + custom_serializer=[ + serde_utils.add_content_md5 + ] + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.GetBucketAclResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_xmlbody + ], + ) + + +def list_objects_v2(client: _SyncClientImpl, request: models.ListObjectsV2Request, **kwargs) -> models.ListObjectsV2Result: + """ + list objects synchronously + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (ListObjectsV2Request): The request for the ListObjectsV2 operation. + + Returns: + ListObjectsV2Result: The result for the ListObjectsV2 operation. + """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='ListObjectsV2', + method='GET', + headers=CaseInsensitiveDict({ + 'Content-Type': 'application/octet-stream', + }), + parameters={ + 'encoding-type': 'url', + 'list-type': 2, + }, + bucket=request.bucket, + ), + custom_serializer=[ + serde_utils.add_content_md5 + ] + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.ListObjectsV2Result(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_xmlbody, + serde_utils.deserialize_encode_type + ], + ) + +def get_bucket_stat(client: _SyncClientImpl, request: models.GetBucketStatRequest, **kwargs) -> models.GetBucketStatResult: + """ + GetBucketStat Queries the storage capacity of a specified bucket and the number of objects that are stored in the bucket. + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (GetBucketStatRequest): The request for the GetBucketStat operation. + + Returns: + GetBucketStatResult: The result for the GetBucketStat operation. 
+ """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='GetBucketStat', + method='GET', + parameters={ + 'stat': '', + }, + bucket=request.bucket, + ), + custom_serializer=[ + serde_utils.add_content_md5 + ] + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.GetBucketStatResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_xmlbody + ], + ) + + +def get_bucket_location(client: _SyncClientImpl, request: models.GetBucketLocationRequest, **kwargs) -> models.GetBucketLocationResult: + """ + GetBucketLocation Queries the region of an Object Storage Service (OSS) bucket. + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (GetBucketLocationRequest): The request for the GetBucketLocation operation. + + Returns: + GetBucketLocationResult: The result for the GetBucketLocation operation. + """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='GetBucketLocation', + method='GET', + parameters={ + 'location': '', + }, + bucket=request.bucket, + ), + custom_serializer=[ + serde_utils.add_content_md5 + ] + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.GetBucketLocationResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_xmlbody + ], + ) + + +def get_bucket_info(client: _SyncClientImpl, request: models.GetBucketInfoRequest, **kwargs) -> models.GetBucketInfoResult: + """ + GetBucketInfo Queries information about a bucket. + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (GetBucketInfoRequest): The request for the GetBucketInfo operation. + + Returns: + GetBucketInfoResult: The result for the GetBucketInfo operation. + """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='GetBucketInfo', + method='GET', + parameters={ + 'bucketInfo': '', + }, + bucket=request.bucket, + ), + custom_serializer=[ + serde_utils.add_content_md5 + ] + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.GetBucketInfoResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_xmlbody + ], + ) + + +def put_bucket_versioning(client: _SyncClientImpl, request: models.PutBucketVersioningRequest, **kwargs) -> models.PutBucketVersioningResult: + """ + PutBucketVersioning Configures the versioning state for a bucket. + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (PutBucketVersioningRequest): The request for the PutBucketVersioning operation. + + Returns: + PutBucketVersioningResult: The result for the PutBucketVersioning operation. 
+ """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='PutBucketVersioning', + method='PUT', + parameters={ + 'versioning': '', + }, + headers=CaseInsensitiveDict({ + 'Content-Type': 'application/xml', + }), + bucket=request.bucket, + ), + custom_serializer=[ + serde_utils.add_content_md5 + ] + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.PutBucketVersioningResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_discardbody + ], + ) + + +def get_bucket_versioning(client: _SyncClientImpl, request: models.GetBucketVersioningRequest, **kwargs) -> models.GetBucketVersioningResult: + """ + GetBucketVersioning You can call this operation to query the versioning state of a bucket. + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (GetBucketVersioningRequest): The request for the GetBucketVersioning operation. + + Returns: + GetBucketVersioningResult: The result for the GetBucketVersioning operation. + """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='GetBucketVersioning', + method='GET', + parameters={ + 'versioning': '', + }, + bucket=request.bucket, + ), + custom_serializer=[ + serde_utils.add_content_md5 + ] + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.GetBucketVersioningResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_xmlbody + ], + ) + + +def list_object_versions(client: _SyncClientImpl, request: models.ListObjectVersionsRequest, **kwargs) -> models.ListObjectVersionsResult: + """ + ListObjectVersions Lists the versions of all objects in a bucket, including delete markers. + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (ListObjectVersionsRequest): The request for the ListObjectVersions operation. + + Returns: + ListObjectVersionsResult: The result for the ListObjectVersions operation. + """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='ListObjectVersions', + method='GET', + headers=CaseInsensitiveDict({ + 'Content-Type': 'application/octet-stream', + }), + parameters={ + 'versions': '', + }, + bucket=request.bucket, + ), + custom_serializer=[ + serde_utils.add_content_md5 + ] + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.ListObjectVersionsResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_xmlbody, + serde_utils.deserialize_encode_type + ], + ) diff --git a/alibabacloud_oss_v2/operations/object_basic.py b/alibabacloud_oss_v2/operations/object_basic.py new file mode 100644 index 0000000..cffd9fc --- /dev/null +++ b/alibabacloud_oss_v2/operations/object_basic.py @@ -0,0 +1,984 @@ +"""APIs for bucket basic operation.""" +# pylint: disable=line-too-long + +from ..types import OperationInput, CaseInsensitiveDict +from .. import serde +from .. import serde_utils +from .. import models +from .._client import _SyncClientImpl +from ..io_utils import StreamBodyReader + + +def put_object(client: _SyncClientImpl, request: models.PutObjectRequest, **kwargs) -> models.PutObjectResult: + """ + put object synchronously + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (PutObjectRequest): The request for the PutObject operation. 
+ + Returns: + PutObjectResult: The result for the PutObject operation. + """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='PutObject', + method='PUT', + bucket=request.bucket, + key=request.key, + ), + custom_serializer=[ + serde_utils.add_content_type, + serde_utils.add_progress, + serde_utils.add_crc_checker, + ] + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.PutObjectResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_discardbody, + serde.deserialize_output_headers + ], + ) + + +def head_object(client: _SyncClientImpl, request: models.HeadObjectRequest, **kwargs) -> models.HeadObjectResult: + """ + head object synchronously + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (HeadObjectRequest): The request for the HeadObject operation. + + Returns: + HeadObjectResult: The result for the HeadObject operation. + """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='HeadObject', + method='HEAD', + bucket=request.bucket, + key=request.key, + ), + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.HeadObjectResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_discardbody, + serde.deserialize_output_headers + ], + ) + + +def get_object(client: _SyncClientImpl, request: models.GetObjectRequest, **kwargs) -> models.GetObjectResult: + """ + get object synchronously + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (GetObjectRequest): The request for the GetObject operation. + + Returns: + GetObjectResult: The result for the GetObject operation. + """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='GetObject', + method='GET', + bucket=request.bucket, + key=request.key, + op_metadata={'response-stream':True} + ), + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.GetObjectResult( + body=StreamBodyReader(op_output.http_response) + ), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_headers + ], + ) + + +def append_object(client: _SyncClientImpl, request: models.AppendObjectRequest, **kwargs) -> models.AppendObjectResult: + """ + append object synchronously + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (AppendObjectRequest): The request for the AppendObject operation. + + Returns: + AppendObjectResult: The result for the AppendObject operation. + """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='AppendObject', + method='POST', + bucket=request.bucket, + key=request.key, + parameters={ + 'append': '', + }, + ), + custom_serializer=[ + serde_utils.add_content_type, + serde_utils.add_progress, + ], + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.AppendObjectResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_headers + ], + ) + +def copy_object(client: _SyncClientImpl, request: models.CopyObjectRequest, **kwargs) -> models.CopyObjectResult: + """ + copy object synchronously + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (CopyObjectRequest): The request for the CopyObject operation. 
+
+    Returns:
+        CopyObjectResult: The result for the CopyObject operation.
+    """
+
+    op_input = serde.serialize_input(
+        request=request,
+        op_input=OperationInput(
+            op_name='CopyObject',
+            method='PUT',
+            bucket=request.bucket,
+            key=request.key,
+            headers=CaseInsensitiveDict({
+                'x-oss-copy-source': serde_utils.encode_copy_source(request),
+            }),
+        ),
+        custom_serializer=[
+            serde_utils.add_content_md5,
+        ],
+    )
+
+    op_output = client.invoke_operation(op_input, **kwargs)
+
+    return serde.deserialize_output(
+        result=models.CopyObjectResult(),
+        op_output=op_output,
+        custom_deserializer=[
+            serde.deserialize_output_headers,
+            serde.deserialize_output_xmlbody
+        ],
+    )
+
+
+def delete_object(client: _SyncClientImpl, request: models.DeleteObjectRequest, **kwargs) -> models.DeleteObjectResult:
+    """
+    delete object synchronously
+
+    Args:
+        client (_SyncClientImpl): An agent that sends the request.
+        request (DeleteObjectRequest): The request for the DeleteObject operation.
+
+    Returns:
+        DeleteObjectResult: The result for the DeleteObject operation.
+    """
+
+    op_input = serde.serialize_input(
+        request=request,
+        op_input=OperationInput(
+            op_name='DeleteObject',
+            method='DELETE',
+            bucket=request.bucket,
+            key=request.key,
+        ),
+        custom_serializer=[
+            serde_utils.add_content_md5,
+        ],
+    )
+
+    op_output = client.invoke_operation(op_input, **kwargs)
+
+    return serde.deserialize_output(
+        result=models.DeleteObjectResult(),
+        op_output=op_output,
+        custom_deserializer=[
+            serde.deserialize_output_headers,
+        ],
+    )
+
+
+def delete_multiple_objects(client: _SyncClientImpl, request: models.DeleteMultipleObjectsRequest, **kwargs) -> models.DeleteMultipleObjectsResult:
+    """
+    delete multiple objects synchronously
+
+    Args:
+        client (_SyncClientImpl): An agent that sends the request.
+        request (DeleteMultipleObjectsRequest): The request for the DeleteMultipleObjects operation.
+
+    Returns:
+        DeleteMultipleObjectsResult: The result for the DeleteMultipleObjects operation.
+    """
+
+    op_input = serde.serialize_input(
+        request=request,
+        op_input=OperationInput(
+            op_name='DeleteMultipleObjects',
+            method='POST',
+            bucket=request.bucket,
+            headers=CaseInsensitiveDict({
+                'Content-Type': 'application/xml',
+            }),
+            parameters={
+                'delete': '',
+                'encoding-type': 'url',
+            }
+        ),
+        custom_serializer=[
+            serde_utils.serialize_delete_objects,
+            serde_utils.add_content_md5,
+        ],
+    )
+
+    op_output = client.invoke_operation(op_input, **kwargs)
+
+    return serde.deserialize_output(
+        result=models.DeleteMultipleObjectsResult(),
+        op_output=op_output,
+        custom_deserializer=[
+            serde.deserialize_output_xmlbody,
+            serde.deserialize_output_headers,
+            serde_utils.deserialize_encode_type
+        ],
+    )
+
+
+def get_object_meta(client: _SyncClientImpl, request: models.GetObjectMetaRequest, **kwargs) -> models.GetObjectMetaResult:
+    """
+    get object meta synchronously
+
+    Args:
+        client (_SyncClientImpl): An agent that sends the request.
+        request (GetObjectMetaRequest): The request for the GetObjectMeta operation.
+
+    Returns:
+        GetObjectMetaResult: The result for the GetObjectMeta operation.
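+
+    Example (an illustrative sketch; the ``content_length`` and ``etag``
+    result fields are assumptions about the headers surfaced by this
+    operation):
+
+        request = models.GetObjectMetaRequest(bucket='examplebucket', key='exampleobject.txt')
+        result = get_object_meta(client, request)
+        print(result.content_length, result.etag)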
+ """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='GetObjectMeta', + method='HEAD', + bucket=request.bucket, + key=request.key, + parameters={ + 'objectMeta': '', + } + ), + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.GetObjectMetaResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_headers + ], + ) + + +def restore_object(client: _SyncClientImpl, request: models.RestoreObjectRequest, **kwargs) -> models.RestoreObjectResult: + """ + restore object synchronously + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (RestoreObjectRequest): The request for the RestoreObject operation. + + Returns: + RestoreObjectResult: The result for the RestoreObject operation. + """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='RestoreObject', + method='POST', + bucket=request.bucket, + key=request.key, + headers=CaseInsensitiveDict({ + 'Content-Type': 'application/xml', + }), + parameters={ + 'restore': '', + } + ), + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.RestoreObjectResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_headers + ], + ) + + +def put_object_acl(client: _SyncClientImpl, request: models.PutObjectAclRequest, **kwargs) -> models.PutObjectAclResult: + """ + put object acl synchronously + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (PutObjectAclRequest): The request for the PutObjectAcl operation. + + Returns: + PutObjectAclResult: The result for the PutObjectAcl operation. + """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='PutObjectAcl', + method='PUT', + bucket=request.bucket, + key=request.key, + parameters={ + 'acl': '', + } + ), + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.PutObjectAclResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_headers + ], + ) + + +def get_object_acl(client: _SyncClientImpl, request: models.GetObjectAclRequest, **kwargs) -> models.GetObjectAclResult: + """ + get object acl synchronously + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (GetObjectAclRequest): The request for the GetObjectAcl operation. + + Returns: + GetObjectAclResult: The result for the GetObjectAcl operation. + """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='GetObjectAcl', + method='GET', + bucket=request.bucket, + key=request.key, + parameters={ + 'acl': '', + } + ), + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.GetObjectAclResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_xmlbody, + serde.deserialize_output_headers + ], + ) + + +def initiate_multipart_upload(client: _SyncClientImpl, request: models.InitiateMultipartUploadRequest, **kwargs) -> models.InitiateMultipartUploadResult: + """ + initiate multipart upload synchronously + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (InitiateMultipartUploadRequest): The request for the InitiateMultipartUpload operation. + + Returns: + InitiateMultipartUploadResult: The result for the InitiateMultipartUpload operation. 
+ """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='InitiateMultipartUpload', + method='POST', + bucket=request.bucket, + key=request.key, + parameters={ + 'uploads': '', + 'encoding-type': 'url', + } + ), + custom_serializer=[ + serde_utils.add_content_md5, + serde_utils.add_content_type, + ], + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.InitiateMultipartUploadResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_xmlbody, + serde_utils.deserialize_encode_type + ], + ) + + +def upload_part(client: _SyncClientImpl, request: models.UploadPartRequest, **kwargs) -> models.UploadPartResult: + """ + upload part synchronously + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (UploadPartRequest): The request for the UploadPart operation. + + Returns: + UploadPartResult: The result for the UploadPart operation. + """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='UploadPart', + method='PUT', + bucket=request.bucket, + key=request.key, + ), + custom_serializer=[ + serde_utils.add_progress, + serde_utils.add_crc_checker, + ], + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.UploadPartResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_headers + ], + ) + + +def upload_part_copy(client: _SyncClientImpl, request: models.UploadPartCopyRequest, **kwargs) -> models.UploadPartCopyResult: + """ + upload part copy synchronously + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (UploadPartCopyRequest): The request for the UploadPartCopy operation. + + Returns: + UploadPartCopyResult: The result for the UploadPartCopy operation. + """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='UploadPartCopy', + method='PUT', + bucket=request.bucket, + key=request.key, + headers=CaseInsensitiveDict({ + 'x-oss-copy-source': serde_utils.encode_copy_source(request), + }), + ), + custom_serializer=[ + serde_utils.add_content_md5 + ], + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.UploadPartCopyResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_xmlbody, + serde.deserialize_output_headers + ], + ) + + +def complete_multipart_upload(client: _SyncClientImpl, request: models.CompleteMultipartUploadRequest, **kwargs) -> models.CompleteMultipartUploadResult: + """ + complete multipart upload synchronously + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (CompleteMultipartUploadRequest): The request for the CompleteMultipartUpload operation. + + Returns: + CompleteMultipartUploadResult: The result for the CompleteMultipartUpload operation. 
+ """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='CompleteMultipartUpload', + method='POST', + bucket=request.bucket, + key=request.key, + parameters={ + 'encoding-type': 'url', + } + ), + custom_serializer=[ + serde_utils.add_content_md5 + ], + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.CompleteMultipartUploadResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_xmlbody, + serde.deserialize_output_headers, + serde_utils.deserialize_encode_type + ], + ) + + +def abort_multipart_upload(client: _SyncClientImpl, request: models.AbortMultipartUploadRequest, **kwargs) -> models.AbortMultipartUploadResult: + """ + abort multipart upload synchronously + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (AbortMultipartUploadRequest): The request for the AbortMultipartUpload operation. + + Returns: + AbortMultipartUploadResult: The result for the AbortMultipartUpload operation. + """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='AbortMultipartUpload', + method='DELETE', + bucket=request.bucket, + key=request.key, + ), + custom_serializer=[ + serde_utils.add_content_md5 + ], + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.AbortMultipartUploadResult(), + op_output=op_output, + ) + + + +def list_multipart_uploads(client: _SyncClientImpl, request: models.ListMultipartUploadsRequest, **kwargs) -> models.ListMultipartUploadsResult: + """ + list multipart uploads synchronously + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (ListMultipartUploadsRequest): The request for the ListMultipartUploads operation. + + Returns: + ListMultipartUploadsResult: The result for the ListMultipartUploads operation. + """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='ListMultipartUploads', + method='GET', + bucket=request.bucket, + parameters={ + 'encoding-type': 'url', + 'uploads': '', + }, + ), + custom_serializer=[ + serde_utils.add_content_md5 + ], + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.ListMultipartUploadsResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_xmlbody, + serde_utils.deserialize_encode_type + ], + ) + + + +def list_parts(client: _SyncClientImpl, request: models.ListPartsRequest, **kwargs) -> models.ListPartsResult: + """ + list parts synchronously + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (ListPartsRequest): The request for the ListParts operation. + + Returns: + ListPartsResult: The result for the ListParts operation. 
+ """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='ListParts', + method='GET', + bucket=request.bucket, + key=request.key, + parameters={ + 'encoding-type': 'url', + }, + ), + custom_serializer=[ + serde_utils.add_content_md5 + ], + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.ListPartsResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_xmlbody, + serde_utils.deserialize_encode_type + ], + ) + + + +def put_symlink(client: _SyncClientImpl, request: models.PutSymlinkRequest, **kwargs) -> models.PutSymlinkResult: + """ + put symlink synchronously + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (PutSymlinkRequest): The request for the PutSymlink operation. + + Returns: + PutSymlinkResult: The result for the PutSymlink operation. + """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='PutSymlink', + method='PUT', + bucket=request.bucket, + key=request.key, + parameters={ + 'symlink': '', + }, + ), + custom_serializer=[ + serde_utils.add_content_md5 + ], + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.PutSymlinkResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_headers + ], + ) + + + +def get_symlink(client: _SyncClientImpl, request: models.GetSymlinkRequest, **kwargs) -> models.GetSymlinkResult: + """ + get symlink synchronously + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (GetSymlinkRequest): The request for the GetSymlink operation. + + Returns: + GetSymlinkResult: The result for the GetSymlink operation. + """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='GetSymlink', + method='PUT', + bucket=request.bucket, + key=request.key, + parameters={ + 'symlink': '', + }, + ), + custom_serializer=[ + serde_utils.add_content_md5 + ], + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.GetSymlinkResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_headers + ], + ) + + + +def put_object_tagging(client: _SyncClientImpl, request: models.PutObjectTaggingRequest, **kwargs) -> models.PutObjectTaggingResult: + """ + put object tagging synchronously + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (PutObjectTaggingRequest): The request for the PutObjectTagging operation. + + Returns: + PutObjectTaggingResult: The result for the PutObjectTagging operation. + """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='PutObjectTagging', + method='PUT', + bucket=request.bucket, + key=request.key, + parameters={ + 'tagging': '', + }, + ), + custom_serializer=[ + serde_utils.add_content_md5 + ], + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.PutObjectTaggingResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_headers + ], + ) + + + +def get_object_tagging(client: _SyncClientImpl, request: models.GetObjectTaggingRequest, **kwargs) -> models.GetObjectTaggingResult: + """ + get object tagging synchronously + + Args: + client (_SyncClientImpl): A agent that sends the request. 
+        request (GetObjectTaggingRequest): The request for the GetObjectTagging operation.
+
+    Returns:
+        GetObjectTaggingResult: The result for the GetObjectTagging operation.
+    """
+
+    op_input = serde.serialize_input(
+        request=request,
+        op_input=OperationInput(
+            op_name='GetObjectTagging',
+            method='GET',
+            bucket=request.bucket,
+            key=request.key,
+            parameters={
+                'tagging': '',
+            },
+        ),
+        custom_serializer=[
+            serde_utils.add_content_md5
+        ],
+    )
+
+    op_output = client.invoke_operation(op_input, **kwargs)
+
+    return serde.deserialize_output(
+        result=models.GetObjectTaggingResult(),
+        op_output=op_output,
+        custom_deserializer=[
+            serde.deserialize_output_xmlbody,
+            serde.deserialize_output_headers
+        ],
+    )
+
+
+def delete_object_tagging(client: _SyncClientImpl, request: models.DeleteObjectTaggingRequest, **kwargs) -> models.DeleteObjectTaggingResult:
+    """
+    delete object tagging synchronously
+
+    Args:
+        client (_SyncClientImpl): An agent that sends the request.
+        request (DeleteObjectTaggingRequest): The request for the DeleteObjectTagging operation.
+
+    Returns:
+        DeleteObjectTaggingResult: The result for the DeleteObjectTagging operation.
+    """
+
+    op_input = serde.serialize_input(
+        request=request,
+        op_input=OperationInput(
+            op_name='DeleteObjectTagging',
+            method='DELETE',
+            bucket=request.bucket,
+            key=request.key,
+            parameters={
+                'tagging': '',
+            },
+        ),
+        custom_serializer=[
+            serde_utils.add_content_md5
+        ],
+    )
+
+    op_output = client.invoke_operation(op_input, **kwargs)
+
+    return serde.deserialize_output(
+        result=models.DeleteObjectTaggingResult(),
+        op_output=op_output,
+        custom_deserializer=[
+            serde.deserialize_output_headers
+        ],
+    )
+
+
+def process_object(client: _SyncClientImpl, request: models.ProcessObjectRequest, **kwargs) -> models.ProcessObjectResult:
+    """
+    process object synchronously
+
+    Args:
+        client (_SyncClientImpl): An agent that sends the request.
+        request (ProcessObjectRequest): The request for the ProcessObject operation.
+
+    Returns:
+        ProcessObjectResult: The result for the ProcessObject operation.
+    """
+
+    op_input = serde.serialize_input(
+        request=request,
+        op_input=OperationInput(
+            op_name='ProcessObject',
+            method='POST',
+            bucket=request.bucket,
+            key=request.key,
+            parameters={
+                'x-oss-process': '',
+            },
+        ),
+        custom_serializer=[
+            serde_utils.add_process_action,
+            serde_utils.add_content_md5
+        ],
+    )
+
+    op_output = client.invoke_operation(op_input, **kwargs)
+
+    return serde.deserialize_output(
+        result=models.ProcessObjectResult(),
+        op_output=op_output,
+        custom_deserializer=[
+            serde.deserialize_output_headers,
+            serde_utils.deserialize_process_body
+        ],
+    )
+
+
+def async_process_object(client: _SyncClientImpl, request: models.AsyncProcessObjectRequest, **kwargs) -> models.AsyncProcessObjectResult:
+    """
+    async process object synchronously
+
+    Args:
+        client (_SyncClientImpl): An agent that sends the request.
+        request (AsyncProcessObjectRequest): The request for the AsyncProcessObject operation.
+
+    Returns:
+        AsyncProcessObjectResult: The result for the AsyncProcessObject operation.
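+
+    Example (an illustrative sketch; the ``process`` request field and the
+    ``image/resize`` action string are assumptions about the data-processing
+    feature and are not defined in this file):
+
+        request = models.AsyncProcessObjectRequest(
+            bucket='examplebucket',
+            key='example.jpg',
+            process='image/resize,w_100',
+        )
+        result = async_process_object(client, request)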
+ """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='AsyncProcessObject', + method='POST', + bucket=request.bucket, + key=request.key, + parameters={ + 'x-oss-async-process': '', + }, + ), + custom_serializer=[ + serde_utils.add_process_action, + serde_utils.add_content_md5 + ], + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.AsyncProcessObjectResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_headers, + serde_utils.deserialize_process_body + ], + ) diff --git a/alibabacloud_oss_v2/operations/region.py b/alibabacloud_oss_v2/operations/region.py new file mode 100644 index 0000000..6baeb58 --- /dev/null +++ b/alibabacloud_oss_v2/operations/region.py @@ -0,0 +1,40 @@ +"""APIs for region operation.""" +# pylint: disable=line-too-long + +from ..types import OperationInput +from .. import serde +from .. import models +from .._client import _SyncClientImpl + +def describe_regions(client: _SyncClientImpl, request: models.DescribeRegionsRequest, **kwargs) -> models.DescribeRegionsResult: + """ + Queries the endpoints of all regions supported by Object Storage Service (OSS) or a specific region, including public endpoints, internal endpoints, and acceleration endpoints. + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (DescribeRegionsRequest): The request for the DescribeRegions operation. + + Returns: + DescribeRegionsResult: The result for the DescribeRegions operation. + """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='DescribeRegions', + method='GET', + parameters={ + 'regions': '', + }, + ) + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.DescribeRegionsResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_xmlbody + ], + ) diff --git a/alibabacloud_oss_v2/operations/service.py b/alibabacloud_oss_v2/operations/service.py new file mode 100644 index 0000000..f9d1051 --- /dev/null +++ b/alibabacloud_oss_v2/operations/service.py @@ -0,0 +1,44 @@ +"""APIs for service operation.""" +# pylint: disable=line-too-long + +from ..types import OperationInput, CaseInsensitiveDict +from .. import serde +from .. import serde_utils +from .. import models +from .._client import _SyncClientImpl + +def list_buckets(client: _SyncClientImpl, request: models.ListBucketsRequest, **kwargs) -> models.ListBucketsResult: + """ + list buckets synchronously + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (ListBucketsRequest): The request for the ListBuckets operation. + + Returns: + ListBucketsResult: The result for the ListBuckets operation. 
+ """ + + op_input = serde.serialize_input( + request=request, + op_input=OperationInput( + op_name='ListBuckets', + method='GET', + headers=CaseInsensitiveDict({ + 'Content-Type': 'application/octet-stream', + }), + ), + custom_serializer=[ + serde_utils.add_content_md5 + ] + ) + + op_output = client.invoke_operation(op_input, **kwargs) + + return serde.deserialize_output( + result=models.ListBucketsResult(), + op_output=op_output, + custom_deserializer=[ + serde.deserialize_output_xmlbody, + ], + ) diff --git a/alibabacloud_oss_v2/paginator.py b/alibabacloud_oss_v2/paginator.py new file mode 100644 index 0000000..8f95088 --- /dev/null +++ b/alibabacloud_oss_v2/paginator.py @@ -0,0 +1,321 @@ +"""Paginator for list operation.""" +import abc +import copy +from typing import Iterator, Any +from . import models + + +class ListObjectsAPIClient(abc.ABC): + """Abstract base class for list_objects client.""" + + @abc.abstractmethod + def list_objects(self, request: models.ListObjectsRequest, **kwargs) -> models.ListObjectsResult: + """Lists information about all objects in an Object Storage Service (OSS) bucket.""" + + +class ListObjectsPaginator: + """A paginator for ListObjects""" + + def __init__( + self, + client: ListObjectsAPIClient, + **kwargs: Any + ) -> None: + """ + client (ListObjectsAPIClient): A agent that sends the request. + limit (int, optional): The maximum number of items in the response. + """ + self._client = client + self._limit = kwargs.get('limit', None) + + def iter_page(self, request: models.ListObjectsRequest, **kwargs: Any) -> Iterator[models.ListObjectsResult]: + """Iterates over the objects. + + Args: + request (models.ListObjectsRequest): The request for the ListObjects operation. + limit (int, optional): The maximum number of items in the response. + + Yields: + Iterator[models.ListObjectsResult]: An iterator of ListObjectsResult from the response + """ + limit = kwargs.get('limit', self._limit) + req = copy.copy(request) + if limit is not None: + req.max_keys = limit + + first_page = True + is_truncated = False + + while first_page or is_truncated: + result = self._client.list_objects(req) + yield result + + first_page = False + is_truncated = result.is_truncated + req.marker = result.next_marker + + def __repr__(self) -> str: + return "" + + +class ListObjectsV2APIClient(abc.ABC): + """Abstract base class for list_objects_v2 client.""" + + @abc.abstractmethod + def list_objects_v2(self, request: models.ListObjectsV2Request, **kwargs) -> models.ListObjectsV2Result: + """Lists information about all objects in an Object Storage Service (OSS) bucket.""" + + +class ListObjectsV2Paginator: + """A paginator for ListObjectsV2""" + + def __init__( + self, + client: ListObjectsV2APIClient, + **kwargs: Any + ) -> None: + """ + client (ListObjectsV2APIClient): A agent that sends the request. + limit (int, optional): The maximum number of items in the response. + """ + self._client = client + self._limit = kwargs.get('limit', None) + + def iter_page(self, request: models.ListObjectsV2Request, **kwargs: Any) -> Iterator[models.ListObjectsV2Result]: + """Iterates over the objects with v2. + + Args: + request (models.ListObjectsV2Request): The request for the ListObjectsV2 operation. + limit (int, optional): The maximum number of items in the response. 
+ + Yields: + Iterator[models.ListObjectsV2Result]: An iterator of ListObjectsV2Result from the response + """ + limit = kwargs.get('limit', self._limit) + req = copy.copy(request) + if limit is not None: + req.max_keys = limit + + first_page = True + is_truncated = False + + while first_page or is_truncated: + result = self._client.list_objects_v2(req) + yield result + + first_page = False + is_truncated = result.is_truncated + req.continuation_token = result.next_continuation_token + + def __repr__(self) -> str: + return "" + +class ListObjectVersionsAPIClient(abc.ABC): + """Abstract base class for list_object_versions client.""" + + @abc.abstractmethod + def list_object_versions(self, request: models.ListObjectVersionsRequest, **kwargs) -> models.ListObjectVersionsResult: + """Lists the versions of all objects in a bucket, including delete markers.""" + + +class ListObjectVersionsPaginator: + """A paginator for ListObjectVersions""" + + def __init__( + self, + client: ListObjectVersionsAPIClient, + **kwargs: Any + ) -> None: + """ + client (ListObjectVersionsAPIClient): A agent that sends the request. + limit (int, optional): The maximum number of items in the response. + """ + self._client = client + self._limit = kwargs.get('limit', None) + + def iter_page(self, request: models.ListObjectVersionsRequest, **kwargs: Any) -> Iterator[models.ListObjectVersionsResult]: + """Iterates over the object versions. + + Args: + request (models.ListObjectVersionsRequest): The request for the ListObjectVersions operation. + limit (int, optional): The maximum number of items in the response. + + Yields: + Iterator[models.ListObjectVersionsResult]: An iterator of ListObjectVersionsResult from the response + """ + limit = kwargs.get('limit', self._limit) + req = copy.copy(request) + if limit is not None: + req.max_keys = limit + + first_page = True + is_truncated = False + + while first_page or is_truncated: + result = self._client.list_object_versions(req) + yield result + + first_page = False + is_truncated = result.is_truncated + req.key_marker = result.next_key_marker + req.version_id_marker = result.next_version_id_marker + + def __repr__(self) -> str: + return "" + +class ListBucketsAPIClient(abc.ABC): + """Abstract base class for list_buckets client.""" + + @abc.abstractmethod + def list_buckets(self, request: models.ListBucketsRequest, **kwargs) -> models.ListBucketsResult: + """Lists all buckets that belong to your Alibaba Cloud account.""" + + +class ListBucketsPaginator: + """A paginator for ListBuckets""" + + def __init__( + self, + client: ListBucketsAPIClient, + **kwargs: Any + ) -> None: + """ + client (ListBucketsAPIClient): A agent that sends the request. + limit (int, optional): The maximum number of items in the response. + """ + self._client = client + self._limit = kwargs.get('limit', None) + + def iter_page(self, request: models.ListBucketsRequest, **kwargs: Any) -> Iterator[models.ListBucketsResult]: + """Iterates over the buckets. + + Args: + request (models.ListBucketsRequest): The request for the ListBuckets operation. + limit (int, optional): The maximum number of items in the response. 
+
+        Yields:
+            Iterator[models.ListBucketsResult]: An iterator of ListBucketsResult from the response
+        """
+        limit = kwargs.get('limit', self._limit)
+        req = copy.copy(request)
+        if limit is not None:
+            req.max_keys = limit
+
+        first_page = True
+        is_truncated = False
+
+        while first_page or is_truncated:
+            result = self._client.list_buckets(req)
+            yield result
+
+            first_page = False
+            is_truncated = result.is_truncated
+            req.marker = result.next_marker
+
+    def __repr__(self) -> str:
+        return ""
+
+
+class ListPartsAPIClient(abc.ABC):
+    """Abstract base class for list_parts client."""
+
+    @abc.abstractmethod
+    def list_parts(self, request: models.ListPartsRequest, **kwargs) -> models.ListPartsResult:
+        """Lists all parts that are uploaded by using a specified upload ID."""
+
+class ListPartsPaginator:
+    """A paginator for ListParts"""
+
+    def __init__(
+        self,
+        client: ListPartsAPIClient,
+        **kwargs: Any
+    ) -> None:
+        """
+        client (ListPartsAPIClient): An agent that sends the request.
+        limit (int, optional): The maximum number of items in the response.
+        """
+        self._client = client
+        self._limit = kwargs.get('limit', None)
+
+    def iter_page(self, request: models.ListPartsRequest, **kwargs: Any) -> Iterator[models.ListPartsResult]:
+        """Iterates over the parts.
+
+        Args:
+            request (models.ListPartsRequest): The request for the ListParts operation.
+            limit (int, optional): The maximum number of items in the response.
+
+        Yields:
+            Iterator[models.ListPartsResult]: An iterator of ListPartsResult from the response
+        """
+        limit = kwargs.get('limit', self._limit)
+        req = copy.copy(request)
+        if limit is not None:
+            req.max_parts = limit
+
+        first_page = True
+        is_truncated = False
+
+        while first_page or is_truncated:
+            result = self._client.list_parts(req)
+            yield result
+
+            first_page = False
+            is_truncated = result.is_truncated
+            req.part_number_marker = result.next_part_number_marker
+
+    def __repr__(self) -> str:
+        return ""
+
+class ListMultipartUploadsAPIClient(abc.ABC):
+    """Abstract base class for list_multipart_uploads client."""
+
+    @abc.abstractmethod
+    def list_multipart_uploads(self, request: models.ListMultipartUploadsRequest, **kwargs) -> models.ListMultipartUploadsResult:
+        """Lists all multipart upload tasks in progress. The tasks are not completed or canceled."""
+
+
+class ListMultipartUploadsPaginator:
+    """A paginator for ListMultipartUploads"""
+
+    def __init__(
+        self,
+        client: ListMultipartUploadsAPIClient,
+        **kwargs: Any
+    ) -> None:
+        """
+        client (ListMultipartUploadsAPIClient): An agent that sends the request.
+        limit (int, optional): The maximum number of items in the response.
+        """
+        self._client = client
+        self._limit = kwargs.get('limit', None)
+
+    def iter_page(self, request: models.ListMultipartUploadsRequest, **kwargs: Any) -> Iterator[models.ListMultipartUploadsResult]:
+        """Iterates over the multipart uploads.
+
+        Args:
+            request (models.ListMultipartUploadsRequest): The request for the ListMultipartUploads operation.
+            limit (int, optional): The maximum number of items in the response.
+ + Yields: + Iterator[models.ListMultipartUploadsResult]: An iterator of ListMultipartUploadsResult from the response + """ + limit = kwargs.get('limit', self._limit) + req = copy.copy(request) + if limit is not None: + req.max_uploads = limit + + first_page = True + is_truncated = False + + while first_page or is_truncated: + result = self._client.list_multipart_uploads(req) + yield result + + first_page = False + is_truncated = result.is_truncated + req.key_marker = result.next_key_marker + req.upload_id_marker = result.next_upload_id_marker + + def __repr__(self) -> str: + return "" \ No newline at end of file diff --git a/alibabacloud_oss_v2/presigner.py b/alibabacloud_oss_v2/presigner.py new file mode 100644 index 0000000..a2788b7 --- /dev/null +++ b/alibabacloud_oss_v2/presigner.py @@ -0,0 +1,220 @@ +"""APIs for presign operation.""" +# pylint: disable=line-too-long +import datetime +from typing import Union, Optional, MutableMapping +from .types import OperationInput, OperationOutput, HttpClient, HttpRequest, HttpResponse +from . import serde +from . import models +from . import exceptions +from ._client import _SyncClientImpl +from .signer import SignerV4 + + +PresignRequest = Union[ + models.GetObjectRequest, + models.PutObjectRequest, + models.HeadObjectRequest, + models.InitiateMultipartUploadRequest, + models.UploadPartRequest, + models.CompleteMultipartUploadRequest, + models.AbortMultipartUploadRequest +] + + +class PresignResult: + """The result for the presign operation.""" + + def __init__( + self, + method: Optional[str] = None, + url: Optional[str] = None, + expiration: Optional[datetime.datetime] = None, + signed_headers: Optional[MutableMapping] = None, + ) -> None: + """ + Args: + method (str, optional): The HTTP method, which corresponds to the operation. + For example, the HTTP method of the GetObject operation is GET. + url (str, optional): The pre-signed URL. + expiration (datetime.datetime, optional): The time when the pre-signed URL expires. + signed_headers (MutableMapping, optional): The request headers specified in the request. + For example, if Content-Type is specified for PutObject, Content-Type is returned. 
+ """ + self.method = method + self.url = url + self.expiration = expiration + self.signed_headers = signed_headers + + +class _nopResponse(HttpResponse): + def __init__(self, **kwargs) -> None: + super().__init__() + self._request = kwargs.pop("request") + + @property + def request(self) -> HttpRequest: + return self._request + + @property + def is_closed(self) -> bool: + return True + + @property + def is_stream_consumed(self) -> bool: + return True + + @property + def status_code(self) -> int: + return 200 + + @property + def headers(self): + return {} + + @property + def reason(self) -> str: + return "OK" + + @property + def content(self) -> bytes: + return b'' + + def __repr__(self) -> str: + return '_nopResponse' + + def __enter__(self) -> "_nopResponse": + return self + + def __exit__(self, *args) -> None: + self.close() + + def close(self) -> None: + pass + + def read(self) -> bytes: + return b'' + + def iter_bytes(self, **kwargs): + """iter bytes""" + return iter([]) + + +class _nopHttpClient(HttpClient): + def send(self, request: HttpRequest, **kwargs) -> HttpResponse: + return _nopResponse(request=request) + + def open(self) -> None: + pass + + def close(self) -> None: + pass + + +_presign_kwargs = { + 'http_client': _nopHttpClient(), + 'auth_method': 'query' +} + + +def presign_inner(client: _SyncClientImpl, request: PresignRequest, **kwargs) -> PresignResult: + """ + presign synchronously + + Args: + client (_SyncClientImpl): A agent that sends the request. + request (PresignRequest): The request for the Presign operation. + expires (datetime.timedelta, optional): The expiration duration for the generated presign url. + expiration (datetime.datetime, optional): The expiration time for the generated presign url. + + Returns: + PresignResult: The result for the Presign operation. 
+ """ + + op_input = _serialize_input(request) + + # expiration + expires = kwargs.pop("expires", None) + expiration = kwargs.pop("expiration", None) + if expiration is not None: + op_input.op_metadata['expiration_time'] = expiration + elif expires is not None: + now = datetime.datetime.now(datetime.timezone.utc) + op_input.op_metadata['expiration_time'] = now + expires + + op_output = client.invoke_operation(op_input, **_presign_kwargs) + + return _deserialize_output(client, op_output) + + +def _serialize_input(request: PresignRequest) -> OperationInput: + op_input = OperationInput(op_name="", method="") + if isinstance(request, models.GetObjectRequest): + op_input.op_name = "GetObject" + op_input.method = "GET" + op_input.bucket = request.bucket + op_input.key = request.key + + elif isinstance(request, models.PutObjectRequest): + op_input.op_name = "PutObject" + op_input.method = "PUT" + op_input.bucket = request.bucket + op_input.key = request.key + + elif isinstance(request, models.HeadObjectRequest): + op_input.op_name = "HeadObject" + op_input.method = "HEAD" + op_input.bucket = request.bucket + op_input.key = request.key + + elif isinstance(request, models.InitiateMultipartUploadRequest): + op_input.op_name = "InitiateMultipartUpload" + op_input.method = "POST" + op_input.bucket = request.bucket + op_input.key = request.key + op_input.parameters = {"uploads": ""} + + elif isinstance(request, models.UploadPartRequest): + op_input.op_name = "UploadPart" + op_input.method = "PUT" + op_input.bucket = request.bucket + op_input.key = request.key + + elif isinstance(request, models.CompleteMultipartUploadRequest): + op_input.op_name = "CompleteMultipartUpload" + op_input.method = "POST" + op_input.bucket = request.bucket + op_input.key = request.key + + elif isinstance(request, models.AbortMultipartUploadRequest): + op_input.op_name = "AbortMultipartUpload" + op_input.method = "DELETE" + op_input.bucket = request.bucket + op_input.key = request.key + + else: + raise exceptions.ParamInvalidError(field='request') + + return serde.serialize_input(request, op_input) + + +def _deserialize_output(client: _SyncClientImpl, op_output: OperationOutput) -> PresignResult: + result = PresignResult() + result.method = op_output.http_response.request.method + result.url = op_output.http_response.request.url + result.expiration = op_output.op_metadata.get('expiration_time', None) + result.signed_headers = {} + + _options = getattr(client, '_options', None) + if _options: + s = getattr(_options, 'signer', None) + if s: + for k, v in op_output.http_response.request.headers.items(): + if s.is_signed_header(k): + result.signed_headers[k] = v + + if result.expiration is not None and isinstance(s, SignerV4): + now = datetime.datetime.now(datetime.timezone.utc) + if (result.expiration - now) > datetime.timedelta(days=7): + raise exceptions.PresignExpirationError() + + return result diff --git a/alibabacloud_oss_v2/progress.py b/alibabacloud_oss_v2/progress.py new file mode 100644 index 0000000..8ed7dea --- /dev/null +++ b/alibabacloud_oss_v2/progress.py @@ -0,0 +1,40 @@ +"""Progress for upload, download and copy""" +from typing import Optional + + +class Progress: + """_summary_ + """ + + def __init__( + self, + progress_fn, + total: Optional[int], + ) -> None: + self._progress_fn = progress_fn + self._total = total or -1 + self._written = 0 + self._lwritten = 0 + + def reset(self): + """_summary_ + """ + self._lwritten = self._written + self._written = 0 + + def write(self, s: bytes): + """_summary_ + """ + n = 
_len(s)
+        self._written = self._written + n
+
+        if self._progress_fn is None or self._written < self._lwritten:
+            return
+
+        self._progress_fn(n, self._written, self._total)
+
+
+def _len(s):
+    if isinstance(s, int):
+        return 1
+    return len(s)
diff --git a/alibabacloud_oss_v2/retry/__init__.py b/alibabacloud_oss_v2/retry/__init__.py
new file mode 100644
index 0000000..39212d1
--- /dev/null
+++ b/alibabacloud_oss_v2/retry/__init__.py
@@ -0,0 +1,16 @@
+from .retryer_impl import (
+    NopRetryer,
+    StandardRetryer
+)
+
+from .backoff import (
+    BackoffDelayer,
+    FullJitterBackoff,
+    EqualJitterBackoff,
+    FixedDelayBackoff
+)
+
+
+from .error_retryable import (
+    ErrorRetryable,
+)
diff --git a/alibabacloud_oss_v2/retry/backoff.py b/alibabacloud_oss_v2/retry/backoff.py
new file mode 100644
index 0000000..b1da451
--- /dev/null
+++ b/alibabacloud_oss_v2/retry/backoff.py
@@ -0,0 +1,116 @@
+"""Modules for backoff."""
+
+import abc
+import sys
+import random
+from math import log2
+
+class BackoffDelayer(abc.ABC):
+    """Abstract base class for backoff delayer."""
+
+    @abc.abstractmethod
+    def backoff_delay(self, attempt: int, error: Exception) -> float:
+        """Returns the delay that should be used before retrying the attempt.
+
+        Args:
+            attempt (int): current retry attempt
+            error (Exception): the error encountered
+
+        Returns:
+            float: delay duration in seconds.
+        """
+
+class FixedDelayBackoff(BackoffDelayer):
+    """FixedDelayBackoff implements fixed backoff."""
+
+    def __init__(self, backoff: float) -> None:
+        """
+        Args:
+            backoff (float): the delay duration in seconds
+        """
+        self._backoff = backoff
+
+    def backoff_delay(self, attempt: int, error: Exception) -> float:
+        """Returns the delay that should be used before retrying the attempt.
+
+        Args:
+            attempt (int): current retry attempt
+            error (Exception): the error encountered
+
+        Returns:
+            float: delay duration in seconds.
+        """
+        return self._backoff
+
+    def __repr__(self) -> str:
+        return f""
+
+
+class FullJitterBackoff(BackoffDelayer):
+    """FullJitterBackoff implements capped exponential backoff with jitter.
+    [0.0, 1.0) * min(2 ^ attempts * baseDelay, maxBackoff)
+    """
+
+    def __init__(self, base_delay: float, max_backoff: float) -> None:
+        """
+        Args:
+            base_delay (float): the base delay duration in seconds
+            max_backoff (float): the max duration in seconds
+        """
+        self._base_delay = base_delay
+        self._max_backoff = max_backoff
+        self._attempt_ceiling = int(log2(float(sys.maxsize) / base_delay))
+
+    def backoff_delay(self, attempt: int, error: Exception) -> float:
+        """Returns the delay that should be used before retrying the attempt.
+
+        Args:
+            attempt (int): current retry attempt
+            error (Exception): the error encountered
+
+        Returns:
+            float: delay duration in seconds.
+        """
+        attempt = min(attempt, self._attempt_ceiling)
+        delay = min((self._base_delay * (1 << attempt)), self._max_backoff)
+        rand = random.uniform(0, 1)
+        return rand * delay
+
+    def __repr__(self) -> str:
+        return f""
+
+
+class EqualJitterBackoff(BackoffDelayer):
+    """EqualJitterBackoff implements equal jitter backoff.
diff --git a/alibabacloud_oss_v2/retry/backoff.py b/alibabacloud_oss_v2/retry/backoff.py
new file mode 100644
index 0000000..b1da451
--- /dev/null
+++ b/alibabacloud_oss_v2/retry/backoff.py
@@ -0,0 +1,116 @@
+"""Modules for backoff """
+
+import abc
+import sys
+import random
+from math import log2
+
+class BackoffDelayer(abc.ABC):
+    """Abstract base class for backoff delayers."""
+
+    @abc.abstractmethod
+    def backoff_delay(self, attempt: int, error: Exception) -> float:
+        """Returns the delay that should be used before retrying the attempt.
+
+        Args:
+            attempt (int): current retry attempt
+            error (Exception): the error encountered
+
+        Returns:
+            float: delay duration in seconds.
+        """
+
+class FixedDelayBackoff(BackoffDelayer):
+    """FixedDelayBackoff implements fixed backoff."""
+
+    def __init__(self, backoff: float) -> None:
+        """
+        Args:
+            backoff (float): the delay duration in seconds
+        """
+        self._backoff = backoff
+
+    def backoff_delay(self, attempt: int, error: Exception) -> float:
+        """Returns the delay that should be used before retrying the attempt.
+
+        Args:
+            attempt (int): current retry attempt
+            error (Exception): the error encountered
+
+        Returns:
+            float: delay duration in seconds.
+        """
+        return self._backoff
+
+    def __repr__(self) -> str:
+        return f"<FixedDelayBackoff, backoff: {self._backoff}>"
+
+
+class FullJitterBackoff(BackoffDelayer):
+    """FullJitterBackoff implements capped exponential backoff with full jitter.
+    [0.0, 1.0) * min(2 ^ attempt * baseDelay, maxBackoff)
+    """
+
+    def __init__(self, base_delay: float, max_backoff: float) -> None:
+        """
+        Args:
+            base_delay (float): the base delay duration in seconds
+            max_backoff (float): the max duration in seconds
+        """
+        self._base_delay = base_delay
+        self._max_backoff = max_backoff
+        self._attempt_ceiling = int(log2(float(sys.maxsize) / base_delay))
+
+    def backoff_delay(self, attempt: int, error: Exception) -> float:
+        """Returns the delay that should be used before retrying the attempt.
+
+        Args:
+            attempt (int): current retry attempt
+            error (Exception): the error encountered
+
+        Returns:
+            float: delay duration in seconds.
+        """
+        attempt = min(attempt, self._attempt_ceiling)
+        delay = min((self._base_delay * (1 << attempt)), self._max_backoff)
+        rand = random.uniform(0, 1)
+        return rand * delay
+
+    def __repr__(self) -> str:
+        return f"<FullJitterBackoff, base delay: {self._base_delay}, max backoff: {self._max_backoff}>"
+
+
+class EqualJitterBackoff(BackoffDelayer):
+    """EqualJitterBackoff implements equal jitter backoff.
+    ceil = min(2 ^ attempt * baseDelay, maxBackoff)
+    ceil/2 + [0.0, 1.0) * (ceil/2 + 1)
+    """
+
+    def __init__(self, base_delay: float, max_backoff: float) -> None:
+        """
+        Args:
+            base_delay (float): the base delay duration in seconds
+            max_backoff (float): the max duration in seconds
+        """
+        self._base_delay = base_delay
+        self._max_backoff = max_backoff
+        self._attempt_ceiling = int(log2(float(sys.maxsize) / base_delay))
+
+    def backoff_delay(self, attempt: int, error: Exception) -> float:
+        """Returns the delay that should be used before retrying the attempt.
+
+        Args:
+            attempt (int): current retry attempt
+            error (Exception): the error encountered
+
+        Returns:
+            float: delay duration in seconds.
+        """
+        attempt = min(attempt, self._attempt_ceiling)
+        delay = min((self._base_delay * (1 << attempt)), self._max_backoff)
+        half = delay / 2
+        rand = random.uniform(0, 1)
+        return half + rand * (half + 1)
+
+    def __repr__(self) -> str:
+        return f"<EqualJitterBackoff, base delay: {self._base_delay}, max backoff: {self._max_backoff}>"
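Sampling the two jittered strategies side by side makes the difference concrete: full jitter spreads delays across [0, ceil), while equal jitter never drops below ceil/2. A self-contained sketch, using only the classes above:

    import random

    from alibabacloud_oss_v2.retry.backoff import FullJitterBackoff, EqualJitterBackoff

    random.seed(0)  # deterministic output for the demo
    err = RuntimeError("simulated failure")
    full = FullJitterBackoff(base_delay=0.2, max_backoff=20.0)
    equal = EqualJitterBackoff(base_delay=0.2, max_backoff=20.0)
    for attempt in range(1, 5):
        print(attempt,
              round(full.backoff_delay(attempt, err), 3),
              round(equal.backoff_delay(attempt, err), 3))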
+ """ + for e in self._exceptions: + if isinstance(error, e): + return True + return False + + def __repr__(self) -> str: + return "" diff --git a/alibabacloud_oss_v2/retry/retryer_impl.py b/alibabacloud_oss_v2/retry/retryer_impl.py new file mode 100644 index 0000000..cfb8b90 --- /dev/null +++ b/alibabacloud_oss_v2/retry/retryer_impl.py @@ -0,0 +1,67 @@ +"""Modules for retryer """ +from typing import Optional, List +from ..types import Retryer +from .. import defaults +from . import error_retryable +from . import backoff + +_default_error_retryables = [ + error_retryable.HTTPStatusCodeRetryable(), + error_retryable.ServiceErrorCodeRetryable(), + error_retryable.ClientErrorRetryable() +] + + +class NopRetryer(Retryer): + """nop retryer""" + + def is_error_retryable(self, error: Exception) -> bool: + return False + + def max_attempts(self) -> int: + return 1 + + def retry_delay(self, attempt: int, error: Exception) -> float: + raise NotImplementedError() + + +class StandardRetryer(Retryer): + """standard retryer""" + + def __init__( + self, + max_attempts: Optional[int] = None, + max_backoff: Optional[float] = None, + base_delay: Optional[float] = None, + error_retryables: Optional[List[error_retryable.ErrorRetryable]] = None, + backoff_delayer: Optional[backoff.BackoffDelayer] = None, + ) -> None: + """ + Args: + max_attempts (int, optional): max retry attempt + max_backoff (float, optional): the max duration in second. + base_delay (float, optional): the base delay duration in second. + error_retryables ([ErrorRetryable], optional): error retryables list. + backoff_delayer ([BackoffDelayer], optional): backoff delayer. + """ + super().__init__() + self._max_attempts = max_attempts or defaults.DEFAULT_MAX_ATTEMPTS + self._max_backoff = max_backoff or defaults.DEFAULT_MAX_BACKOFF_S + self._base_delay = base_delay or defaults.DEFAULT_BASE_DELAY_S + self._error_retryables = error_retryables or _default_error_retryables + if backoff_delayer is None: + backoff_delayer = backoff.FullJitterBackoff( + self._base_delay, self._max_backoff) + self._backoff_delayer = backoff_delayer + + def is_error_retryable(self, error: Exception) -> bool: + for r in self._error_retryables: + if r.is_error_retryable(error): + return True + return False + + def max_attempts(self) -> int: + return self._max_attempts + + def retry_delay(self, attempt: int, error: Exception) -> float: + return self._backoff_delayer.backoff_delay(attempt, error) diff --git a/alibabacloud_oss_v2/serde.py b/alibabacloud_oss_v2/serde.py new file mode 100644 index 0000000..44e1ef7 --- /dev/null +++ b/alibabacloud_oss_v2/serde.py @@ -0,0 +1,695 @@ +"""_summary_ +""" +import datetime +import sys +from enum import Enum +from typing import Dict, Any, Optional, List, MutableMapping, Mapping, cast +from email.utils import format_datetime, parsedate_tz +import xml.etree.ElementTree as ET +from . import exceptions +from .types import OperationInput, OperationOutput, CaseInsensitiveDict + +_model_allow_attribute_map = ["headers", "parameters", "payload"] + +ISO8601 = '%Y-%m-%dT%H:%M:%SZ' +ISO8601_MICRO = '%Y-%m-%dT%H:%M:%S.%fZ' + +class Model: + """Mixin for all client request body/response body models to support + serialization and deserialization. 
+ """ + _dependency_map: Dict[str, Dict[str, Any]] = {} # use for deserialization + _attribute_map: Dict[str, Dict[str, Any]] = {} + + def __init__(self, **kwargs: Any) -> None: + for k in kwargs: # pylint: disable=consider-using-dict-items + if k not in self._attribute_map and k not in _model_allow_attribute_map: + pass + else: + setattr(self, k, kwargs[k]) + self.__models = None + + def __eq__(self, other: Any) -> bool: + """Compare objects by comparing all attributes.""" + if isinstance(other, self.__class__): + return self.__dict__ == other.__dict__ + return False + + def __ne__(self, other: Any) -> bool: + """Compare objects by comparing all attributes.""" + return not self.__eq__(other) + + def __str__(self) -> str: + return str(self.__dict__) + + def __create_depend_object(self, name: str) -> "Model": # pylint: disable=unused-private-member + """Creats an instance of object from dependencies map, + + Args: + name (str): the name of a object type + + Returns: + Model | None: An instance of object or None + + """ + new = self._dependency_map.get(name, {}).get('new', None) + if new is not None: + return new() + + if self.__models is None: + str_models = self.__module__.rsplit(".", 1)[0] + self.__models = sys.modules[str_models] or {} + + class_obj = self.__models.__dict__.get(name, None) + if class_obj is not None: + return class_obj() + + return None + + +class RequestModel(Model): + """request body models to support serialization. + """ + + +class ResultModel(Model): + """response body models to support deserialization. + """ + + def __init__( + self, + **kwargs: Any + ) -> None: + super().__init__(**kwargs) + self.status = '' + self.status_code = 0 + self.request_id = '' + self.headers: MutableMapping[str, str] = {} + + +class _FixedOffset(datetime.tzinfo): + """Fixed offset in minutes east from UTC. + :param int offset: offset in minutes + """ + + def __init__(self, offset): + self.__offset = datetime.timedelta(minutes=offset) + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return str(self.__offset.total_seconds() / 3600) + + def __repr__(self): + return f"" + + def dst(self, dt): + return datetime.timedelta(0) + + +def serialize_isotime(value: datetime.datetime) -> str: + """Serialize Datetime object into ISO-8601 formatted string + + Args: + value (datetime.datetime): value to be serialized + + Returns: + str: ISO-8601 formatted string, e.g 2014-05-15T11:18:32.000Z + """ + try: + value = value.astimezone(datetime.timezone.utc) + except ValueError: + # Before Python 3.8, this raised for a naive datetime. + pass + try: + if value.microsecond > 0: + return value.strftime(ISO8601_MICRO) + return value.strftime(ISO8601) + except ValueError: + return value.strftime(ISO8601) + + +def serialize_httptime(value: datetime.datetime) -> str: + """Serialize Datetime object into http time formatted string + + Args: + value (datetime.datetime): value to be serialized + + Returns: + str: http time formatted string, e.g Thu, 15 May 2014 11:18:32 GMT + """ + try: + value = value.astimezone(datetime.timezone.utc) + except ValueError: + # Before Python 3.8, this raised for a naive datetime. 
+ pass + return format_datetime(value, True) + + +def serialize_unixtime(value: datetime.datetime) -> str: + """Serialize Datetime object into unix time formatted string + + Args: + value (datetime.datetime): value to be serialized + + Returns: + str: http time formatted string, e.g 1702743657 + """ + try: + value = value.astimezone(datetime.timezone.utc) + except ValueError: + # Before Python 3.8, this raised for a naive datetime. + pass + return str(int(value.timestamp())) + + +def deserialize_httptime(date_time: str) -> datetime.datetime: + """Deserialize http datetime formatted string into Datetime object. + """ + parsed_date = parsedate_tz(date_time) + if not parsed_date: + raise exceptions.DeserializationError( + error=f'Invalid HTTP datetime {date_time}') + tz_offset = cast(int, parsed_date[9]) + return datetime.datetime(*parsed_date[:6], tzinfo=_FixedOffset(tz_offset / 60)) + + +def deserialize_iso(date_time: str) -> datetime.datetime: + """Deserialize ISO-8601 formatted string into Datetime object. + """ + if not date_time: + return None + if date_time[-1] == "Z": + delta = 0 + timestamp = date_time[:-1] + else: + timestamp = date_time[:-6] + sign, offset = date_time[-6], date_time[-5:] + delta = int(sign + offset[:1]) * 60 + int(sign + offset[-2:]) + + check_decimal = timestamp.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + timestamp = timestamp.replace(decimal_str, decimal_str[0:6]) + + if delta == 0: + tzinfo = datetime.timezone.utc + else: + tzinfo = datetime.timezone(datetime.timedelta(minutes=delta)) + + try: + deserialized = datetime.datetime.strptime( + timestamp, "%Y-%m-%dT%H:%M:%S.%f") + except ValueError: + deserialized = datetime.datetime.strptime( + timestamp, "%Y-%m-%dT%H:%M:%S") + + deserialized = deserialized.replace(tzinfo=tzinfo) + return deserialized + + +def deserialize_unixtime(date_time: str) -> datetime.datetime: + """Deserialize a datetime from a POSIX timestamp into Datetime object. 
+ """ + try: + attr = int(date_time) + date_obj = datetime.datetime.fromtimestamp( + attr, datetime.timezone.utc) + except ValueError as err: + raise exceptions.DeserializationError( + error=f'Cannot deserialize {date_time} to unix datetime object.') from err + return date_obj + + +def deserialize_boolean(val: str): + """Deserialize string into a boolean value + + For strings, the value for True/False is case insensitive + """ + if val is None: + return False + return val.lower() == 'true' + + +def _serialize_xml_any(tag: str, value: Any, atype: str) -> ET.Element: + if isinstance(value, Model): + return _serialize_xml_model(value) + + if isinstance(value, datetime.datetime): + atypes = atype.split(',') + child = ET.Element(tag) + if 'httptime' in atypes: + child.text = serialize_httptime(value) + elif 'unixtime' in atypes: + child.text = serialize_unixtime(value) + else: + child.text = serialize_isotime(value) + return child + + if isinstance(value, Enum): + child = ET.Element(tag) + child.text = str(value.value) + return child + + if isinstance(value, bool): + child = ET.Element(tag) + child.text = str(value).lower() + return child + + # default is basic type + if isinstance(value, (str, int, float)): + child = ET.Element(tag) + child.text = str(value) + return child + + raise exceptions.SerializationError( + error=f'Unsupport type {type(value)}') + + +def _serialize_xml_model(obj, root: Optional[str] = None) -> ET.Element: + """_summary_ + + Args: + model (Model): _description_ + root (Optional[str], optional): _description_. Defaults to None. + + Returns: + ET.Element: _description_ + """ + + if root is not None and len(root) > 0: + name = root + else: + name = obj.__class__.__name__ + xml_map = getattr(obj, '_xml_map', None) + if xml_map is not None: + name = xml_map.get('name', name) + + elem = ET.Element(name) + + attributes = getattr(obj, '_attribute_map') + for attr, attr_desc in attributes.items(): + if attr_desc.get('tag', '') != 'xml': + continue + attr_value = getattr(obj, attr) + attr_key = attr_desc.get('rename', attr) + attr_type = attr_desc.get('type', '') + if attr_value is not None: + if isinstance(attr_value, Model): + model = cast(Model, attr_value) + elem.append(_serialize_xml_model(model)) + elif isinstance(attr_value, list): + elem.extend([_serialize_xml_any(attr_key, a, attr_type) + for a in attr_value]) + else: + elem.append(_serialize_xml_any( + attr_key, attr_value, attr_type)) + return elem + + +def _serialize_to_str(value: Any, atype: str) -> str: + if isinstance(value, datetime.datetime): + atypes = atype.split(',') + if 'httptime' in atypes: + return serialize_httptime(value) + if 'unixtime' in atypes: + return serialize_unixtime(value) + return serialize_isotime(value) + + if isinstance(value, Enum): + return str(value.value) + + if isinstance(value, bool): + return str(value).lower() + + # default is basic type + if isinstance(value, (str, int, float)): + return str(value) + + raise exceptions.SerializationError( + error=f'Unsupport type {type(value)}') + + +def _deserialize_xml_model(root: ET.Element, obj: Any) -> None: + """_summary_ + """ + attributes = getattr(obj, '_attribute_map') + for attr, attr_desc in attributes.items(): + if attr_desc.get('tag', '') != 'xml': + continue + attr_key = attr_desc.get('rename', attr) + attr_types = str(attr_desc.get('type', 'str')).split(',') + + if attr_types[0].startswith('[') and attr_types[0].endswith(']'): + #attr_types[0] = attr_types[0][1:].removesuffix(']') + attr_types[0] = attr_types[0][1:-1] + value = 
_deserialize_xml_iter( + obj, root.findall(attr_key), attr_types) + else: + value = _deserialize_xml_any(obj, root.find(attr_key), attr_types) + + if value is not None: + setattr(obj, attr, value) + + +def _deserialize_xml_iter(upper_obj: Model, elems: List[ET.Element], attr_types: List[str]) -> Any: + if elems is None or len(elems) == 0: + return None + return [_deserialize_xml_any(upper_obj, elem, attr_types) for elem in elems] + + +def _deserialize_xml_any(upper_obj: Model, elem: ET.Element, attr_types: List[str]) -> Any: + # if elem is None or elem.text is None: + # print(f'elem.tag={elem.tag}, elem.text={elem.text}, attr_types={attr_types}\n') + if elem is None: + return None + + attr_type = attr_types[0] + + if not attr_type.islower(): + obj = upper_obj._Model__create_depend_object( # pylint: disable=protected-access + attr_type) + if obj is None: + raise exceptions.DeserializationError( + error=f'Can not create object with {attr_type} type') + _deserialize_xml_model(elem, obj) + return obj + + if elem.text is None: + return None + + # basic type + if attr_type in ('str', ''): + return str(elem.text) + + if attr_type == 'bool': + return deserialize_boolean(elem.text) + + if elem.text == '': + return None + + if attr_type == 'int': + return int(elem.text) + if attr_type == 'float': + return float(elem.text) + if attr_type == 'datetime': + return _deserialize_datetime(elem.text, attr_types) + + raise exceptions.DeserializationError(error=f'Unsupport type {attr_type}') + + +def _deserialize_datetime(date_time: str, subtype: List[str]) -> datetime.datetime: + if 'httptime' in subtype: + return deserialize_httptime(date_time) + if 'unixtime' in subtype: + return deserialize_unixtime(date_time) + + return deserialize_iso(date_time) + + +def _deserialize_to_any(value: Optional[str], atype: str) -> Any: + if value is None: + return None + + if atype in ('str', ''): + return value + + if atype == 'bool': + return deserialize_boolean(value) + + if atype == 'int': + return int(value) + if atype == 'float': + return float(value) + if 'datetime' in atype: + return _deserialize_datetime(value, atype.split(',')) + + raise exceptions.DeserializationError(error=f'Unsupport type {atype}') + + +def serialize_xml(obj, root: Optional[str] = None) -> Any: + """_summary_ + + Args: + model (Model): _description_ + root (Optional[str], optional): _description_. Defaults to None. 
+
+    Returns:
+        bytes: the serialized XML document.
+    """
+
+    elem = _serialize_xml_model(obj, root)
+    return ET.tostring(elem, encoding='utf-8', method='xml')
+
+
+def deserialize_xml(xml_data: Any, obj: Any, expect_tag: Optional[str] = None) -> None:
+    """Deserializes an XML document into the given model object,
+    optionally checking the root tag.
+    """
+    if not isinstance(obj, Model):
+        return
+
+    root = ET.fromstring(xml_data)
+    if expect_tag is not None and len(expect_tag) > 0:
+        if root.tag != expect_tag:
+            raise exceptions.DeserializationError(
+                error=f'Expect root tag is {expect_tag}, got {root.tag}')
+
+    _deserialize_xml_model(root, obj)
+
+
+def serialize_input(request: Model, op_input: OperationInput,
+                    custom_serializer: Optional[List[Any]] = None) -> OperationInput:
+    """Serializes a request model into the given OperationInput.
+
+    Args:
+        request (Model): the request model to read from
+        op_input (OperationInput): the operation input to populate
+        custom_serializer (Optional[List[Any]]): extra serializer callables
+
+    Raises:
+        exceptions.SerializationError: when request is not a RequestModel
+        exceptions.ParamRequiredError: when a required attribute is missing
+
+    Returns:
+        OperationInput: the populated operation input
+    """
+
+    if not isinstance(request, RequestModel):
+        raise exceptions.SerializationError(
+            error=f'request<{request.__class__}> is not subclass of serde.RequestModel')
+
+    if op_input.headers is None:
+        op_input.headers = CaseInsensitiveDict()
+
+    if op_input.parameters is None:
+        op_input.parameters = {}
+
+    if hasattr(request, 'headers'):
+        headers = cast(MutableMapping[str, str], request.headers)
+        if len(headers) > 0:
+            for k, v in headers.items():
+                op_input.headers[k] = v
+
+    if hasattr(request, 'parameters'):
+        parameters = cast(Mapping[str, str], request.parameters)
+        if len(parameters) > 0:
+            for k, v in parameters.items():
+                op_input.parameters[k] = v
+
+    if hasattr(request, 'payload'):
+        op_input.body = request.payload
+
+    attributes = getattr(request, '_attribute_map')
+    for attr, attr_desc in attributes.items():
+        attr_value = getattr(request, attr)
+
+        if attr_value is None:
+            if attr_desc.get('required', False) is True:
+                raise exceptions.ParamRequiredError(field=attr)
+            continue
+
+        attr_pos = cast(str, attr_desc.get('position', ''))
+        attr_type = cast(str, attr_desc.get('type', ''))
+        attr_name = cast(str, attr_desc.get('rename', attr))
+        if attr_pos == 'query':
+            op_input.parameters.update(
+                {attr_name: _serialize_to_str(attr_value, attr_type)})
+        elif attr_pos == 'header':
+            if 'dict' in attr_type and isinstance(attr_value, dict):
+                op_input.headers.update(
+                    {f'{attr_name}{k}': v for k, v in attr_value.items()})
+            else:
+                op_input.headers.update(
+                    {attr_name: _serialize_to_str(attr_value, attr_type)})
+        elif attr_pos == 'body':
+            if 'xml' in attr_type:
+                op_input.body = serialize_xml(
+                    attr_value, attr_name if len(attr_name) > 0 else None)
+            else:
+                op_input.body = attr_value
+        else:
+            # ignore
+            pass
+
+    # custom serializer
+    custom_serializer = custom_serializer or []
+    for serializer in custom_serializer:
+        serializer(request, op_input)
+
+    return op_input
+
+
+def deserialize_output(result: Model, op_output: OperationOutput,
+                       custom_deserializer: Optional[List[Any]] = None) -> Model:
+    """Deserializes an OperationOutput into the given result model.
+
+    Args:
+        result (Model): the result model to populate
+        op_output (OperationOutput): the operation output to read from
+        custom_deserializer (Optional[List[Any]]): extra deserializer callables
+
+    Returns:
+        Model: the populated result model
+    """
+    if not isinstance(result, ResultModel):
+        raise exceptions.DeserializationError(
+            error=f'result<{result.__class__}> is not subclass of serde.ResultModel')
+
+    result.status = op_output.status or ''
+    result.status_code = op_output.status_code or 0
+    result.headers = op_output.headers or
CaseInsensitiveDict() + result.request_id = result.headers.get('x-oss-request-id', '') + + # custom deserializer + custom_deserializer = custom_deserializer or [] + for deserializer in custom_deserializer: + deserializer(result, op_output) + + return result + + +def deserialize_output_headers(result: Model, op_output: OperationOutput) -> Model: + """_summary_ + + Args: + result (Model): _description_ + op_output (OperationOutput): _description_ + + Returns: + Any: _description_ + """ + attributes = getattr(result, '_attribute_map') + headers = op_output.headers or {} + dict_attrs=[] + for attr, attr_desc in attributes.items(): + if attr_desc.get('tag', '') != 'output': + continue + attr_key = attr_desc.get('rename', attr) + attr_type = attr_desc.get('type', 'str') + if 'dict' in attr_type: + dict_attrs.append(attr) + continue + value = _deserialize_to_any( + value=headers.get(attr_key, None), atype=attr_type) + if value is not None: + setattr(result, attr, value) + + for attr in dict_attrs: + attr_desc = attributes.get(attr) + attr_key = attr_desc.get('rename', attr) + dict_value = CaseInsensitiveDict() + for k in headers.keys(): + if k.lower().startswith(attr_key): + dict_value[k[len(attr_key):]] = headers.get(k) + if len(dict_value) > 0: + setattr(result, attr, dict_value) + + +def deserialize_output_xmlbody(result: Model, op_output: OperationOutput) -> Model: + """_summary_ + + Args: + result (Model): _description_ + op_output (OperationOutput): _description_ + + Returns: + Any: _description_ + """ + xml_data = op_output.http_response.content + + if xml_data is None or len(xml_data) == 0: + return result + + # parser xml body + attributes = cast(Dict, getattr(result, '_attribute_map')) + xml_fields = [] + xml_roots = [] + for attr, attr_desc in attributes.items(): + if attr_desc.get('tag', '') == 'xml': + xml_fields.append(attr) + + if (attr_desc.get('tag', '') == 'output' and + attr_desc.get('position', '') == 'body' and + 'xml' in attr_desc.get('type', '')): + xml_roots.append(attr) + + if len(xml_fields) > 0: + xml_map = cast(Dict, getattr(result, '_xml_map', {})) + deserialize_xml(xml_data, result, expect_tag=xml_map.get('name', None)) + + elif len(xml_roots) > 0: + attr = xml_roots[0] + attr_desc = attributes.get(attr) + attr_types = cast(str, attr_desc.get('type')).split(',') + obj = result._Model__create_depend_object( # pylint: disable=protected-access + attr_types[0]) + if obj is None: + raise exceptions.DeserializationError( + error=f'Can not create object with {attr_types} type') + expect_tag = attr_desc.get('rename', None) + deserialize_xml(xml_data, obj, expect_tag=expect_tag) + setattr(result, attr, obj) + + return result + + +def deserialize_output_discardbody(result: Model, op_output: OperationOutput) -> Model: + """_summary_ + + Args: + result (Model): _description_ + op_output (OperationOutput): _description_ + + Returns: + Any: _description_ + """ + _ = op_output.http_response.content + op_output.http_response.close() + return result + + +def copy_request(dst: RequestModel, src: RequestModel): + """_summary_ + + Args: + dst (RequestModel): _description_ + src (RequestModel): _description_ + """ + dst_attr_map = getattr(dst, '_attribute_map', None) + src_attr_map = getattr(src, '_attribute_map', None) + + if dst_attr_map is None or src_attr_map is None: + return + + for attr, _ in dst_attr_map.items(): + src_value = getattr(src, attr, None) + if src_value is None: + continue + setattr(dst, attr, src_value) diff --git a/alibabacloud_oss_v2/serde_utils.py 
b/alibabacloud_oss_v2/serde_utils.py new file mode 100644 index 0000000..b2429a0 --- /dev/null +++ b/alibabacloud_oss_v2/serde_utils.py @@ -0,0 +1,308 @@ +"""utils for api only""" + +import hashlib +import base64 +import json +from urllib.parse import unquote, quote +from typing import List, cast, Union, Dict +from .types import OperationInput, HttpResponse, OperationOutput +from . import serde +from . import utils +from . import exceptions +from . import progress +from . import crc +from .models import ( + ListObjectsResult, + ListObjectsV2Result, + ListObjectVersionsResult, + CopyObjectRequest, + UploadPartCopyRequest, + DeleteMultipleObjectsRequest, + DeleteMultipleObjectsResult, + InitiateMultipartUploadResult, + CompleteMultipartUploadResult, + ListMultipartUploadsResult, + ListPartsResult, + ProcessObjectRequest, + AsyncProcessObjectRequest +) + + +def add_content_type(_: serde.Model, op_input: OperationInput) -> OperationInput: + """ + Add content-type based on the file suffix when it does not exist. + """ + if op_input.headers.get('Content-Type', None) is not None: + return op_input + + op_input.headers.update({ + 'Content-Type': utils.guess_content_type(op_input.key, "application/octet-stream") + }) + + return op_input + + +def add_content_md5(_: serde.Model, op_input: OperationInput) -> OperationInput: + """ + Add content-md5 when it does not exist. + """ + if op_input.headers.get('Content-MD5', None) is not None: + return op_input + + if op_input.body is None: + md5 = '1B2M2Y8AsgTpgAmY7PhCfg==' + elif isinstance(op_input.body, (str, bytes)): + h = hashlib.md5() + h.update(op_input.body) + md5 = base64.b64encode(h.digest()).decode() + else: + raise exceptions.SerializationError( + error=f'add_content_md5 fail, not support instance <{op_input.body.__class__}>') + + op_input.headers.update({'Content-MD5': md5}) + return op_input + + +def add_progress(request: serde.Model, op_input: OperationInput) -> OperationInput: + """ + Add progress writer when progress_fn is set. + """ + + fn = getattr(request, 'progress_fn', None) + if fn is None: + return op_input + + trackers = cast(List, op_input.op_metadata.get( + 'opm-request-body-tracker', [])) + p = progress.Progress( + progress_fn=fn, + total=utils.guess_content_length(op_input.body) + ) + trackers.append(p) + op_input.op_metadata['opm-request-body-tracker'] = trackers + + return op_input + + +def add_crc_checker(_: serde.Model, op_input: OperationInput) -> OperationInput: + """ + Add crc writer and crc checker. 
+ """ + + trackers = cast(List, op_input.op_metadata.get( + 'opm-request-body-tracker', [])) + p = crc.Crc64(init_crc=0) + trackers.append(p) + op_input.op_metadata['opm-request-body-tracker'] = trackers + + handlers = cast(List, op_input.op_metadata.get( + 'opm-response-handler', [])) + + def crc_checker_handler(response: HttpResponse): + scrc = response.headers.get('x-oss-hash-crc64ecma', None) + if scrc is None: + return + ccrc = str(p.sum64()) + + if scrc != ccrc: + raise exceptions.InconsistentError( + client_crc=ccrc, + server_crc=scrc + ) + + handlers.append(crc_checker_handler) + op_input.op_metadata['opm-response-handler'] = handlers + + return op_input + +def deserialize_encode_type(result: serde.Model, _: OperationOutput) -> serde.Model: + """ + do url decode + """ + + if not hasattr(result, 'encoding_type'): + raise exceptions.DeserializationError( + error=f'{result.__class__} has not encoding_type attribute') + + if result.encoding_type is None or result.encoding_type != 'url': + return result + + if isinstance(result, ListObjectsResult): + # fields + fields = ['prefix', 'marker', 'delimiter', 'next_marker'] + + # Contents.Key + if isinstance(result.contents, List): + for i, _ in enumerate(result.contents): + result.contents[i].key = unquote(result.contents[i].key) + + # CommonPrefixes.Prefix + if isinstance(result.common_prefixes, List): + for i, _ in enumerate(result.common_prefixes): + result.common_prefixes[i].prefix = unquote( + result.common_prefixes[i].prefix) + + elif isinstance(result, ListObjectsV2Result): + # fields + fields = ['prefix', 'start_after', 'continuation_token', 'delimiter', 'next_continuation_token'] + + # Contents.Key + if isinstance(result.contents, List): + for i, _ in enumerate(result.contents): + result.contents[i].key = unquote(result.contents[i].key) + + # CommonPrefixes.Prefix + if isinstance(result.common_prefixes, List): + for i, _ in enumerate(result.common_prefixes): + result.common_prefixes[i].prefix = unquote( + result.common_prefixes[i].prefix) + + elif isinstance(result, ListObjectVersionsResult): + # fields + fields = ['prefix', 'key_marker', 'delimiter', 'next_key_marker'] + + # Version.Key + if isinstance(result.version, List): + for i, _ in enumerate(result.version): + result.version[i].key = unquote(result.version[i].key) + + # DeleteMarker.Key + if isinstance(result.delete_marker, List): + for i, _ in enumerate(result.delete_marker): + result.delete_marker[i].key = unquote(result.delete_marker[i].key) + + # CommonPrefixes.Prefix + if isinstance(result.common_prefixes, List): + for i, _ in enumerate(result.common_prefixes): + result.common_prefixes[i].prefix = unquote( + result.common_prefixes[i].prefix) + + elif isinstance(result, DeleteMultipleObjectsResult): + # fields + fields = [] + + # deleted_objects.Key + if isinstance(result.deleted_objects, List): + for i, _ in enumerate(result.deleted_objects): + result.deleted_objects[i].key = unquote(result.deleted_objects[i].key) + + elif isinstance(result, InitiateMultipartUploadResult): + # fields + fields = ['key'] + + elif isinstance(result, CompleteMultipartUploadResult): + # fields + fields = ['key'] + + elif isinstance(result, ListMultipartUploadsResult): + # fields + fields = ['key_marker', 'next_key_marker', 'prefix', 'delimiter'] + + # Upload.Key + if isinstance(result.uploads, List): + for i, _ in enumerate(result.uploads): + result.uploads[i].key = unquote(result.uploads[i].key) + + elif isinstance(result, ListPartsResult): + # fields + fields = ['key'] + + else: + fields = 
[]
+
+    for field in fields:
+        val = getattr(result, field)
+        if val is not None:
+            setattr(result, field, unquote(val))
+
+    return result
+
+def encode_copy_source(request: Union[CopyObjectRequest, UploadPartCopyRequest]) -> str:
+    """
+    encode the copy source parameter
+    """
+    source = f'/{request.source_bucket or request.bucket}/{quote(request.source_key)}'
+    if request.source_version_id is not None:
+        source += f'?versionId={request.source_version_id}'
+
+    return source
+
+
+def serialize_delete_objects(request: serde.Model, op_input: OperationInput) -> OperationInput:
+    """
+    serialize to the Delete XML string, e.g.
+    <Delete><Quiet>true</Quiet><Object><Key>key</Key></Object></Delete>
+    """
+    if not isinstance(request, DeleteMultipleObjectsRequest):
+        raise exceptions.SerializationError(error=f'Unsupported type {type(request)}')
+
+    xml = '<Delete>'
+    if request.quiet is not None:
+        xml += f'<Quiet>{"true" if request.quiet else "false"}</Quiet>'
+
+    if isinstance(request.objects, List):
+        for _, o in enumerate(request.objects):
+            xml += '<Object>'
+            key = utils.safety_str(o.key)
+            if len(key) > 0:
+                xml += f'<Key>{utils.escape_xml_value(key)}</Key>'
+            vid = utils.safety_str(o.version_id)
+            if len(vid) > 0:
+                xml += f'<VersionId>{vid}</VersionId>'
+            xml += '</Object>'
+
+    xml += '</Delete>'
+
+    op_input.body = xml.encode()
+
+    return op_input
+
+def add_process_action(request: serde.Model, op_input: OperationInput) -> OperationInput:
+    """
+    Add the process action to the request body.
+    """
+    if not isinstance(request, (ProcessObjectRequest, AsyncProcessObjectRequest)):
+        raise exceptions.SerializationError(error=f'Unsupported type {type(request)}')
+
+    attr_map = cast(Dict, getattr(request, '_attribute_map'))
+    attr_desc = cast(Dict, attr_map.get('process'))
+    key = attr_desc.get('rename', None)
+
+    if key is None:
+        raise exceptions.SerializationError(error='process field is invalid')
+
+    op_input.body = f'{key}={request.process}'
+
+    return op_input
+
+
+def deserialize_process_body(result: serde.Model, op_output: OperationOutput) -> serde.Model:
+    """Deserializes the JSON body of a process-object response into the result model.
+
+    Args:
+        result (Model): the result model to populate
+        op_output (OperationOutput): the operation output to read from
+
+    Returns:
+        Any: the populated result model
+    """
+    json_data = op_output.http_response.content
+
+    if json_data is None or len(json_data) == 0:
+        return result
+
+    jo = json.loads(json_data)
+
+    if not isinstance(jo, Dict):
+        return result
+
+    # parse json body
+    attributes = cast(Dict, getattr(result, '_attribute_map'))
+    for attr, attr_desc in attributes.items():
+        if attr_desc.get('tag', '') != 'json':
+            continue
+        attr_key = attr_desc.get('rename', attr)
+        value = jo.get(attr_key, None)
+        if value is not None:
+            setattr(result, attr, value)
+
+    return result
diff --git a/alibabacloud_oss_v2/signer/__init__.py b/alibabacloud_oss_v2/signer/__init__.py
new file mode 100644
index 0000000..3072e9c
--- /dev/null
+++ b/alibabacloud_oss_v2/signer/__init__.py
@@ -0,0 +1,5 @@
+# -*- coding: utf-8 -*-
+
+# signer implement
+from .v1 import SignerV1
+from .v4 import SignerV4
diff --git a/alibabacloud_oss_v2/signer/v1.py b/alibabacloud_oss_v2/signer/v1.py
new file mode 100644
index 0000000..caa06fd
--- /dev/null
+++ b/alibabacloud_oss_v2/signer/v1.py
@@ -0,0 +1,230 @@
+# -*- coding: utf-8 -*-
+import base64
+import datetime
+from typing import Optional
+from email.utils import format_datetime
+from urllib.parse import urlsplit, quote, unquote, SplitResult
+from hashlib import sha1
+import hmac
+from ..
import exceptions +from ..types import SigningContext, Signer + +class SignerV1(Signer): + """_summary_ + """ + _subresource_key_set = frozenset( + ['acl', 'bucketInfo', 'location', 'stat', 'delete', 'append', + 'tagging', 'objectMeta', 'uploads', 'uploadId', 'partNumber', + 'security-token', 'position', 'response-content-type', 'response-content-language', + 'response-expires', 'response-cache-control', 'response-content-disposition', + 'response-content-encoding', 'restore', 'callback', 'callback-var', + 'versions', 'versioning', 'versionId', 'sequential', 'continuation-token', + 'regionList', 'cloudboxes', 'symlink', 'resourceGroup'] + ) + + def sign(self, signing_ctx: SigningContext) -> None: + if signing_ctx is None: + raise exceptions.ParamNullError(field="SigningContext") + + if signing_ctx.credentials is None or not signing_ctx.credentials.has_keys(): + raise exceptions.ParamNullOrEmptyError(field="SigningContext.credentials") + + if signing_ctx.request is None: + raise exceptions.ParamNullOrEmptyError(field="SigningContext.request") + + if signing_ctx.auth_method_query: + return self._auth_query(signing_ctx) + + return self._auth_header(signing_ctx) + + def _auth_header(self, signing_ctx: SigningContext) -> None: + request = signing_ctx.request + cred = signing_ctx.credentials + + #Date + if signing_ctx.signing_time is None: + datetime_now = datetime.datetime.now(datetime.timezone.utc) + datetime_now = datetime_now + datetime.timedelta(seconds= (signing_ctx.clock_offset or 0)) + else: + datetime_now = signing_ctx.signing_time.astimezone(datetime.timezone.utc) + + datetime_now_rfc2822 = format_datetime(datetime_now, True) + request.headers.update({'Date':datetime_now_rfc2822}) + + #Credentials information + if cred.security_token is not None: + request.headers.update({'security-token':cred.security_token}) + + #string to sign + string_to_sign = self._calc_string_to_sign(signing_ctx=signing_ctx) + #print('string_to_sign:{}\n'.format(string_to_sign)) + + #signature + signature = self._calc_signature( + access_key_secrect=cred.access_key_secret, + string_to_sign=string_to_sign) + + #credential header + credential_header = f'OSS {cred.access_key_id}:{signature}' + + request.headers.update({'Authorization':credential_header}) + + signing_ctx.string_to_sign = string_to_sign + signing_ctx.signing_time = datetime_now + + + def _auth_query(self, signing_ctx: SigningContext) -> None: + request = signing_ctx.request + cred = signing_ctx.credentials + + #Date + if signing_ctx.signing_time is None: + datetime_now = datetime.datetime.now(datetime.timezone.utc) + else: + datetime_now = signing_ctx.signing_time.astimezone(datetime.timezone.utc) + + if signing_ctx.expiration_time is None: + expiration_time = datetime.datetime.now( + datetime.timezone.utc) + datetime.timedelta(minutes=15) + else: + expiration_time = signing_ctx.expiration_time.astimezone(datetime.timezone.utc) + + expires = str(int(expiration_time.timestamp())) + + encoded_pairs = {} + parts = urlsplit(request.url) + if parts.query: + for pair in parts.query.split('&'): + key, _, value = pair.partition('=') + encoded_pairs[key] = value + + encoded_pairs.pop('Signature', None) + encoded_pairs.pop('security-token', None) + encoded_pairs.update( + { + 'OSSAccessKeyId': cred.access_key_id, + 'Expires': expires, + } + ) + if cred.security_token is not None: + encoded_pairs.update({'security-token': quote(cred.security_token, safe='')}) + + query = [] + for key, value in encoded_pairs.items(): + if value: + query.append(f'{key}={value}') 
+ else: + query.append(f'{key}') + + parts = SplitResult(parts.scheme, parts.netloc, parts.path, '&'.join(query), parts.fragment) + request.url = parts.geturl() + + #string to sign + string_to_sign = self._calc_string_to_sign(signing_ctx=signing_ctx, date=expires) + + #signature + signature = self._calc_signature( + access_key_secrect=cred.access_key_secret, + string_to_sign=string_to_sign) + + request.url = request.url + f'&Signature={quote(signature, safe="")}' + + # print('string_to_sign:{}\n'.format(string_to_sign)) + signing_ctx.string_to_sign = string_to_sign + signing_ctx.signing_time = datetime_now + signing_ctx.expiration_time = expiration_time + + def _calc_string_to_sign(self, signing_ctx: SigningContext, date: Optional[str] = None) -> str: + """ + Canonical Request + HTTP Verb + "\n" + + Content-MD5 + "\n" + + Content-Type + "\n" + + Date + "\n" + + CanonicalizedOSSHeaders + "\n" + + CanonicalizedResource + """ + request = signing_ctx.request + + # canonical uri + uri = '/' + if signing_ctx.bucket is not None: + uri = uri + signing_ctx.bucket + '/' + if signing_ctx.key is not None: + uri = uri + signing_ctx.key + canonical_uri = uri + + # canonical query + canonical_query = '' + parts = urlsplit(request.url) + + if parts.query: + key_val_pairs = [] + for pair in parts.query.split('&'): + key, _, value = pair.partition('=') + key = unquote(key) + value = unquote(value) + if key in self._subresource_key_set: + key_val_pairs.append((key, value)) + sorted_key_vals = [] + for key, value in sorted(key_val_pairs): + if len(value) > 0: + sorted_key_vals.append(f'{key}={value}') + else: + sorted_key_vals.append(f'{key}') + if key_val_pairs: + canonical_query = '?' + '&'.join(sorted_key_vals) + else: + canonical_query = '' + + canonical_resource = canonical_uri + canonical_query + + #canonical headers + canon_headers = [] + for k, v in request.headers.items(): + lower_key = k.lower() + if _is_sign_header(lower_key): + canon_headers.append((lower_key, v)) + canon_headers.sort(key=lambda x: x[0]) + canonical_headers = ''.join(v[0] + ':' + v[1] + '\n' for v in canon_headers) + + content_md5 = request.headers.get('content-md5', '') + content_type = request.headers.get('content-type', '') + + if date is None: + date = request.headers.get('x-oss-date', '') or request.headers.get('date', '') + + return '{}\n{}\n{}\n{}\n{}'.format( + request.method, + content_md5, + content_type, + date, + canonical_headers + canonical_resource) + + def _calc_signature(self, access_key_secrect:str, string_to_sign:str) -> str: + h = hmac.new(access_key_secrect.encode(), string_to_sign.encode(), sha1) + return base64.b64encode(h.digest()).decode('utf-8') + + @staticmethod + def is_signed_header(h: str) -> bool: + """Determines the header is a signed header + """ + return _is_default_sign_header(h.lower()) + + +def _is_default_sign_header(key: str) -> bool: + if key.startswith('x-oss-'): + return True + + if key in ['content-type', 'content-md5', 'date']: + return True + + return False + + +def _is_sign_header(key: str) -> bool: + if key is not None: + if key.startswith('x-oss-'): + return True + + return False diff --git a/alibabacloud_oss_v2/signer/v4.py b/alibabacloud_oss_v2/signer/v4.py new file mode 100644 index 0000000..fdb7992 --- /dev/null +++ b/alibabacloud_oss_v2/signer/v4.py @@ -0,0 +1,332 @@ +"""_summary_ +""" +import datetime +from email.utils import format_datetime +from typing import Optional, Set +from urllib.parse import urlsplit, quote, SplitResult +from hashlib import sha256 +import hmac + +from 
.. import exceptions +from ..types import HttpRequest, SigningContext, Signer + + +class SignerV4(Signer): + """_summary_ + """ + + def sign(self, signing_ctx: SigningContext) -> None: + if signing_ctx is None: + raise exceptions.ParamNullError(field="SigningContext") + + if signing_ctx.credentials is None or not signing_ctx.credentials.has_keys(): + raise exceptions.ParamNullOrEmptyError( + field="SigningContext.credentials") + + if signing_ctx.request is None: + raise exceptions.ParamNullOrEmptyError( + field="SigningContext.request") + + if signing_ctx.auth_method_query: + return self._auth_query(signing_ctx) + + return self._auth_header(signing_ctx) + + def _auth_header(self, signing_ctx: SigningContext) -> None: + request = signing_ctx.request + cred = signing_ctx.credentials + + # Date + if signing_ctx.signing_time is None: + datetime_now = datetime.datetime.now(datetime.timezone.utc) + datetime_now = datetime_now + \ + datetime.timedelta(seconds=(signing_ctx.clock_offset or 0)) + else: + datetime_now = signing_ctx.signing_time.astimezone( + datetime.timezone.utc) + + datetime_now_iso8601 = datetime_now.strftime('%Y%m%dT%H%M%SZ') + datetime_now_rfc2822 = format_datetime(datetime_now, True) + date_now_iso8601 = datetime_now_iso8601[:8] + request.headers.update({'x-oss-date': datetime_now_iso8601}) + request.headers.update({'Date': datetime_now_rfc2822}) + + # Credentials information + if cred.security_token is not None: + request.headers.update( + {'x-oss-security-token': cred.security_token}) + + # Other Headers + request.headers.update({'x-oss-content-sha256': 'UNSIGNED-PAYLOAD'}) + + # Scope + region = signing_ctx.region or '' + product = signing_ctx.product or '' + scope = self._build_scope( + date=date_now_iso8601, region=region, product=product) + + # additional headers + additional_headers = self._common_additional_headers( + request, signing_ctx.additional_headers) + + # Canonical request + canonical_request = self._calc_canonical_request( + signing_ctx=signing_ctx, additional_headers=additional_headers) + + # string to sign + string_to_sign = self._calc_string_to_sign( + datetime_now_iso8601, scope, canonical_request) + + # print('\ncanonical_request:{}\n'.format(canonical_request)) + # print('string_to_sign:{}\n'.format(string_to_sign)) + + # signature + signature = self._calc_signature( + access_key_secrect=cred.access_key_secret, + date=date_now_iso8601, + region=region, + product=product, + string_to_sign=string_to_sign) + + # credential header + credential_header = f'OSS4-HMAC-SHA256 Credential={cred.access_key_id}/{scope}' + if len(additional_headers) > 0: + credential_header = f'{credential_header},AdditionalHeaders={";".join(additional_headers)}' + credential_header = f'{credential_header},Signature={signature}' + + request.headers.update({'Authorization': credential_header}) + + signing_ctx.string_to_sign = string_to_sign + signing_ctx.signing_time = datetime_now + + def _auth_query(self, signing_ctx: SigningContext) -> None: + request = signing_ctx.request + cred = signing_ctx.credentials + + # Date + if signing_ctx.signing_time is None: + datetime_now = datetime.datetime.now(datetime.timezone.utc) + else: + datetime_now = signing_ctx.signing_time.astimezone( + datetime.timezone.utc) + + if signing_ctx.expiration_time is None: + expiration_time = datetime.datetime.now( + datetime.timezone.utc) + datetime.timedelta(minutes=15) + else: + expiration_time = signing_ctx.expiration_time.astimezone( + datetime.timezone.utc) + + datetime_now_iso8601 = 
datetime_now.strftime('%Y%m%dT%H%M%SZ') + date_now_iso8601 = datetime_now_iso8601[:8] + expires = (expiration_time - datetime_now).seconds + + # Scope + region = signing_ctx.region or '' + product = signing_ctx.product or '' + scope = self._build_scope( + date=date_now_iso8601, region=region, product=product) + + # additional headers + additional_headers = self._common_additional_headers( + request, signing_ctx.additional_headers) + + # credentials information + encoded_pairs = {} + parts = urlsplit(request.url) + if parts.query: + for pair in parts.query.split('&'): + key, _, value = pair.partition('=') + encoded_pairs[key] = value + + encoded_pairs.pop('x-oss-signature', None) + encoded_pairs.pop('x-oss-security-token', None) + encoded_pairs.pop('x-oss-additional-headers', None) + encoded_pairs.update( + { + 'x-oss-signature-version': 'OSS4-HMAC-SHA256', + 'x-oss-date': datetime_now_iso8601, + 'x-oss-expires': str(expires), + 'x-oss-credential': quote(f'{cred.access_key_id}/{scope}', safe='') + } + ) + if cred.security_token is not None: + encoded_pairs.update( + {'x-oss-security-token': quote(cred.security_token, safe='')}) + + if len(additional_headers) > 0: + encoded_pairs.update( + {'x-oss-additional-headers': quote(';'.join(additional_headers), safe='')}) + + query = [] + for key, value in encoded_pairs.items(): + if value: + query.append(f'{key}={value}') + else: + query.append(f'{key}') + + parts = SplitResult(parts.scheme, parts.netloc, + parts.path, '&'.join(query), parts.fragment) + request.url = parts.geturl() + + # print('\nrequest.url:{}'.format(request.url)) + + # Canonical request + canonical_request = self._calc_canonical_request( + signing_ctx=signing_ctx, additional_headers=additional_headers) + + # string to sign + string_to_sign = self._calc_string_to_sign( + datetime_now_iso8601, scope, canonical_request) + + # signature + signature = self._calc_signature( + access_key_secrect=cred.access_key_secret, + date=date_now_iso8601, + region=region, + product=product, + string_to_sign=string_to_sign) + + request.url = request.url + f'&x-oss-signature={quote(signature, safe="")}' + + # print('\ncanonical_request:{}\n'.format(canonical_request)) + # print('string_to_sign:{}\n'.format(string_to_sign)) + signing_ctx.string_to_sign = string_to_sign + signing_ctx.signing_time = datetime_now + signing_ctx.expiration_time = expiration_time + + def _build_scope(self, date: str, region: str, product: str) -> str: + return f'{date}/{region}/{product}/aliyun_v4_request' + + def _common_additional_headers(self, request: HttpRequest, + additional_headers: Optional[Set[str]] = None + ) -> Set[str]: + keys = set() + if additional_headers is None or request is None: + return keys + + for k in additional_headers: + lk = k.lower() + if _is_default_sign_header(lk): + continue + elif len(request.headers.get(lk, '')) > 0: + keys.add(lk) + + keys = sorted(keys) + return keys + + def _calc_canonical_request(self, + signing_ctx: SigningContext, + additional_headers: Optional[Set[str]] = None + ) -> str: + """ + Canonical Request + HTTP Verb + "\n" + + Canonical URI + "\n" + + Canonical Query String + "\n" + + Canonical Headers + "\n" + + Additional Headers + "\n" + + Hashed PayLoad + """ + request = signing_ctx.request + + # canonical uri + uri = '/' + if signing_ctx.bucket is not None: + uri = uri + signing_ctx.bucket + '/' + if signing_ctx.key is not None: + uri = uri + signing_ctx.key + canonical_uri = quote(uri, safe='/') + + # canonical query + canonical_query = '' + parts = 
urlsplit(request.url) + if parts.query: + key_val_pairs = [] + for pair in parts.query.split('&'): + key, _, value = pair.partition('=') + key_val_pairs.append((key, value)) + sorted_key_vals = [] + for key, value in sorted(key_val_pairs): + if len(value) > 0: + sorted_key_vals.append(f'{key}={value}') + else: + sorted_key_vals.append(f'{key}') + canonical_query = '&'.join(sorted_key_vals) + + # canonical headers + canon_headers = [] + for k, v in request.headers.items(): + lower_key = k.lower() + if _is_sign_header(lower_key, additional_headers): + canon_headers.append((lower_key, v)) + canon_headers.sort(key=lambda x: x[0]) + canonical_headers = ''.join( + v[0] + ':' + v[1] + '\n' for v in canon_headers) + + # canonical additional Headers + canonical_additional_headers = ';'.join(additional_headers) + + # hash payload + hash_payload = request.headers.get( + 'x-oss-content-sha256', 'UNSIGNED-PAYLOAD') + + return f'{request.method}\n{canonical_uri}\n{canonical_query}\n{canonical_headers}\n{canonical_additional_headers}\n{hash_payload}' + + def _calc_string_to_sign(self, datetime_: str, scope: str, canonical_request: str) -> str: + """ + StringToSign + "OSS4-HMAC-SHA256" + "\n" + + TimeStamp + "\n" + + Scope + "\n" + + Hex(SHA256Hash(Canonical Request)) + """ + values = ['OSS4-HMAC-SHA256'] + values.append(datetime_) + values.append(scope) + values.append(sha256(canonical_request.encode('utf-8')).hexdigest()) + return '\n'.join(values) + + def _calc_signature(self, access_key_secrect: str, date: str, region: str, product: str, string_to_sign: str) -> str: + key_secret = ('aliyun_v4' + access_key_secrect).encode('utf-8') + signing_date = hmac.new( + key_secret, date.encode('utf-8'), sha256).digest() + signing_region = hmac.new( + signing_date, region.encode('utf-8'), sha256).digest() + signing_product = hmac.new( + signing_region, product.encode('utf-8'), sha256).digest() + signing_key = hmac.new( + signing_product, 'aliyun_v4_request'.encode('utf-8'), sha256).digest() + signature = hmac.new( + signing_key, string_to_sign.encode(), sha256).hexdigest() + return signature + + @staticmethod + def is_signed_header(h: str) -> bool: + """Determines the header is a signed header + """ + return _is_default_sign_header(h.lower()) + + +def _is_default_sign_header(key: str) -> bool: + if key.startswith('x-oss-'): + return True + + if key in ['content-type', 'content-md5']: + return True + + return False + + +def _is_sign_header(key: str, additional_headers) -> bool: + if key is not None: + if key.startswith('x-oss-'): + return True + + if key in ['content-type', 'content-md5']: + return True + + if additional_headers is not None and key in additional_headers: + return True + + return False diff --git a/alibabacloud_oss_v2/transport/__init__.py b/alibabacloud_oss_v2/transport/__init__.py new file mode 100644 index 0000000..698db00 --- /dev/null +++ b/alibabacloud_oss_v2/transport/__init__.py @@ -0,0 +1,2 @@ +# httpclient implement +from .requests_client import RequestsHttpClient diff --git a/alibabacloud_oss_v2/transport/requests_client.py b/alibabacloud_oss_v2/transport/requests_client.py new file mode 100644 index 0000000..5b401e2 --- /dev/null +++ b/alibabacloud_oss_v2/transport/requests_client.py @@ -0,0 +1,208 @@ +"""HttpClient implement based on requests +""" +from typing import Optional, MutableMapping, Iterator, cast +from urllib3.util.retry import Retry +from urllib3.exceptions import NewConnectionError, ConnectTimeoutError +import requests +import requests.adapters +from ..types import 
HttpRequest, HttpResponse, HttpClient
+from .. import exceptions
+from .. import defaults
+
+
+class _RequestsHttpResponseImpl(HttpResponse):
+    """Implementation class for HttpResponse, backed by requests' response
+    """
+
+    def __init__(self, **kwargs) -> None:
+        super().__init__()
+        self._request = kwargs.pop("request")
+        self._block_size = kwargs.pop("block_size")
+        self._internal_response = cast(requests.Response, kwargs.pop("internal_response"))
+        self._is_closed = False
+        self._is_stream_consumed = False
+        self._content = None
+
+    @property
+    def request(self) -> HttpRequest:
+        return self._request
+
+    @property
+    def is_closed(self) -> bool:
+        return self._is_closed
+
+    @property
+    def is_stream_consumed(self) -> bool:
+        return self._is_stream_consumed
+
+    @property
+    def status_code(self) -> int:
+        return self._internal_response.status_code
+
+    @property
+    def headers(self) -> MutableMapping[str, str]:
+        return self._internal_response.headers
+
+    @property
+    def reason(self) -> str:
+        return self._internal_response.reason
+
+    @property
+    def content(self) -> bytes:
+        if self._content is None:
+            raise exceptions.ResponseNotReadError()
+        return self._content
+
+    def __repr__(self) -> str:
+        content_type = self.headers.get("Content-Type", "")
+        return f"<RequestsHttpResponse: {self.status_code} {self.reason}, Content-Type: {content_type}>"
+
+    def __enter__(self) -> "_RequestsHttpResponseImpl":
+        return self
+
+    def __exit__(self, *args) -> None:
+        self.close()
+
+    def close(self) -> None:
+        if not self.is_closed:
+            self._is_closed = True
+            self._internal_response.close()
+
+    def read(self) -> bytes:
+        if self._content is None:
+            self._content = self._internal_response.content
+        self._set_read_checks()
+        return self._content
+
+    def iter_bytes(self, **kwargs) -> Iterator[bytes]:
+        """Iterates over the response's bytes.
+
+        Args:
+            block_size (int, optional): The number of bytes it should read into memory.
+
+        Returns:
+            Iterator[bytes]: An iterator of bytes from the response
+        """
+        block_size = kwargs.pop("block_size", self._block_size)
+
+        return self._internal_response.iter_content(block_size)
+
+    def _set_read_checks(self):
+        self._is_stream_consumed = True
+        self.close()
+ """ + + self.session_owner = False + self.session = kwargs.get("session", None) + self.adapter = kwargs.get("adapter", None) + + # client's configuration + self._connect_timeout = kwargs.get("connect_timeout", defaults.DEFAULT_CONNECT_TIMEOUT) + self._read_timeout = kwargs.get("readwrite_timeout", defaults.DEFAULT_READWRITE_TIMEOUT) + self._max_connections = kwargs.get("max_connections", defaults.DEFAULT_MAX_CONNECTIONS) + self._verify = True + if kwargs.get("insecure_skip_verify") is True: + self._verify = False + self._allow_redirects = kwargs.get("enabled_redirect", False) + self._proxies = kwargs.get("proxy_host", None) + self._block_size = kwargs.get("block_size", defaults.DEFAULT_CHUNK_SIZE) + + def __enter__(self): + self.open() + return self + + def __exit__(self, *args): + self.close() + + def _init_session(self, session: requests.Session) -> None: + """Init session level configuration of requests. + This is initialization I want to do once only on a session. + """ + if self.adapter is None: + disable_retries = Retry(total=False, redirect=False, raise_on_status=False) + self.adapter = requests.adapters.HTTPAdapter(max_retries=disable_retries, + pool_maxsize=self._max_connections, + pool_connections=self._max_connections) + + self.adapter = cast(requests.adapters.HTTPAdapter, self.adapter) + for p in self._protocols: + session.mount(p, self.adapter) + + def open(self): + if not self.session: + self.session = requests.Session() + self.session_owner = True + self._init_session(self.session) + self.session = cast(requests.Session, self.session) + + def close(self): + if self.session_owner and self.session is not None: + self.session.close() + self.session_owner = False + self.session = None + + def send(self, request: HttpRequest, **kwargs) -> HttpResponse: + self.open() + error: Optional[Exception] = None + resp: _RequestsHttpResponseImpl = None + + try: + # api's configuration + connect_timeout = kwargs.pop("connect_timeout", self._connect_timeout) + read_timeout = kwargs.pop("readwrite_timeout", self._read_timeout) + stream = kwargs.pop("stream", False) + + response = self.session.request( + request.method, + request.url, + headers=request.headers, + data=request.body, + verify=self._verify, + timeout=(connect_timeout, read_timeout), + allow_redirects=self._allow_redirects, + proxies=self._proxies, + stream=stream, + **kwargs + ) + + resp = _RequestsHttpResponseImpl( + request=request, + internal_response=response, + block_size=self._block_size + ) + + if not stream: + _ = resp.read() + resp.close() + + except (NewConnectionError, ConnectTimeoutError) as err: + error = exceptions.RequestError(error=err) + except requests.exceptions.ConnectionError as err: + error = exceptions.RequestError(error=err) + except requests.exceptions.ReadTimeout as err: + error = exceptions.ResponseError(error=err) + except requests.RequestException as err: + error = exceptions.RequestError(error=err) + + if error: + raise error + + return resp diff --git a/alibabacloud_oss_v2/types.py b/alibabacloud_oss_v2/types.py new file mode 100644 index 0000000..e93c91d --- /dev/null +++ b/alibabacloud_oss_v2/types.py @@ -0,0 +1,451 @@ +"""_summary_ +""" +import abc +import datetime +from typing import ( + Optional, + Any, + Iterable, + Iterator, + Union, + IO, + MutableMapping, + Mapping, + Set, + Dict, +) +from requests.structures import CaseInsensitiveDict + +BodyType = Union[str, bytes, Iterable[bytes], IO[str], IO[bytes]] + + +class Credentials: + """ + Holds the credentials needed to authenticate requests. 
+
+    :type access_key_id: str
+    :param access_key_id: The access key id of the credentials.
+
+    :type access_key_secret: str
+    :param access_key_secret: The access key secret of the credentials.
+
+    :type security_token: str
+    :param security_token: The security token of the credentials.
+
+    :type expiration: datetime.datetime
+    :param expiration: The time when the credentials expire, if any.
+
+    """
+
+    def __init__(
+        self,
+        access_key_id: str,
+        access_key_secret: str,
+        security_token: Optional[str] = None,
+        expiration: Optional[datetime.datetime] = None,
+    ) -> None:
+        self.access_key_id = access_key_id
+        self.access_key_secret = access_key_secret
+        self.security_token = security_token
+        self.expiration = expiration
+
+    def has_keys(self) -> bool:
+        """Check whether the credentials keys are set.
+
+        :rtype: bool
+        :return: True if the credentials keys are set.
+        """
+        if self.access_key_id is None or len(self.access_key_id) == 0:
+            return False
+
+        if self.access_key_secret is None or len(self.access_key_secret) == 0:
+            return False
+
+        return True
+
+    def is_expired(self) -> bool:
+        """Check whether the credentials have expired.
+
+        :rtype: bool
+        :return: True if the credentials have expired.
+        """
+
+        if self.expiration is None:
+            return False
+        now = datetime.datetime.now(datetime.timezone.utc)
+        return self.expiration < now
+
+
+class CredentialsProvider(abc.ABC):
+    """Abstract base class for CredentialsProvider."""
+
+    @abc.abstractmethod
+    def get_credentials(self) -> Credentials:
+        """Retrieve the credentials.
+
+        :rtype: Credentials
+        :return: a Credentials instance if it successfully retrieved the value.
+        """
+
+
+class Retryer(abc.ABC):
+    """Abstract base class for Retryer."""
+
+    @abc.abstractmethod
+    def is_error_retryable(self, error: Exception) -> bool:
+        """Check whether the error is retryable.
+
+        :type error: Exception
+        :param error: the error encountered
+
+        :rtype: bool
+        :return: True if the error is retryable.
+        """
+
+    @abc.abstractmethod
+    def max_attempts(self) -> int:
+        """Retrieve max attempts.
+
+        :rtype: int
+        :return: max attempts.
+        """
+
+    @abc.abstractmethod
+    def retry_delay(self, attempt: int, error: Exception) -> float:
+        """Returns the delay that should be used before retrying the attempt.
+
+        :type attempt: int
+        :param attempt: current retry attempt
+
+        :type error: Exception
+        :param error: the error encountered
+
+        :rtype: float
+        :return: delay duration in seconds.
+        """
+
+class HttpRequest:
+    """An HttpRequest represents an HTTP request to be sent by a client or received by a server.
+
+    It should be passed to your client's `send` method.
+
+    :type method: str
+    :param method: HTTP method (GET, HEAD, etc.)
+
+    :type url: str
+    :param url: The url for your request
+
+    :type headers: mapping
+    :param headers: HTTP headers you want in your request. Your input should
+        be a mapping of header name to header value.
+
+    :type params: mapping
+    :param params: Query parameters to be mapped into your URL. Your input
+        should be a mapping of query name to query value(s).
+
+    :type body: str or bytes or iterable[bytes] or IO[str] or IO[bytes]
+    :param body: The request's body.
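+
+    For example (a minimal sketch; the URL is illustrative only)::
+
+        request = HttpRequest(
+            method="PUT",
+            url="http://bucket.oss-cn-hangzhou.aliyuncs.com/example.txt",
+            headers={"content-type": "text/plain"},
+            body=b"hello world",
+        )
+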
+ + """ + + def __init__( + self, + method: str, + url: str, + headers: Optional[MutableMapping[str, str]] = None, + body: Optional[BodyType] = None, + #**kwargs: Any + ): + self.method = method + self.url = url + + # params + # params: Optional[Mapping[str, str]] = None, + # self.params = params + + # body + self.body = body + + # header + default_headers: MutableMapping[str, str] = {} + self.headers = CaseInsensitiveDict(default_headers) + self.headers.update(headers or {}) + + def __repr__(self) -> str: + return f"" + + +class _HttpResponseBase(abc.ABC): + """Base abstract base class for HttpResponses""" + + @property + @abc.abstractmethod + def request(self) -> HttpRequest: + """The request that resulted in this response. + + :rtype: HttpRequest + :return: The request that resulted in this response. + """ + + @property + @abc.abstractmethod + def status_code(self) -> int: + """The status code of this response. + + :rtype: int + :return: The status code of this response. + """ + + @property + @abc.abstractmethod + def headers(self) -> MutableMapping[str, str]: + """The response headers. Must be case-insensitive. + + :rtype: MutableMapping[str, str] + :return: The response headers. Must be case-insensitive. + """ + + @property + @abc.abstractmethod + def reason(self) -> str: + """Textual reason of responded HTTP Status, e.g. "Not Found" or "OK". + + :rtype: str + :return: Textual reason of responded HTTP Status + """ + + @property + @abc.abstractmethod + def is_closed(self) -> bool: + """Whether the network connection has been closed yet. + + :rtype: bool + :return: Whether the network connection has been closed yet. + """ + @property + @abc.abstractmethod + def is_stream_consumed(self) -> bool: + """Whether the stream has been consumed. + + :rtype: bool + :return: Whether the stream has been consumed. + """ + + @property + @abc.abstractmethod + def content(self) -> bytes: + """Content of the response, in bytes. + + :rtype: bytes + :return: The response's content in bytes. + """ + +class HttpResponse(_HttpResponseBase): + """Abstract base class for a HttpResponse, the response from an HTTP request.""" + + @abc.abstractmethod + def __enter__(self) -> "HttpResponse": ... + + @abc.abstractmethod + def __exit__(self, *args: Any) -> None: ... + + @abc.abstractmethod + def read(self) -> bytes: + """Read the response's bytes. + + :return: The read in bytes + :rtype: bytes + """ + + @abc.abstractmethod + def close(self) -> None: + """close the response""" + + @abc.abstractmethod + def iter_bytes(self, **kwargs: Any) -> Iterator[bytes]: + """Iterates over the response's bytes. Will decompress in the process. + + :return: An iterator of bytes from the response + :rtype: Iterator[str] + """ + + def __repr__(self) -> str: + return f'' + +class HttpClient(abc.ABC): + """Abstract base class for HTTP client.""" + + @abc.abstractmethod + def send(self, request: HttpRequest, **kwargs: Any) -> HttpResponse: + """Sends an HTTP request and returns an HTTP response. + + An error is returned if caused by client policy (such as CheckRedirect), + or failure to speak HTTP (such as a network connectivity problem). + A non-2xx status code doesn't cause an error. + + :type request: Any + :param request: the http request sent to server. + + :rtype: httpResponse + :return: The response object. 
+ """ + + @abc.abstractmethod + def open(self) -> None: + """Assign new session if one does not already exist.""" + + @abc.abstractmethod + def close(self) -> None: + """Close the session if it is not externally owned.""" + + +class SigningContext(object): + """SigningContext is the signing context.""" + + def __init__( + self, + product: Optional[str] = None, + region: Optional[str] = None, + bucket: Optional[str] = None, + key: Optional[str] = None, + request: Optional[HttpRequest] = None, + credentials: Optional[Credentials] = None, + signing_time: Optional[datetime.datetime] = None, + clock_offset: Optional[int] = 0, + additional_headers: Optional[Set[str]] = None, + ) -> None: + self.product = product + self.region = region + self.bucket = bucket + self.key = key + self.request = request + self.credentials = credentials + self.auth_method_query = False + self.signing_time = signing_time + self.clock_offset = clock_offset + self.signed_headers = {} + self.string_to_sign = '' + self.additional_headers = additional_headers + self.expiration_time: Optional[datetime.datetime] = None + + +class Signer(abc.ABC): + """Abstract base class for Signer.""" + + @abc.abstractmethod + def sign(self, signing_ctx: SigningContext) -> None: + """sign HTTP requests. + + :type signing_ctx: SigningContext + :param signing_ctx: the signing context + + """ + + +class OperationInput: + """_summary_ + """ + + def __init__( + self, + op_name: str, + method: str, + headers: Optional[MutableMapping[str, str]] = None, + parameters: Optional[Mapping[str, str]] = None, + body: Optional[BodyType] = None, + bucket: Optional[str] = None, + key: Optional[str] = None, + op_metadata: Optional[Dict[str, Any]] = None, + ) -> None: + self.op_name = op_name + self.method = method + self.headers = headers + self.parameters = parameters + self.body = body + self.bucket = bucket + self.key = key + self.op_metadata = op_metadata or {} + + def __str__(self) -> str: + return str(self.__dict__) + +class OperationOutput: + """_summary_ + """ + + def __init__( + self, + status: str, + status_code: int, + headers: Optional[MutableMapping[str, str]] = None, + body: Optional[BodyType] = None, + op_metadata: Optional[Dict[str, Any]] = None, + op_input: Optional[OperationInput] = None, + http_response: Optional[HttpResponse] = None, + ) -> None: + self.status = status + self.status_code = status_code + self.headers = headers + self.body = body + self.op_input = op_input + self.op_metadata = op_metadata or {} + self.http_response = http_response + + def __str__(self) -> str: + return str(self.__dict__) + +class StreamBody(abc.ABC): + """Abstract base class for a StreamBody.""" + + @abc.abstractmethod + def __enter__(self) -> "StreamBody": ... + + @abc.abstractmethod + def __exit__(self, *args: Any) -> None: ... + + @property + @abc.abstractmethod + def is_closed(self) -> bool: + """Whether the stream has been closed yet. + + :rtype: bool + :return: Whether the stream has been closed yet. + """ + + @property + @abc.abstractmethod + def is_stream_consumed(self) -> bool: + """Whether the stream has been consumed. + + :rtype: bool + :return: Whether the stream has been consumed. + """ + + @property + @abc.abstractmethod + def content(self) -> bytes: + """Content of the stream, in bytes. + + :rtype: bytes + :return: The stream's content in bytes. + """ + + @abc.abstractmethod + def read(self) -> bytes: + """Read the stream's bytes. 
+ + :return: The read in bytes + :rtype: bytes + """ + + @abc.abstractmethod + def close(self) -> None: + """close the stream""" + + @abc.abstractmethod + def iter_bytes(self, **kwargs: Any) -> Iterator[bytes]: + """Iterates over the stream's bytes. Will decompress in the process. + + :return: An iterator of bytes from the stream + :rtype: Iterator[str] + """ diff --git a/alibabacloud_oss_v2/uploader.py b/alibabacloud_oss_v2/uploader.py new file mode 100644 index 0000000..21eeea9 --- /dev/null +++ b/alibabacloud_oss_v2/uploader.py @@ -0,0 +1,677 @@ +"""Uploader for handling objects for uploads.""" +# pylint: disable=line-too-long, broad-exception-caught +import abc +import copy +import os +import concurrent.futures +import threading +from typing import Any, Optional, IO, MutableMapping, List +from . import exceptions +from . import models +from . import validation +from . import utils +from . import io_utils +from . import defaults +from .serde import copy_request +from .checkpoint import UploadCheckpoint +from .crc import Crc64 +from .paginator import ListPartsPaginator + +class UploadAPIClient(abc.ABC): + """Abstract base class for uploader client.""" + + @abc.abstractmethod + def put_object(self, request: models.PutObjectRequest, **kwargs) -> models.PutObjectResult: + """Uploads objects.""" + + @abc.abstractmethod + def head_object(self, request: models.HeadObjectRequest, **kwargs) -> models.HeadObjectResult: + """Queries information about the object in a bucket.""" + + @abc.abstractmethod + def initiate_multipart_upload(self, request: models.InitiateMultipartUploadRequest, **kwargs + ) -> models.InitiateMultipartUploadResult: + """ + Initiates a multipart upload task before you can upload data + in parts to Object Storage Service (OSS). + """ + + @abc.abstractmethod + def upload_part(self, request: models.UploadPartRequest, **kwargs) -> models.UploadPartResult: + """ + Call the UploadPart interface to upload data in blocks (parts) + based on the specified Object name and uploadId. + """ + + @abc.abstractmethod + def complete_multipart_upload(self, request: models.CompleteMultipartUploadRequest, **kwargs + ) -> models.CompleteMultipartUploadResult: + """ + Completes the multipart upload task of an object after all parts + of the object are uploaded. + """ + + @abc.abstractmethod + def abort_multipart_upload(self, request: models.AbortMultipartUploadRequest, **kwargs + ) -> models.AbortMultipartUploadResult: + """ + Cancels a multipart upload task and deletes the parts uploaded in the task. + """ + + @abc.abstractmethod + def list_parts(self, request: models.ListPartsRequest, **kwargs + ) -> models.ListPartsResult: + """ + Lists all parts that are uploaded by using a specified upload ID. 
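+
+        A sketch of how the resumable uploader consumes this API
+        (``client`` is any UploadAPIClient implementation; ``upload_id``
+        comes from a prior InitiateMultipartUpload call)::
+
+            request = models.ListPartsRequest(
+                bucket='examplebucket', key='examplekey', upload_id=upload_id)
+            result = client.list_parts(request)
+            for part in result.parts:
+                print(part.part_number, part.etag, part.size)
+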
+ """ + +class UploaderOptions: + """_summary_ + """ + + def __init__( + self, + part_size: Optional[int] = None, + parallel_num: Optional[int] = None, + leave_parts_on_error: Optional[bool] = None, + enable_checkpoint: Optional[bool] = None, + checkpoint_dir: Optional[str] = None, + ) -> None: + self.part_size = part_size + self.parallel_num = parallel_num + self.leave_parts_on_error = leave_parts_on_error or False + self.enable_checkpoint = enable_checkpoint or False + self.checkpoint_dir = checkpoint_dir + + +class UploadResult: + """_summary_ + """ + + def __init__( + self, + upload_id: Optional[str] = None, + etag: Optional[str] = None, + version_id: Optional[str] = None, + hash_crc64: Optional[str] = None, + ) -> None: + self.upload_id = upload_id + self.etag = etag + self.version_id = version_id + self.hash_crc64 = hash_crc64 + + self.status = '' + self.status_code = 0 + self.request_id = '' + self.headers: MutableMapping[str, str] = {} + +class UploadError(exceptions.BaseError): + """ + Upload Error. + """ + fmt = 'upload failed, {upload_id}, {path}, {error}.' + + def __init__(self, **kwargs): + exceptions.BaseError.__init__(self, **kwargs) + self._error = kwargs.get("error", None) + self.upload_id = kwargs.get("upload_id", None) + self.path = kwargs.get("path", None) + + def unwrap(self) -> Exception: + """returns the detail error""" + return self._error + + +class Uploader: + """Uploader for handling objects for uploads.""" + + def __init__( + self, + client: UploadAPIClient, + **kwargs: Any + ) -> None: + """ + client (UploadAPIClient): A agent that implements the PutObject and Multipart Upload api. + """ + part_size = kwargs.get('part_size', defaults.DEFAULT_UPLOAD_PART_SIZE) + parallel_num = kwargs.get('parallel_num', defaults.DEFAULT_UPLOAD_PARALLEL) + leave_parts_on_error = kwargs.get('leave_parts_on_error', False) + self._client = client + self._options = UploaderOptions( + part_size=part_size, + parallel_num=parallel_num, + leave_parts_on_error=leave_parts_on_error, + enable_checkpoint=kwargs.get('enable_checkpoint', None), + checkpoint_dir=kwargs.get('checkpoint_dir', None), + ) + + feature_flags = 0 + is_eclient = False + cstr = str(client) + if cstr == '': + feature_flags = client._client._options.feature_flags + elif cstr == '': + feature_flags = client.unwrap()._client._options.feature_flags + is_eclient = True + self._feature_flags = feature_flags + self._is_eclient = is_eclient + + + def upload_file( + self, + request: models.PutObjectRequest, + filepath: str, + **kwargs: Any + ) -> UploadResult: + """_summary_ + + Args: + request (models.PutObjectRequest): _description_ + filepath (str): _description_ + + Returns: + UploadResult: _description_ + """ + delegate = self._delegate(request, **kwargs) + + delegate.check_source(filepath) + + with open(delegate.reader_filepath, 'rb') as reader: + + delegate.apply_source(reader) + + delegate.check_checkpoint() + + delegate.update_crc_flag() + + delegate.adjust_source() + + result = delegate.upload() + + delegate.close_reader() + + return result + + def upload_from( + self, + request: models.PutObjectRequest, + reader: IO[bytes], + **kwargs: Any + ) -> UploadResult: + """_summary_ + + Args: + request (models.PutObjectRequest): _description_ + reader (IO[bytes]): _description_ + + Returns: + UploadResult: _description_ + """ + delegate = self._delegate(request, **kwargs) + delegate.apply_source(reader) + return delegate.upload() + + def _delegate( + self, + request: models.GetObjectRequest, + **kwargs: Any + ) -> 
"_UploaderDelegate": + + if request is None: + raise exceptions.ParamNullError(field='request') + + if not validation.is_valid_bucket_name(utils.safety_str(request.bucket)): + raise exceptions.ParamInvalidError(field='request.bucket') + + if not validation.is_valid_object_name(utils.safety_str(request.key)): + raise exceptions.ParamInvalidError(field='request.key') + + + options = copy.copy(self._options) + options.part_size = kwargs.get('part_size', self._options.part_size) + options.parallel_num = kwargs.get('parallel_num', self._options.parallel_num) + options.leave_parts_on_error = kwargs.get('leave_parts_on_error', self._options.leave_parts_on_error) + options.enable_checkpoint = kwargs.get('enable_checkpoint', self._options.enable_checkpoint) + options.checkpoint_dir = kwargs.get('checkpoint_dir', self._options.checkpoint_dir) + + if options.part_size <= 0: + options.part_size = defaults.DEFAULT_UPLOAD_PART_SIZE + + if options.parallel_num <= 0: + options.parallel_num = defaults.DEFAULT_UPLOAD_PARALLEL + + delegate = _UploaderDelegate( + base=self, + client=self._client, + request=request, + options=options + ) + + return delegate + + +class _UploadContext: + def __init__( + self, + upload_id: str = None, + start_num: int = None, + ) -> None: + self.upload_id = upload_id + self.start_num = start_num + + +class _UploaderDelegate: + def __init__( + self, + base: Uploader, + client: UploadAPIClient, + request: models.PutObjectRequest, + options: UploaderOptions, + ) -> None: + """ + """ + self._base = base + self._client = client + self._reqeust = request + self._options = options + + parallel = options.parallel_num > 1 + self._reader: IO[bytes] = None + self._progress_lock = threading.Lock() if parallel else None + + self._reader_pos = 0 + self._total_size = 0 + self._transferred = 0 + self._reader_seekable = False + + #Source's Info + self._filepath: str = None + self._file_stat: os.stat_result = None + + #checkpoint + self._checkpoint = None + + #CRC + self._check_crc = False + self._ccrc = 0 + + #use mulitpart upload + self._upload_part_lock = None + self._upload_errors = [] + self._uploaded_parts = [] + + # resumable upload + self._upload_id = None + self._part_number = None + + + @property + def reader_filepath(self) -> str: + """_summary_ + """ + return self._filepath + + def check_source(self, filepath:str): + """_summary_ + """ + if len(filepath) == 0: + raise exceptions.ParamInvalidError(field='filepath') + + absfilepath = os.path.abspath(filepath) + if not os.path.isfile(absfilepath): + raise exceptions.FileNotExist(filepath=filepath) + + if not os.access(absfilepath, os.R_OK): + raise exceptions.FileNotReadable(filepath=filepath) + + self._filepath = absfilepath + self._file_stat = os.stat(absfilepath) + + def apply_source(self, reader): + """_summary_ + """ + if reader is None: + raise exceptions.ParamInvalidError(field = 'reader') + + total_size = utils.guess_content_length(reader) + if total_size is None: + total_size = -1 + part_size = self._options.part_size + + if total_size > 0: + while self._total_size/part_size >= defaults.MAX_UPLOAD_PARTS: + part_size += self._options.part_size + + self._reader = reader + self._options.part_size = part_size + self._total_size = total_size + self._reader_seekable = utils.is_seekable(reader) + + def check_checkpoint(self): + """_summary_ + """ + if not self._options.enable_checkpoint: + return + + if not self._reader_seekable: + return + + checkpoint = UploadCheckpoint( + request=self._reqeust, + filepath=self._filepath, + 
basedir=self._options.checkpoint_dir, + fileinfo=self._file_stat, + part_size=self._options.part_size) + + checkpoint.load() + if checkpoint.loaded: + self._upload_id = checkpoint.upload_id + + self._options.leave_parts_on_error = True + self._checkpoint = checkpoint + + + def update_crc_flag(self): + """_summary_ + """ + #FF_ENABLE_CRC64_CHECK_UPLOAD = 0x00000008 + if (self._base._feature_flags & 0x00000008) > 0: + self._check_crc = True + + def adjust_source(self): + """ resume from upload id + """ + if not self._upload_id: + return + + uploaded_parts:List[models.Part] = [] + ccrc = 0 + + for part in self._iter_uploaded_part(): + uploaded_parts.append(models.UploadPart(part_number=part.part_number, etag=part.etag)) + if self._check_crc and part.hash_crc64 is not None: + ccrc = Crc64.combine(ccrc, int(part.hash_crc64), part.size) + + if len(uploaded_parts) == 0: + return + + # update from upload's result + part_number = uploaded_parts[-1].part_number + next_offset = part_number * self._options.part_size + + #print(f'last part number={part_number}, next offset={next_offset}') + + self._uploaded_parts = uploaded_parts + self._reader_pos = next_offset + self._part_number = part_number + 1 + self._ccrc = ccrc + + + def set_reader(self, reader) ->IO[bytes]: + """_summary_ + """ + self._reader = reader + + def close_reader(self): + """_summary_ + """ + + if self._checkpoint: + self._checkpoint.remove() + + self._reader = None + self._checkpoint = None + + def upload(self) -> UploadResult: + """_summary_ + """ + if self._total_size >= 0 and self._total_size < self._options.part_size: + return self._single_part() + + return self._multipart_part() + + def _single_part(self) -> UploadResult: + request = models.PutObjectRequest() + copy_request(request, self._reqeust) + request.body = self._reader + if request.content_type is None: + request.content_type = self._get_content_type() + + try: + result = self._client.put_object(request) + except Exception as err: + raise self._wrap_error('', err) + + ret = UploadResult( + etag=result.etag, + version_id=result.version_id, + hash_crc64=result.hash_crc64, + ) + ret.status = result.status + ret.status_code = result.status_code + ret.request_id = result.request_id + ret.headers = result.headers + + return ret + + + def _multipart_part(self) -> UploadResult: + # init the multipart + try: + upload_ctx = self._get_upload_context() + except Exception as err: + raise self._wrap_error('', err) + + # update checkpoint + if self._checkpoint: + self._checkpoint.upload_id = upload_ctx.upload_id + self._checkpoint.dump() + + # upload part + parallel = self._options.parallel_num > 1 + if parallel: + self._upload_part_lock = threading.Lock() + with concurrent.futures.ThreadPoolExecutor(self._options.parallel_num) as executor: + for result in executor.map(self._upload_part, self._iter_part(upload_ctx)): + self._update_upload_result(result) + else: + for part in self._iter_part(upload_ctx): + self._update_upload_result(self._upload_part(part)) + if len(self._upload_errors) > 0: + break + + + # complete upload + cmresult: models.CompleteMultipartUploadResult = None + if len(self._upload_errors) == 0: + request = models.CompleteMultipartUploadRequest() + copy_request(request, self._reqeust) + parts = sorted(self._uploaded_parts, key=lambda p: p.part_number) + request.upload_id = upload_ctx.upload_id + request.complete_multipart_upload = models.CompleteMultipartUpload(parts=parts) + try: + cmresult = self._client.complete_multipart_upload(request) + except Exception as err: + 
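+                # Record the failure instead of raising immediately so the
+                # cleanup logic below can decide whether to abort the
+                # multipart upload (see leave_parts_on_error).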
+                self._upload_errors.append(err)
+
+        # check last error
+        if len(self._upload_errors) > 0:
+            if not self._options.leave_parts_on_error:
+                try:
+                    abort_request = models.AbortMultipartUploadRequest()
+                    # copy into the abort request (not the complete request),
+                    # and carry over the upload id so the abort can succeed
+                    copy_request(abort_request, self._reqeust)
+                    abort_request.upload_id = upload_ctx.upload_id
+                    self._client.abort_multipart_upload(abort_request)
+                except Exception:
+                    pass
+            raise self._wrap_error(upload_ctx.upload_id, self._upload_errors[-1])
+
+        self._assert_crc_same(cmresult.headers)
+
+        ret = UploadResult(
+            upload_id=upload_ctx.upload_id,
+            etag=cmresult.etag,
+            version_id=cmresult.version_id,
+            hash_crc64=cmresult.hash_crc64,
+        )
+        ret.status = cmresult.status
+        ret.status_code = cmresult.status_code
+        ret.request_id = cmresult.request_id
+        ret.headers = cmresult.headers
+
+        return ret
+
+
+    def _get_upload_context(self) -> _UploadContext:
+        if self._upload_id:
+            return _UploadContext(
+                upload_id=self._upload_id,
+                start_num=self._part_number - 1,
+            )
+
+        # if no upload id exists, or loading it failed, create a new one
+        request = models.InitiateMultipartUploadRequest()
+        copy_request(request, self._reqeust)
+        if request.content_type is None:
+            request.content_type = self._get_content_type()
+
+        result = self._client.initiate_multipart_upload(request)
+
+        return _UploadContext(
+            upload_id=result.upload_id,
+            start_num=0,
+        )
+
+    def _iter_part(self, upload_ctx: _UploadContext):
+        start_part_num = upload_ctx.start_num
+        reader = self._reader
+        if self._reader_seekable:
+            reader = io_utils.ReadAtReader(reader)
+
+        def next_body():
+            n = self._options.part_size
+            if self._reader_seekable:
+                bytes_left = self._total_size - self._reader_pos
+                if bytes_left < n:
+                    n = bytes_left
+                body = io_utils.SectionReader(reader, self._reader_pos, n)
+            else:
+                body = reader.read(n)
+
+            self._reader_pos += len(body)
+            return body
+
+        while len(self._upload_errors) == 0:
+            try:
+                body = next_body()
+                if len(body) == 0:
+                    break
+            except Exception as err:
+                self._save_error(err)
+                break
+
+            start_part_num += 1
+            yield upload_ctx.upload_id, start_part_num, body
+
+
+    def _upload_part(self, part):
+        # When an error occurs, ignore other upload requests
+        if len(self._upload_errors) > 0:
+            return None
+
+        upload_id = part[0]
+        part_number = part[1]
+        body = part[2]
+        error: Optional[Exception] = None
+        etag = None
+        size = len(body)
+        hash_crc64 = None
+        try:
+            result = self._client.upload_part(models.UploadPartRequest(
+                bucket=self._reqeust.bucket,
+                key=self._reqeust.key,
+                upload_id=upload_id,
+                part_number=part_number,
+                body=body,
+                request_payer=self._reqeust.request_payer
+            ))
+            etag = result.etag
+            hash_crc64 = result.hash_crc64
+        except Exception as err:
+            error = err
+
+        return part_number, etag, error, hash_crc64, size
+
+
+    def _save_error(self, error) -> None:
+        if self._upload_part_lock:
+            with self._upload_part_lock:
+                self._upload_errors.append(error)
+        else:
+            self._upload_errors.append(error)
+
+
+    def _get_content_type(self) -> str:
+        if self._filepath is not None:
+            return utils.guess_content_type(self._filepath, 'application/octet-stream')
+        return None
+
+    def _iter_uploaded_part(self):
+        if self._upload_id is None:
+            return
+        try:
+            paginator = ListPartsPaginator(self._client)
+            iterator = paginator.iter_page(models.ListPartsRequest(
+                bucket=self._reqeust.bucket,
+                key=self._reqeust.key,
+                request_payer=self._reqeust.request_payer,
+                upload_id=self._upload_id,
+            ))
+            check_part_number = 1
+            for page in iterator:
+                for part in page.parts:
+                    if (part.part_number != check_part_number or
+                        part.size != self._options.part_size):
+                        return
+                    yield part
+                    check_part_number
+= 1 + except Exception: + self._upload_id = None + + def _update_upload_result(self, result): + #print(f'_update_upload_result: {result}') + if result is None: + return + + if result[2] is not None: + self._save_error(result[2]) + return + + part_number = result[0] + etag = result[1] + hash_crc64 = result[3] + size = result[4] + + self._uploaded_parts.append(models.UploadPart(part_number=part_number, etag=etag)) + + if self._check_crc and hash_crc64 is not None: + self._ccrc = Crc64.combine(self._ccrc, int(hash_crc64), size) + + + def _assert_crc_same(self, headers: MutableMapping): + if not self._check_crc: + return + + scrc = headers.get('x-oss-hash-crc64ecma', None) + if scrc is None: + return + + ccrc = str(self._ccrc) + if scrc != ccrc: + raise self._wrap_error(self._upload_id, exceptions.InconsistentError(client_crc=ccrc, server_crc=scrc)) + + def _wrap_error(self, upload_id: str, error: Exception) -> Exception: + return UploadError( + upload_id=upload_id, + path=f'oss://{self._reqeust.bucket}/{self._reqeust.key}', + error=error + ) diff --git a/alibabacloud_oss_v2/utils.py b/alibabacloud_oss_v2/utils.py new file mode 100644 index 0000000..670cc92 --- /dev/null +++ b/alibabacloud_oss_v2/utils.py @@ -0,0 +1,244 @@ +"""utils for sdk""" +import io +from typing import Optional, Any, MutableMapping, Tuple +import mimetypes +import os.path + +_EXTRA_TYPES_MAP = { + ".js": "application/javascript", + ".xlsx": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", + ".xltx": "application/vnd.openxmlformats-officedocument.spreadsheetml.template", + ".potx": "application/vnd.openxmlformats-officedocument.presentationml.template", + ".ppsx": "application/vnd.openxmlformats-officedocument.presentationml.slideshow", + ".pptx": "application/vnd.openxmlformats-officedocument.presentationml.presentation", + ".sldx": "application/vnd.openxmlformats-officedocument.presentationml.slide", + ".docx": "application/vnd.openxmlformats-officedocument.wordprocessingml.document", + ".dotx": "application/vnd.openxmlformats-officedocument.wordprocessingml.template", + ".xlam": "application/vnd.ms-excel.addin.macroEnabled.12", + ".xlsb": "application/vnd.ms-excel.sheet.binary.macroEnabled.12", + ".apk": "application/vnd.android.package-archive" +} + + +def safety_str(value: Optional[str]) -> str: + """ + Returns str value if the value is not None. + Returns a emtpy str if the value is None. + """ + return value if value is not None else '' + + +def safety_bool(value: Optional[bool]) -> bool: + """ + Returns bool value if the value is not None. + Returns False if the value is None. + """ + return value if value is not None else False + +def safety_int(value: Optional[int]) -> int: + """ + Returns int value if the value is not None. + Returns 0 if the value is None. + """ + return value if value is not None else 0 + +def ensure_boolean(val): + """ + Ensures a boolean value if a string or boolean is provided + For strings, the value for True/False is case insensitive + """ + if isinstance(val, bool): + return val + + if isinstance(val, str): + return val.lower() == 'true' + + return False + + +def merge_dicts(dict1, dict2, append_lists=False): + """Given two dict, merge the second dict into the first. + + The dicts can have arbitrary nesting. + + :param append_lists: If true, instead of clobbering a list with the new + value, append all of the new values onto the original list. 
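+
+    Example (a small sketch)::
+
+        d1 = {'a': {'x': 1}, 'tags': ['red']}
+        d2 = {'a': {'y': 2}, 'tags': ['blue']}
+        merge_dicts(d1, d2, append_lists=True)
+        # d1 is now {'a': {'x': 1, 'y': 2}, 'tags': ['red', 'blue']}
+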
+ """ + for key in dict2: + if isinstance(dict2[key], dict): + if key in dict1 and key in dict2: + merge_dicts(dict1[key], dict2[key]) + else: + dict1[key] = dict2[key] + # If the value is a list and the ``append_lists`` flag is set, + # append the new values onto the original list + elif isinstance(dict2[key], list) and append_lists: + # The value in dict1 must be a list in order to append new + # values onto it. + if key in dict1 and isinstance(dict1[key], list): + dict1[key].extend(dict2[key]) + else: + dict1[key] = dict2[key] + else: + # At scalar types, we iterate and merge the + # current dict that we're on. + dict1[key] = dict2[key] + + +def lowercase_dict(original): + """Copies the given dictionary ensuring all keys are lowercase strings.""" + copy = {} + for key in original: + copy[key.lower()] = original[key] + return copy + +def guess_content_type(name:str, default:str) -> str: + """Guess the type of a file based on its name""" + ext = os.path.splitext(name)[1].lower() + if ext in _EXTRA_TYPES_MAP: + return _EXTRA_TYPES_MAP[ext] + + return mimetypes.guess_type(name)[0] or default + +def guess_content_length(body: Any) -> Optional[int]: + """Guess the content length of body""" + if not body: + return 0 + + try: + return len(body) + except (AttributeError, TypeError): + pass + + if hasattr(body, 'seek') and hasattr(body, 'tell'): + try: + orig_pos = body.tell() + body.seek(0, os.SEEK_END) + end_file_pos = body.tell() + body.seek(orig_pos) + return end_file_pos - orig_pos + except io.UnsupportedOperation: + pass + return None + +def escape_xml_value(s: str) -> str: + """escapeXml EscapeString writes to p the properly escaped XML equivalent + of the plain text data s + """ + ss = '' + for _, d in enumerate(s): + if d == "&": + ss += "&" + elif d == "<": + ss += "<" + elif d == ">": + ss += ">" + elif d == "\"": + ss += """ + elif d == "\r": + ss += " " + elif d == "\n": + ss += " " + elif d == "\t": + ss += " " + else: + n = ord(d) + if 0 <= n < 0x20: + ss += f'&#{n:02d};' + else: + ss += d + return ss + +def parse_content_range(content_range: str) -> Tuple[int, int, int]: + """ + Parses the content range header + accepts bytes 22-33/42 and bytes 22-33/* format + """ + if not content_range: + raise ValueError("Invalid content-range header, it is none or empty.") + + vals = content_range.split(" ")[1].split("/") + + if len(vals) != 2: + raise ValueError(f'Invalid content-range header: {content_range}') + + rvals = vals[0].split("-") + + if len(rvals) != 2: + raise ValueError(f'Invalid content-range header: {content_range}') + + start = int(rvals[0]) + if start < 0: + raise ValueError(f'Invalid content-range header: {start}') + + end = int(rvals[1]) + if end < 0: + raise ValueError(f'Invalid content-range header: {end}') + + if vals[1] == "*": + size = -1 + else: + size = int(vals[1]) + if size <= 0: + raise ValueError(f'Invalid content-range header: {size}') + + return start, end, size + +def parse_content_length(headers: MutableMapping[str, str]) -> int: + """Parses the length from the content length header""" + if not headers["Content-Length"]: + raise ValueError("Missing content-length header.") + size = int(headers["Content-Length"]) + if size <= 0: + raise ValueError(f"Invalid content-length header: {size}") + return size + +def parse_http_range(range_header: str) -> Tuple[int, int]: + """ + Parses the range header + It only accepts single ranges. 
+ """ + if not range_header: + raise ValueError("Invalid range header, it is none or empty.") + + if not range_header.startswith("bytes="): + raise ValueError("Invalid range header, doesn't start with bytes=.") + + if range_header.count(',') > 0: + raise ValueError("Invalid range header, contains multiple ranges which isn't supported.") + + dash = range_header.find('-') + if dash < 0: + raise ValueError("Invalid range header, contains no '-'") + + start_str = range_header[6:dash].strip() + end_str = range_header[dash+1:].strip() + + start = -1 + if len(start_str) > 0: + start = int(start_str) + if start < 0: + raise ValueError(f'Invalid range header: {start_str} in {range_header}') + + end = -1 + if len(end_str) > 0: + end = int(end_str) + if end < 0: + raise ValueError(f'Invalid range header: {end_str} in {range_header}') + + return start, end + +def is_seekable(obj: Any) -> bool: + """Tests if this object supports Seek method + Returns True is suppurts seek, else False + """ + if hasattr(obj, 'seekable'): + return obj.seekable() + + if hasattr(obj, 'seek') and hasattr(obj, 'tell'): + try: + obj.seek(0, os.SEEK_CUR) + return True + except OSError: + return False + return False diff --git a/alibabacloud_oss_v2/validation.py b/alibabacloud_oss_v2/validation.py new file mode 100644 index 0000000..b12f406 --- /dev/null +++ b/alibabacloud_oss_v2/validation.py @@ -0,0 +1,58 @@ +"""validation for sdk""" +import re + + +def is_valid_region(region: str): + """Checks if the region is valid""" + if region is None: + return False + + pattern = r'^[a-z0-9-]+$' + if re.match(pattern, region): + return True + + return False + + +def is_valid_endpoint(endpoint: str): + """Checks if the endpoint is valid""" + if endpoint is None: + return False + + pattern = r'^([a-zA-Z]+://)?[\w.-]+(:\d+)?$' + if re.match(pattern, endpoint): + return True + + return False + + +_ALPHA_NUM = 'abcdefghijklmnopqrstuvwxyz0123456789' +_HYPHEN = '-' +_BUCKET_NAME_CHARS = set(_ALPHA_NUM + _HYPHEN) + + +def is_valid_bucket_name(name: str) -> bool: + """Checks if the name is valid""" + if len(name) < 3 or len(name) > 63: + return False + + if name[-1] == _HYPHEN: + return False + + if name[0] not in _ALPHA_NUM: + return False + + return set(name) <= _BUCKET_NAME_CHARS + + +def is_valid_object_name(name: str) -> bool: + """Checks if the name is valid""" + if len(name) < 1 or len(name) > 1024: + return False + + return True + + +def is_valid_range(_: str) -> bool: + """Checks if the range is valid""" + return True diff --git a/sample/append_object.py b/sample/append_object.py new file mode 100644 index 0000000..7c7ccb6 --- /dev/null +++ b/sample/append_object.py @@ -0,0 +1,71 @@ +import sys +import os +import argparse +import alibabacloud_oss_v2 as oss + +# It is used only to execute sample code in the project directory +code_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(code_directory) + +parser = argparse.ArgumentParser(description="append object sample") +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--bucket', help='The name of the bucket.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') +parser.add_argument('--key', help='The name of the object.', required=True) + + +def main(): + + args = parser.parse_args() + + # Loading credentials values from the environment variables + credentials_provider = 
oss.credentials.EnvironmentVariableCredentialsProvider() + + # Using the SDK's default configuration + cfg = oss.config.load_default() + cfg.credentials_provider = credentials_provider + cfg.region = args.region + if args.endpoint is not None: + cfg.endpoint = args.endpoint + + client = oss.Client(cfg) + + data1 = b'hello' + data2 = b' world' + + result = client.append_object(oss.AppendObjectRequest( + bucket=args.bucket, + key=args.key, + position=0, + body=data1, + )) + + print(f'status code: {result.status_code},' + f' request id: {result.request_id},' + f' version id: {result.version_id},' + f' hash crc64: {result.hash_crc64},' + f' next position: {result.next_position},' + f' server side encryption: {result.server_side_encryption},' + f' server side data encryption: {result.server_side_data_encryption},' + f' sse kms key id: {result.sse_kms_key_id},' + ) + + result = client.append_object(oss.AppendObjectRequest( + bucket=args.bucket, + key=args.key, + position=result.next_position, + body=data2, + )) + + print(f'status code: {result.status_code},' + f' request id: {result.request_id},' + f' version id: {result.version_id},' + f' hash crc64: {result.hash_crc64},' + f' next position: {result.next_position},' + f' server side encryption: {result.server_side_encryption},' + f' server side data encryption: {result.server_side_data_encryption},' + f' sse kms key id: {result.sse_kms_key_id},' + ) + +if __name__ == "__main__": + main() diff --git a/sample/copy_object.py b/sample/copy_object.py new file mode 100644 index 0000000..50f9dad --- /dev/null +++ b/sample/copy_object.py @@ -0,0 +1,56 @@ +import sys +import os +import argparse +import alibabacloud_oss_v2 as oss + +# It is used only to execute sample code in the project directory +code_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(code_directory) + +parser = argparse.ArgumentParser(description="copy object sample") +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--bucket', help='The name of the bucket.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') +parser.add_argument('--key', help='The name of the object.', required=True) +parser.add_argument('--source_key', help='The name of the source address for object.', required=True) +parser.add_argument('--source_bucket', help='The name of the source address for bucket.', required=True) + + +def main(): + + args = parser.parse_args() + + # Loading credentials values from the environment variables + credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider() + + # Using the SDK's default configuration + cfg = oss.config.load_default() + cfg.credentials_provider = credentials_provider + cfg.region = args.region + if args.endpoint is not None: + cfg.endpoint = args.endpoint + + client = oss.Client(cfg) + + result = client.copy_object(oss.CopyObjectRequest( + bucket=args.bucket, + key=args.key, + source_key=args.source_key, + source_bucket=args.source_bucket, + )) + + print(f'status code: {result.status_code},' + f' request id: {result.request_id},' + f' version id: {result.version_id},' + f' hash crc64: {result.hash_crc64},' + f' source version id: {result.source_version_id},' + f' server side encryption: {result.server_side_encryption},' + f' server side data encryption: {result.server_side_data_encryption},' + f' sse kms key id: {result.sse_kms_key_id},' + f' last modified: 
{result.last_modified},' + f' etag: {result.etag},' + ) + +if __name__ == "__main__": + main() + diff --git a/sample/delete_bucket.py b/sample/delete_bucket.py new file mode 100644 index 0000000..9079da2 --- /dev/null +++ b/sample/delete_bucket.py @@ -0,0 +1,40 @@ +import sys +import os +import argparse +import alibabacloud_oss_v2 as oss + +# It is used only to execute sample code in the project directory +code_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(code_directory) + +parser = argparse.ArgumentParser(description="delete bucket sample") +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--bucket', help='The name of the bucket.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') + + +def main(): + + args = parser.parse_args() + + # Loading credentials values from the environment variables + credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider() + + # Using the SDK's default configuration + cfg = oss.config.load_default() + cfg.credentials_provider = credentials_provider + cfg.region = args.region + if args.endpoint is not None: + cfg.endpoint = args.endpoint + + client = oss.Client(cfg) + + result = client.delete_bucket(oss.DeleteBucketRequest( + bucket=args.bucket, + )) + print(f'status code: {result.status_code},' + f' request id: {result.request_id},' + ) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/sample/delete_multiple_objects.py b/sample/delete_multiple_objects.py new file mode 100644 index 0000000..e36b19e --- /dev/null +++ b/sample/delete_multiple_objects.py @@ -0,0 +1,54 @@ +import sys +import os +import argparse +import alibabacloud_oss_v2 as oss + +# It is used only to execute sample code in the project directory +code_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(code_directory) + +parser = argparse.ArgumentParser(description="delete multiple objects sample") +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--bucket', help='The name of the bucket.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') +parser.add_argument('--key', help='The name of the object.', required=True) + + +def main(): + + args = parser.parse_args() + + # Loading credentials values from the environment variables + credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider() + + # Using the SDK's default configuration + cfg = oss.config.load_default() + cfg.credentials_provider = credentials_provider + cfg.region = args.region + if args.endpoint is not None: + cfg.endpoint = args.endpoint + + client = oss.Client(cfg) + + # If deleting multiple items, please follow the following format + # objects = [oss.DeleteObject(key=args.key), oss.DeleteObject(key=args.key2)], + + result = client.delete_multiple_objects(oss.DeleteMultipleObjectsRequest( + bucket=args.bucket, + encoding_type='url', + objects=[oss.DeleteObject(key=args.key)], + )) + + print(f'status code: {result.status_code},' + f' request id: {result.request_id},' + f' key: {result.deleted_objects[0].key},' + f' version id: {result.deleted_objects[0].version_id},' + f' delete marker: {result.deleted_objects[0].delete_marker},' + f' delete marker version id: {result.deleted_objects[0].delete_marker_version_id},' + 
f' encoding type: {result.encoding_type},' + ) + + +if __name__ == "__main__": + main() + diff --git a/sample/delete_object.py b/sample/delete_object.py new file mode 100644 index 0000000..43be12a --- /dev/null +++ b/sample/delete_object.py @@ -0,0 +1,45 @@ +import sys +import os +import argparse +import alibabacloud_oss_v2 as oss + +# It is used only to execute sample code in the project directory +code_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(code_directory) + +parser = argparse.ArgumentParser(description="delete object sample") +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--bucket', help='The name of the bucket.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') +parser.add_argument('--key', help='The name of the object.', required=True) + + +def main(): + + args = parser.parse_args() + + # Loading credentials values from the environment variables + credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider() + + # Using the SDK's default configuration + cfg = oss.config.load_default() + cfg.credentials_provider = credentials_provider + cfg.region = args.region + if args.endpoint is not None: + cfg.endpoint = args.endpoint + + client = oss.Client(cfg) + + result = client.delete_object(oss.DeleteObjectRequest( + bucket=args.bucket, + key=args.key, + )) + + print(f'status code: {result.status_code},' + f' request id: {result.request_id},' + f' version id: {result.version_id},' + f' delete marker: {result.delete_marker},' + ) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/sample/get_bucket_info.py b/sample/get_bucket_info.py new file mode 100644 index 0000000..32d3b9d --- /dev/null +++ b/sample/get_bucket_info.py @@ -0,0 +1,63 @@ +import sys +import os +import argparse +import alibabacloud_oss_v2 as oss + +# It is used only to execute sample code in the project directory +code_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(code_directory) + +parser = argparse.ArgumentParser(description="get bucket info sample") +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--bucket', help='The name of the bucket.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') + + +def main(): + + args = parser.parse_args() + + # Loading credentials values from the environment variables + credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider() + + # Using the SDK's default configuration + cfg = oss.config.load_default() + cfg.credentials_provider = credentials_provider + cfg.region = args.region + if args.endpoint is not None: + cfg.endpoint = args.endpoint + + client = oss.Client(cfg) + + result = client.get_bucket_info(oss.GetBucketInfoRequest( + bucket=args.bucket, + )) + + print(f'status code: {result.status_code},' + f' request id: {result.request_id},' + f' name: {result.bucket_info.name},' + f' access monitor: {result.bucket_info.access_monitor},' + f' location: {result.bucket_info.location},' + f' creation date: {result.bucket_info.creation_date},' + f' extranet endpoint: {result.bucket_info.extranet_endpoint},' + f' intranet endpoint: {result.bucket_info.intranet_endpoint},' + f' acl: {result.bucket_info.acl},' + f' data redundancy type: 
{result.bucket_info.data_redundancy_type},' + f' id: {result.bucket_info.owner.id},' + f' display name: {result.bucket_info.owner.display_name},' + f' storage class: {result.bucket_info.storage_class},' + f' resource group id: {result.bucket_info.resource_group_id},' + f' kms master key id: {result.bucket_info.sse_rule.kms_master_key_id},' + f' sse algorithm: {result.bucket_info.sse_rule.sse_algorithm},' + f' kms data encryption: {result.bucket_info.sse_rule.kms_data_encryption},' + f' versioning: {result.bucket_info.versioning},' + f' transfer acceleration: {result.bucket_info.transfer_acceleration},' + f' cross region replication: {result.bucket_info.cross_region_replication},' + f' log bucket: {result.bucket_info.bucket_policy.log_bucket},' + f' log prefix: {result.bucket_info.bucket_policy.log_prefix},' + f' comment: {result.bucket_info.comment},' + f' block public access: {result.bucket_info.block_public_access},' + ) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/sample/get_bucket_location.py b/sample/get_bucket_location.py new file mode 100644 index 0000000..20ae4a8 --- /dev/null +++ b/sample/get_bucket_location.py @@ -0,0 +1,42 @@ +import sys +import os +import argparse +import alibabacloud_oss_v2 as oss + +# It is used only to execute sample code in the project directory +code_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(code_directory) + +parser = argparse.ArgumentParser(description="get bucket location sample") +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--bucket', help='The name of the bucket.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') + + +def main(): + + args = parser.parse_args() + + # Loading credentials values from the environment variables + credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider() + + # Using the SDK's default configuration + cfg = oss.config.load_default() + cfg.credentials_provider = credentials_provider + cfg.region = args.region + if args.endpoint is not None: + cfg.endpoint = args.endpoint + + client = oss.Client(cfg) + + result = client.get_bucket_location(oss.GetBucketLocationRequest( + bucket=args.bucket, + )) + print(f'status code: {result.status_code},' + f' request id: {result.request_id},' + f' location: {result.location},' + ) + +if __name__ == "__main__": + main() + diff --git a/sample/get_bucket_stat.py b/sample/get_bucket_stat.py new file mode 100644 index 0000000..9554313 --- /dev/null +++ b/sample/get_bucket_stat.py @@ -0,0 +1,62 @@ +import sys +import os +import argparse +import alibabacloud_oss_v2 as oss + +# It is used only to execute sample code in the project directory +code_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(code_directory) + +parser = argparse.ArgumentParser(description="get bucket stat sample") +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--bucket', help='The name of the bucket.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') + + +def main(): + + args = parser.parse_args() + + # Loading credentials values from the environment variables + credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider() + + # Using the SDK's default configuration + cfg = 
oss.config.load_default() + cfg.credentials_provider = credentials_provider + cfg.region = args.region + if args.endpoint is not None: + cfg.endpoint = args.endpoint + + client = oss.Client(cfg) + + result = client.get_bucket_stat(oss.GetBucketStatRequest( + bucket=args.bucket, + )) + print(f'status code: {result.status_code},' + f' request id: {result.request_id},' + f' storage: {result.storage},' + f' object count: {result.object_count},' + f' multi part upload count: {result.multi_part_upload_count},' + f' live channel count: {result.live_channel_count},' + f' last modified time: {result.last_modified_time},' + f' standard storage: {result.standard_storage},' + f' standard object count: {result.standard_object_count},' + f' infrequent access storage: {result.infrequent_access_storage},' + f' infrequent access real storage: {result.infrequent_access_real_storage},' + f' infrequent access object count: {result.infrequent_access_object_count},' + f' archive storage: {result.archive_storage},' + f' archive real storage: {result.archive_real_storage},' + f' archive object count: {result.archive_object_count},' + f' cold archive storage: {result.cold_archive_storage},' + f' cold archive real storage: {result.cold_archive_real_storage},' + f' cold archive object count: {result.cold_archive_object_count},' + f' deep cold archive storage: {result.deep_cold_archive_storage},' + f' deep cold archive real storage: {result.deep_cold_archive_real_storage},' + f' deep cold archive object count: {result.deep_cold_archive_object_count},' + f' delete marker count: {result.delete_marker_count},' + ) + +if __name__ == "__main__": + main() + + diff --git a/sample/get_bucket_version.py b/sample/get_bucket_version.py new file mode 100644 index 0000000..85add4c --- /dev/null +++ b/sample/get_bucket_version.py @@ -0,0 +1,43 @@ +import sys +import os +import argparse +import alibabacloud_oss_v2 as oss + +# It is used only to execute sample code in the project directory +code_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(code_directory) + +parser = argparse.ArgumentParser(description="get bucket versioning sample") +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--bucket', help='The name of the bucket.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') + + +def main(): + + args = parser.parse_args() + + # Loading credentials values from the environment variables + credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider() + + # Using the SDK's default configuration + cfg = oss.config.load_default() + cfg.credentials_provider = credentials_provider + cfg.region = args.region + if args.endpoint is not None: + cfg.endpoint = args.endpoint + + client = oss.Client(cfg) + + result = client.get_bucket_versioning(oss.GetBucketVersioningRequest( + bucket=args.bucket, + )) + print(f'status code: {result.status_code},' + f' request id: {result.request_id},' + f' version status: {result.version_status},' + ) + +if __name__ == "__main__": + main() + + diff --git a/sample/get_object.py b/sample/get_object.py new file mode 100644 index 0000000..44135e3 --- /dev/null +++ b/sample/get_object.py @@ -0,0 +1,66 @@ +import sys +import os +import argparse +import alibabacloud_oss_v2 as oss + +# It is used only to execute sample code in the project directory +code_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) 
+sys.path.append(code_directory) + +parser = argparse.ArgumentParser(description="get object sample") +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--bucket', help='The name of the bucket.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') +parser.add_argument('--key', help='The name of the object.', required=True) + + +def main(): + + args = parser.parse_args() + + # Loading credentials values from the environment variables + credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider() + + # Using the SDK's default configuration + cfg = oss.config.load_default() + cfg.credentials_provider = credentials_provider + cfg.region = args.region + if args.endpoint is not None: + cfg.endpoint = args.endpoint + + client = oss.Client(cfg) + + result = client.get_object(oss.GetObjectRequest( + bucket=args.bucket, + key=args.key, + )) + print(f'status code: {result.status_code},' + f' request id: {result.request_id},' + f' content length: {result.content_length},' + f' content range: {result.content_range},' + f' content type: {result.content_type},' + f' etag: {result.etag},' + f' last modified: {result.last_modified},' + f' content md5: {result.content_md5},' + f' cache control: {result.cache_control},' + f' content disposition: {result.content_disposition},' + f' content encoding: {result.content_encoding},' + f' expires: {result.expires},' + f' hash crc64: {result.hash_crc64},' + f' storage class: {result.storage_class},' + f' object type: {result.object_type},' + f' version id: {result.version_id},' + f' tagging count: {result.tagging_count},' + f' server side encryption: {result.server_side_encryption},' + f' server side data encryption: {result.server_side_data_encryption},' + f' sse kms key id: {result.sse_kms_key_id},' + f' next append position: {result.next_append_position},' + f' expiration: {result.expiration},' + f' restore: {result.restore},' + f' process status: {result.process_status},' + f' delete marker: {result.delete_marker},' + ) + +if __name__ == "__main__": + main() + diff --git a/sample/get_object_meta.py b/sample/get_object_meta.py new file mode 100644 index 0000000..26837f8 --- /dev/null +++ b/sample/get_object_meta.py @@ -0,0 +1,50 @@ +import sys +import os +import argparse +import alibabacloud_oss_v2 as oss + +# It is used only to execute sample code in the project directory +code_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(code_directory) + +parser = argparse.ArgumentParser(description="get object meta sample") +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--bucket', help='The name of the bucket.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') +parser.add_argument('--key', help='The name of the object.', required=True) + + +def main(): + + args = parser.parse_args() + + # Loading credentials values from the environment variables + credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider() + + # Using the SDK's default configuration + cfg = oss.config.load_default() + cfg.credentials_provider = credentials_provider + cfg.region = args.region + if args.endpoint is not None: + cfg.endpoint = args.endpoint + + client = oss.Client(cfg) + + result = client.get_object_meta(oss.GetObjectMetaRequest( + 
bucket=args.bucket, + key=args.key, + )) + + print(f'status code: {result.status_code},' + f' request id: {result.request_id},' + f' content length: {result.content_length},' + f' etag: {result.etag},' + f' last modified: {result.last_modified},' + f' last access time: {result.last_access_time},' + f' version id: {result.version_id},' + f' hash crc64: {result.hash_crc64},' + ) + +if __name__ == "__main__": + main() + diff --git a/sample/head_object.py b/sample/head_object.py new file mode 100644 index 0000000..2da51d1 --- /dev/null +++ b/sample/head_object.py @@ -0,0 +1,70 @@ +import sys +import os +import argparse +import alibabacloud_oss_v2 as oss + +# It is used only to execute sample code in the project directory +code_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(code_directory) + +parser = argparse.ArgumentParser(description="head object sample") +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--bucket', help='The name of the bucket.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') +parser.add_argument('--key', help='The name of the object.', required=True) + + +def main(): + + args = parser.parse_args() + + # Loading credentials values from the environment variables + credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider() + + # Using the SDK's default configuration + cfg = oss.config.load_default() + cfg.credentials_provider = credentials_provider + cfg.region = args.region + if args.endpoint is not None: + cfg.endpoint = args.endpoint + + client = oss.Client(cfg) + + result = client.head_object(oss.HeadObjectRequest( + bucket=args.bucket, + key=args.key, + )) + + print(f'status code: {result.status_code},' + f' request id: {result.request_id},' + f' content length: {result.content_length},' + f' content type: {result.content_type},' + f' etag: {result.etag},' + f' last modified: {result.last_modified},' + f' content md5: {result.content_md5},' + f' cache control: {result.cache_control},' + f' content disposition: {result.content_disposition},' + f' content encoding: {result.content_encoding},' + f' expires: {result.expires},' + f' hash crc64: {result.hash_crc64},' + f' storage class: {result.storage_class},' + f' object type: {result.object_type},' + f' version id: {result.version_id},' + f' tagging count: {result.tagging_count},' + f' server side encryption: {result.server_side_encryption},' + f' server side data encryption: {result.server_side_data_encryption},' + f' sse kms key id: {result.sse_kms_key_id},' + f' next append position: {result.next_append_position},' + f' expiration: {result.expiration},' + f' restore: {result.restore},' + f' process status: {result.process_status},' + f' request charged: {result.request_charged},' + f' allow origin: {result.allow_origin},' + f' allow methods: {result.allow_methods},' + f' allow age: {result.allow_age},' + f' allow headers: {result.allow_headers},' + f' expose headers: {result.expose_headers},' + ) + +if __name__ == "__main__": + main() diff --git a/sample/list_buckets.py b/sample/list_buckets.py new file mode 100644 index 0000000..1da7829 --- /dev/null +++ b/sample/list_buckets.py @@ -0,0 +1,43 @@ +import sys +import os +import argparse +import alibabacloud_oss_v2 as oss + +# It is used only to execute sample code in the project directory +code_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) 
+sys.path.append(code_directory) + +parser = argparse.ArgumentParser(description="list buckets sample") + +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') + + +def main(): + + args = parser.parse_args() + + # Loading credentials values from the environment variables + credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider() + + # Using the SDK's default configuration + cfg = oss.config.load_default() + cfg.credentials_provider = credentials_provider + cfg.region = args.region + if args.endpoint is not None: + cfg.endpoint = args.endpoint + + client = oss.Client(cfg) + + # Create the Paginator for the ListBuckets operation + paginator = client.list_buckets_paginator() + + # Iterate through the bucket pages + for page in paginator.iter_page(oss.ListBucketsRequest( + ) + ): + for o in page.buckets: + print(f'Bucket: {o.name}, {o.location}, {o.creation_date} {o.resource_group_id}') + +if __name__ == "__main__": + main() diff --git a/sample/list_multipart_uploads.py b/sample/list_multipart_uploads.py new file mode 100644 index 0000000..e74bd9d --- /dev/null +++ b/sample/list_multipart_uploads.py @@ -0,0 +1,45 @@ +import sys +import os +import argparse +import alibabacloud_oss_v2 as oss + +# It is used only to execute sample code in the project directory +code_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(code_directory) + +parser = argparse.ArgumentParser(description="list multipart uploads sample") + +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--bucket', help='The name of the bucket.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') + + +def main(): + + args = parser.parse_args() + + # Loading credentials values from the environment variables + credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider() + + # Using the SDK's default configuration + cfg = oss.config.load_default() + cfg.credentials_provider = credentials_provider + cfg.region = args.region + if args.endpoint is not None: + cfg.endpoint = args.endpoint + + client = oss.Client(cfg) + + # Create the Paginator for the ListMultipartUploads operation + paginator = client.list_multipart_uploads_paginator() + + # Iterate through the multipart upload pages + for page in paginator.iter_page(oss.ListMultipartUploadsRequest( + bucket=args.bucket + ) + ): + for o in page.uploads: + print(f'Multipart Upload: {o.key}, {o.upload_id}') + +if __name__ == "__main__": + main() diff --git a/sample/list_object_versions.py b/sample/list_object_versions.py new file mode 100644 index 0000000..bd2d012 --- /dev/null +++ b/sample/list_object_versions.py @@ -0,0 +1,44 @@ +import sys +import os +import argparse +import alibabacloud_oss_v2 as oss + +# It is used only to execute sample code in the project directory +code_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(code_directory) + +parser = argparse.ArgumentParser(description="list object versions sample") + +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--bucket', help='The name of the bucket.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') + 
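+# Note: a versioned bucket also records delete markers alongside object versions; this sample prints the object versions only.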
+def main(): + + args = parser.parse_args() + + # Loading credentials values from the environment variables + credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider() + + # Using the SDK's default configuration + cfg = oss.config.load_default() + cfg.credentials_provider = credentials_provider + cfg.region = args.region + if args.endpoint is not None: + cfg.endpoint = args.endpoint + + client = oss.Client(cfg) + + # Create the Paginator for the ListObjectVersions operation + paginator = client.list_object_versions_paginator() + + # Iterate through the object version pages + for page in paginator.iter_page(oss.ListObjectVersionsRequest( + bucket=args.bucket + ) + ): + for o in page.version: + print(f'Object version: {o.key}, {o.size}, {o.version_id} {o.last_modified}') + +if __name__ == "__main__": + main() diff --git a/sample/list_objects.py b/sample/list_objects.py new file mode 100644 index 0000000..d189f03 --- /dev/null +++ b/sample/list_objects.py @@ -0,0 +1,45 @@ +import sys +import os +import argparse +import alibabacloud_oss_v2 as oss + +# It is used only to execute sample code in the project directory +code_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(code_directory) + +parser = argparse.ArgumentParser(description="list objects sample") + +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--bucket', help='The name of the bucket.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') + + +def main(): + + args = parser.parse_args() + + # Loading credentials values from the environment variables + credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider() + + # Using the SDK's default configuration + cfg = oss.config.load_default() + cfg.credentials_provider = credentials_provider + cfg.region = args.region + if args.endpoint is not None: + cfg.endpoint = args.endpoint + + client = oss.Client(cfg) + + # Create the Paginator for the ListObjects operation + paginator = client.list_objects_paginator() + + # Iterate through the object pages + for page in paginator.iter_page(oss.ListObjectsRequest( + bucket=args.bucket + ) + ): + for o in page.contents: + print(f'Object: {o.key}, {o.size}, {o.last_modified}') + +if __name__ == "__main__": + main() diff --git a/sample/list_objects_v2.py b/sample/list_objects_v2.py new file mode 100644 index 0000000..dc1b8e1 --- /dev/null +++ b/sample/list_objects_v2.py @@ -0,0 +1,45 @@ +import sys +import os + +# It is used only to execute sample code in the project directory +code_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(code_directory) + +import argparse +import alibabacloud_oss_v2 as oss + +parser = argparse.ArgumentParser(description="list objects v2 sample") + +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--bucket', help='The name of the bucket.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') + +def main(): + + args = parser.parse_args() + + # Loading credentials values from the environment variables + credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider() + + # Using the SDK's default configuration + cfg = oss.config.load_default() + cfg.credentials_provider = credentials_provider + cfg.region = args.region + if 
args.endpoint is not None: + cfg.endpoint = args.endpoint + + client = oss.Client(cfg) + + # Create the Paginator for the ListObjectsV2 operation + paginator = client.list_objects_v2_paginator() + + # Iterate through the object pages + for page in paginator.iter_page(oss.ListObjectsV2Request( + bucket=args.bucket + ) + ): + for o in page.contents: + print(f'Object: {o.key}, {o.size}, {o.last_modified}') + +if __name__ == "__main__": + main() diff --git a/sample/list_parts.py b/sample/list_parts.py new file mode 100644 index 0000000..7984bf1 --- /dev/null +++ b/sample/list_parts.py @@ -0,0 +1,50 @@ +import sys +import os +import argparse +import alibabacloud_oss_v2 as oss + +# It is used only to execute sample code in the project directory +code_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(code_directory) + + +parser = argparse.ArgumentParser(description="list parts sample") + +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--bucket', help='The name of the bucket.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') +parser.add_argument('--key', help='The name of the object.', required=True) +parser.add_argument('--upload_id', help='The ID of the multipart upload task.', required=True) + + +def main(): + + args = parser.parse_args() + + # Loading credentials values from the environment variables + credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider() + + # Using the SDK's default configuration + cfg = oss.config.load_default() + cfg.credentials_provider = credentials_provider + cfg.region = args.region + if args.endpoint is not None: + cfg.endpoint = args.endpoint + + client = oss.Client(cfg) + + # Create the Paginator for the ListParts operation + paginator = client.list_parts_paginator() + + # Iterate through the part pages + for page in paginator.iter_page(oss.ListPartsRequest( + bucket=args.bucket, + key=args.key, + upload_id=args.upload_id + ) + ): + for o in page.parts: + print(f'Part: {o.part_number}, {o.size}, {o.last_modified}') + +if __name__ == "__main__": + main() diff --git a/sample/put_bucket.py b/sample/put_bucket.py new file mode 100644 index 0000000..428c9c5 --- /dev/null +++ b/sample/put_bucket.py @@ -0,0 +1,44 @@ +import sys +import os +import argparse +import alibabacloud_oss_v2 as oss + +# It is used only to execute sample code in the project directory +code_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(code_directory) + +parser = argparse.ArgumentParser(description="put bucket sample") +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--bucket', help='The name of the bucket.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') + + +def main(): + + args = parser.parse_args() + + # Loading credentials values from the environment variables + credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider() + + # Using the SDK's default configuration + cfg = oss.config.load_default() + cfg.credentials_provider = credentials_provider + cfg.region = args.region + if args.endpoint is not None: + cfg.endpoint = args.endpoint + + client = oss.Client(cfg) + + result = client.put_bucket(oss.PutBucketRequest( + bucket=args.bucket, + acl='private', + 
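+ # 'private' is one of the canned bucket ACLs ('public-read' and 'public-read-write' are the others); the 'IA' storage class below stands for Infrequent Access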
create_bucket_configuration=oss.CreateBucketConfiguration( + storage_class='IA' + ) + )) + print(f'status code: {result.status_code},' + f' request id: {result.request_id},' + ) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/sample/put_bucket_version.py b/sample/put_bucket_version.py new file mode 100644 index 0000000..2428b74 --- /dev/null +++ b/sample/put_bucket_version.py @@ -0,0 +1,45 @@ +import sys +import os +import argparse +import alibabacloud_oss_v2 as oss + +# It is used only to execute sample code in the project directory +code_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(code_directory) + +parser = argparse.ArgumentParser(description="put bucket versioning sample") +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--bucket', help='The name of the bucket.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') + + +def main(): + + args = parser.parse_args() + + # Loading credentials values from the environment variables + credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider() + + # Using the SDK's default configuration + cfg = oss.config.load_default() + cfg.credentials_provider = credentials_provider + cfg.region = args.region + if args.endpoint is not None: + cfg.endpoint = args.endpoint + + client = oss.Client(cfg) + + result = client.put_bucket_versioning(oss.PutBucketVersioningRequest( + bucket=args.bucket, + versioning_configuration=oss.VersioningConfiguration( + status='Enabled' + ) + )) + print(f'status code: {result.status_code},' + f' request id: {result.request_id},' + ) + +if __name__ == "__main__": + main() + + diff --git a/sample/put_object.py b/sample/put_object.py new file mode 100644 index 0000000..7000663 --- /dev/null +++ b/sample/put_object.py @@ -0,0 +1,50 @@ +import sys +import os +import argparse +import alibabacloud_oss_v2 as oss + +# It is used only to execute sample code in the project directory +code_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(code_directory) + +parser = argparse.ArgumentParser(description="put object sample") +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--bucket', help='The name of the bucket.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') +parser.add_argument('--key', help='The name of the object.', required=True) + +def main(): + + args = parser.parse_args() + + # Loading credentials values from the environment variables + credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider() + + # Using the SDK's default configuration + cfg = oss.config.load_default() + cfg.credentials_provider = credentials_provider + cfg.region = args.region + if args.endpoint is not None: + cfg.endpoint = args.endpoint + + client = oss.Client(cfg) + + data = b'hello world' + + result = client.put_object(oss.PutObjectRequest( + bucket=args.bucket, + key=args.key, + body=data, + )) + + print(f'status code: {result.status_code},' + f' request id: {result.request_id},' + f' content md5: {result.content_md5},' + f' etag: {result.etag},' + f' hash crc64: {result.hash_crc64},' + f' version id: {result.version_id},' + ) + + +if __name__ == "__main__": + main() diff --git a/sample/restore_object.py b/sample/restore_object.py 
new file mode 100644 index 0000000..7a3b3da --- /dev/null +++ b/sample/restore_object.py @@ -0,0 +1,67 @@ +import sys +import os +import argparse +import time + +import alibabacloud_oss_v2 as oss + +# It is used only to execute sample code in the project directory +code_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(code_directory) + +parser = argparse.ArgumentParser(description="restore object sample") +parser.add_argument('--region', help='The region in which the bucket is located.', required=True) +parser.add_argument('--bucket', help='The name of the bucket.', required=True) +parser.add_argument('--endpoint', help='The domain names that other services can use to access OSS') +parser.add_argument('--key', help='The name of the object.', required=True) + + +def main(): + + args = parser.parse_args() + + # Loading credentials values from the environment variables + credentials_provider = oss.credentials.EnvironmentVariableCredentialsProvider() + + # Using the SDK's default configuration + cfg = oss.config.load_default() + cfg.credentials_provider = credentials_provider + cfg.region = args.region + if args.endpoint is not None: + cfg.endpoint = args.endpoint + + client = oss.Client(cfg) + + result = client.restore_object(oss.RestoreObjectRequest( + bucket=args.bucket, + key=args.key, + restore_request=oss.RestoreRequest( + days=1, + # The restoration priority of Cold Archive or Deep Cold Archive objects. + # Valid values: Expedited, Standard, Bulk + tier="Expedited", + ) + )) + + print(f'status code: {result.status_code},' + f' request id: {result.request_id},' + f' version id: {result.version_id},' + f' restore priority: {result.restore_priority},' + ) + + + while True: + result = client.head_object(oss.HeadObjectRequest( + bucket=args.bucket, + key=args.key, + )) + + if result.restore and result.restore != 'ongoing-request="true"': + print('restore is successful') + break + time.sleep(5) + print(result.restore) + +if __name__ == "__main__": + main() +
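The loop above polls head_object until the x-oss-restore header stops reporting ongoing-request="true". A minimal sketch of the same wait factored into a reusable helper with a timeout (the helper name and the limits are illustrative, not part of the SDK):

    import time
    import alibabacloud_oss_v2 as oss

    def wait_for_restore(client: oss.Client, bucket: str, key: str,
                         interval: float = 5.0, timeout: float = 600.0) -> bool:
        # Poll HeadObject until restoration finishes; give up after `timeout` seconds.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            result = client.head_object(oss.HeadObjectRequest(bucket=bucket, key=key))
            # The header reads ongoing-request="false", expiry-date="..." once the copy is ready.
            if result.restore and result.restore != 'ongoing-request="true"':
                return True
            time.sleep(interval)
        return False

Dropping such a helper into the sample replaces the open-ended while True loop with a bounded wait.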
+""" + +import os +from setuptools import setup, find_packages + + +PACKAGE = "alibabacloud_oss_v2" +NAME = "alibabacloud_oss_v2" +DESCRIPTION = "Alibaba Cloud OSS (Object Storage Service) SDK V2 for Python" +AUTHOR = "Alibaba Cloud OSS SDK" +AUTHOR_EMAIL = "sdk-team@alibabacloud.com" +URL = "https://github.com/aliyun/alibabacloud-oss-python-sdk-v2" +VERSION = __import__(PACKAGE).__version__ +REQUIRES = [ + "requests>=2.18.4", + "cryptography>=2.1.4", + "crcmod-plus>=2.1.0" +] + +if not VERSION: + raise RuntimeError('Cannot find version information') + +LONG_DESCRIPTION = '' +if os.path.exists('./README.md'): + with open("README.md", encoding='utf-8') as fp: + LONG_DESCRIPTION = fp.read() + +setup( + name=NAME, + version=VERSION, + description=DESCRIPTION, + long_description=LONG_DESCRIPTION, + long_description_content_type='text/markdown', + author=AUTHOR, + author_email=AUTHOR_EMAIL, + license="Apache License 2.0", + url=URL, + keywords=["alibabacloud", "oss"], + packages=find_packages(exclude=["tests*", "sample*"]), + include_package_data=True, + platforms="any", + install_requires=REQUIRES, + python_requires=">=3.8", + classifiers=( + 'Development Status :: 4 - Beta', + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + 'License :: OSI Approved :: Apache License 2.0', + ) +) diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/data/enc-example.jpg b/tests/data/enc-example.jpg new file mode 100644 index 0000000000000000000000000000000000000000..e498969bc41f1f5b8c942d83a3336abcf92c9560 GIT binary patch literal 21839 zcmV(nK=QvNRg_Hk4x+SM2m7F|ke+1Mn>sO@qbs06!kR?lxDnz$rH(!ebD_;iszZ?( z9X28*R&M_hj);i=S8A1|J0k%OiCzd&%&h%MWX3ez=UNss>0^*dHCtQ`o2FAxqM@(% z$g%$vhuUs$VW2I(XGcY$j@8}WC)ol9pA@Qr1rutx@oXd&V4D z(kX-HzgrsFVWuYPH8Mh)?yjr>3<>NCU^r)*P%yc?R?mMnjx>B;d$G3gR(E3`q?u6B zV1O39p9f7_%O*!2e3d+nZZeTP9U06;#wgj~IFvW^H=n&GaJI_52-YVjdy1YVutMLb zIAtS-xyD1;%0>+qAV+4$V}>L*KF_G7*V)cw{!GjgvxLRgRW7Zqxy|8tx1Jhgp7C6J8KCx+(|qpBcjfG!0N_GL1v@H0 z=k$tr_ta$YFUnR9$Y$VC>w^R=W^8rL+WTiGkb-D&Swo#~2;^awGI5?F*dP1?DC!Au zvm3iEJIE74%HIpw`p5@~6*@@OJ)lA3p32#P%Aw4BJMb~`T+6TJLb-r1)2ZCN3<*_0 zeU}5b5s#i$(&>WBA)rks-j>dMp2mq??z1kkph#O955?JbL`#G^E+bwTaC5MMI^UwI zK`m8RT<#L}ZVXMq9b^{YV@%f!t{C>C9>ZQ@pmbqO0JQO*SHTbuJ zYOA){<7bVY?)_|Qb_QV?cPH8B9lrrII(ILT=EDK-Pz+~Vw%aBx!KxlJ;Yj1vHb^F= zD}r)A^ISCA0aV>RVa6v6-^P}XFNs0Ke;$W-2y_BV#Vn9%StZUOL-{oIXwb1IXqdh?*TV zZZqNPwG-s-B$fY3m;(Y}bhICms8tu0QU{3-swOh-sMID$-LGgZ8t%aD-Bnz#6^or& zCC5(wI}g3UA=m>j^ab6o(7_ExW)3kK@HYvrXmA>#5ZV%H276U0p;@NkU!$s2UQT1; zsURwenpEc3q!U$V?Ql#2*@%o72QOe9nUD}}A1xc*^#=s`7Lrx4)Mq!5Lll+~Lu8^v zw7#YfnhTz;NE2wPGTL4)NCl8RL46&mLvohw*7_B-tfnrQj{K$5xOUxG1mNpVroa#vehZd3*9zU>8h;!ktN8~EtAwhBiK&k-p#2fvPwk%LLa+47AbbF^Ct>DSKsllAjvI|pvG%4U9 z=e=C_e`m!GvinS;(lVo1*CGl(A7rf`cw|ejUgrL&46+Yd+HcagO=HNTo`;Zot|%)=pL;&s+z!FCESmzoP9f46-jsnf9K2Dv6q)T+i$))!b%u&XEfG zml-Up?4BosT=t9W(@dm@2XFN>QfMV7{Kf(X9z2>!Zh4eYn%DoSR7W4s-H-e*rrPKr z1+wO;3Y>)nM;MR5Wr1wMkUh`9*u$tz9XefQ!F zNqauxW&el?9;)_X$JNr&mU~+O`IC+_K!cb08(yCa>P)6iX8Hr0PwimQ9YHW?k@a^3 zbNi?pn4A9M*b-;(LegFkq|Z-QOJ=Y@I5#K*NDQ(d={F>X12Z=ahS?bvG8=?5Q}J+1 zy<;55euT@WaLkSJ=vEMDseq(DV#q0k7Tk~C(oFRz#OKv70+By-K$8W=oFEG9zYk)) 
diff --git a/tests/data/example.jpg b/tests/data/example.jpg new file mode 100644 index 0000000000000000000000000000000000000000..ffd46a2f81b1fe020ed0673f27711124110eed3f GIT binary patch literal 21839 [binary patch data omitted] diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py new file mode 100644 [file preamble truncated in the source: module imports and the test-account constants ACCESS_ID, ACCESS_KEY, REGION, ENDPOINT, RAM_ROLE_ARN, BUCKETNAME_PREFIX, OBJECTNAME_PREFIX, plus the TestIntegration base class used by the tests] +def get_default_client() -> oss.Client: + global _defaultClient + if _defaultClient is not None: + return _defaultClient + + cfg = oss.config.load_default() + cfg.credentials_provider = oss.credentials.StaticCredentialsProvider(ACCESS_ID, ACCESS_KEY) + cfg.region = REGION + cfg.endpoint = ENDPOINT + _defaultClient = oss.Client(cfg) + + return _defaultClient + +def get_invalid_ak_client() -> oss.Client: + global _invalidAkClient + if _invalidAkClient is not None: + return _invalidAkClient + + cfg = oss.config.load_default() + cfg.credentials_provider = oss.credentials.StaticCredentialsProvider('invalid-ak', 'invalid') + cfg.region = REGION + cfg.endpoint = ENDPOINT + _invalidAkClient = oss.Client(cfg) + + return _invalidAkClient +
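+# (The two getters above memoize their client in a module-level global, so the whole test run shares a single client and connection pool instead of rebuilding one per test.)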
cfg.region = region + cfg.endpoint = endpoint + return oss.Client(cfg) + +def get_client_use_ststoken(region:str, endpoint:str) -> oss.Client: + result = sts_assume_role(ACCESS_ID, ACCESS_KEY, RAM_ROLE_ARN) + cfg = oss.config.load_default() + cfg.credentials_provider = oss.credentials.StaticCredentialsProvider( + result['Credentials']['AccessKeyId'], + result['Credentials']['AccessKeySecret'], + result['Credentials']['SecurityToken'] + ) + cfg.region = region + cfg.endpoint = endpoint + return oss.Client(cfg) + + +def get_kms_id(region:str) ->str: + return + +def random_lowstr(n): + return ''.join(random.choice(string.ascii_lowercase) for i in range(n)) + +def random_str(n): + return ''.join(random.choice(string.ascii_letters) for i in range(n)) + +def random_bucket_name(): + return BUCKETNAME_PREFIX + random_lowstr(4) + '-' + str(int(datetime.datetime.now(datetime.timezone.utc).timestamp())) + +def clean_objects(client:oss.Client, bucket_name:str) -> None: + marker = '' + is_truncated = True + while is_truncated: + result = client.list_objects(oss.ListObjectsRequest(bucket=bucket_name, marker=marker)) + if result.contents is not None: + delete_object = [] + for o in result.contents: + delete_object.append(oss.DeleteObject(key=o.key)) + + if len(delete_object) > 0: + client.delete_multiple_objects(oss.DeleteMultipleObjectsRequest( + bucket=bucket_name, + objects=delete_object)) + is_truncated = result.is_truncated + marker = result.next_marker + + +def clean_parts(client:oss.Client, bucket_name:str) -> None: + marker = '' + is_truncated = True + while is_truncated: + result = client.list_multipart_uploads(oss.ListMultipartUploadsRequest( + bucket=bucket_name, + upload_id_marker=marker + )) + if result.uploads is not None: + for o in result.uploads: + client.abort_multipart_upload(oss.AbortMultipartUploadRequest( + bucket=bucket_name, + key=o.key, + upload_id=o.upload_id + )) + + is_truncated = result.is_truncated + marker = result.next_upload_id_marker + + +def clean_bucket(props:oss.BucketProperties) -> None: + if props.intranet_endpoint == ENDPOINT or props.extranet_endpoint == ENDPOINT: + client = get_default_client() + else: + client = get_client(props.region, props.extranet_endpoint) + clean_objects(client, props.name) + clean_parts(client, props.name) + client.delete_bucket(oss.DeleteBucketRequest(bucket=props.name)) + + +def clean_buckets(prefix:str) -> None: + client = get_default_client() + result = client.list_buckets(oss.ListBucketsRequest( + prefix=BUCKETNAME_PREFIX + )) + for props in result.buckets: + clean_bucket(props) + +def sts_assume_role(access_key_id:str, access_key_secret:str, role_arn:str) -> dict: + # StsSignVersion sts sign version + StsSignVersion = "1.0" + # StsAPIVersion sts api version + StsAPIVersion = "2015-04-01" + # StsHost sts host + StsHost = "https://sts.aliyuncs.com/" + # TimeFormat time fomrat + TimeFormat = '%Y-%m-%dT%H:%M:%SZ' + # RespBodyFormat respone body format + RespBodyFormat = "JSON" + # PercentEncode '/' + PercentEncode = "%2F" + # HTTPGet http get method + HTTPGet = "GET" + uuid = f"Nonce-{str(random.randint(0, 10000))}" + queryStr = "SignatureVersion=" + StsSignVersion + queryStr += "&Format=" + RespBodyFormat + queryStr += "&Timestamp=" + quote(datetime.datetime.now(datetime.timezone.utc).strftime(TimeFormat), safe='') + queryStr += "&RoleArn=" + quote(role_arn, safe='') + queryStr += "&RoleSessionName=" + "oss_test_sess" + queryStr += "&AccessKeyId=" + access_key_id + queryStr += "&SignatureMethod=HMAC-SHA1" + queryStr += "&Version=" + 
StsAPIVersion + queryStr += "&Action=AssumeRole" + queryStr += "&SignatureNonce=" + uuid + queryStr += "&DurationSeconds=3600" + + #Sort query string + key_val_pairs = [] + for pair in queryStr.split('&'): + key, _, value = pair.partition('=') + key_val_pairs.append((key, value)) + + sorted_key_vals = [] + for key, value in sorted(key_val_pairs): + sorted_key_vals.append(f'{key}={value}') + + str_to_sign = HTTPGet + "&" + PercentEncode + "&" + quote('&'.join(sorted_key_vals), safe='') + + # Generate signature + h = hmac.new((access_key_secret+"&").encode(), str_to_sign.encode(), hashlib.sha1) + signature = base64.b64encode(h.digest()).decode() + + # Build url + assume_url = StsHost + "?" + queryStr + "&Signature=" + quote(signature, safe='') + + response = requests.get(assume_url) + + return json.loads(response.content) + \ No newline at end of file diff --git a/tests/integration/test_client.py b/tests/integration/test_client.py new file mode 100644 index 0000000..31ca825 --- /dev/null +++ b/tests/integration/test_client.py @@ -0,0 +1,2227 @@ +# pylint: skip-file +from typing import cast +import os +import tempfile +import datetime +import requests +import alibabacloud_oss_v2 as oss +from . import TestIntegration, random_bucket_name, random_str, REGION, OBJECTNAME_PREFIX, get_client + + +class TestBucketBasic(TestIntegration): + + def test_put_bucket(self): + bucket_name = random_bucket_name() + result = self.client.put_bucket(oss.PutBucketRequest( + bucket=bucket_name, + acl='private', + create_bucket_configuration=oss.CreateBucketConfiguration( + storage_class='IA' + ) + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + self.assertEqual(24, len(result.request_id)) + self.assertEqual(24, len(result.headers.get('x-oss-request-id'))) + + def test_put_bucket_fail(self): + bucket_name = random_bucket_name() + try: + self.invalid_client.put_bucket(oss.PutBucketRequest( + bucket=bucket_name, + acl='private', + create_bucket_configuration=oss.CreateBucketConfiguration( + storage_class='IA' + ) + )) + self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('InvalidAccessKeyId', serr.code) + + + def test_bucket_acl(self): + bucket_name = random_bucket_name() + result = self.client.put_bucket(oss.PutBucketRequest( + bucket=bucket_name, + acl='private', + create_bucket_configuration=oss.CreateBucketConfiguration( + storage_class='IA' + ) + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + self.assertEqual(24, len(result.request_id)) + self.assertEqual(24, len(result.headers.get('x-oss-request-id'))) + + # get bucket acl + result = self.client.get_bucket_acl(oss.GetBucketAclRequest( + bucket=bucket_name, + )) + self.assertEqual(200, result.status_code) + self.assertEqual('private', result.acl) + self.assertEqual(24, len(result.request_id)) + self.assertEqual(24, len(result.headers.get('x-oss-request-id'))) + + # put bucket acl + result = self.client.put_bucket_acl(oss.PutBucketAclRequest( + bucket=bucket_name, + acl='public-read-write' + )) + self.assertEqual(200, result.status_code) + + # get bucket acl + result = self.client.get_bucket_acl(oss.GetBucketAclRequest( + bucket=bucket_name, + )) + self.assertEqual('public-read-write', result.acl) + 
self.assertEqual(24, len(result.request_id)) + self.assertEqual(24, len(result.headers.get('x-oss-request-id'))) + + def test_put_bucket_acl_fail(self): + bucket_name = random_bucket_name() + try: + self.invalid_client.put_bucket_acl(oss.PutBucketAclRequest( + bucket=bucket_name, + acl='public-read-write' + )) + self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('InvalidAccessKeyId', serr.code) + + def test_get_bucket_acl_fail(self): + bucket_name = random_bucket_name() + try: + self.invalid_client.get_bucket_acl(oss.GetBucketAclRequest( + bucket=bucket_name, + )) + self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(404, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('NoSuchBucket', serr.code) + + + def test_list_objects_v2(self): + bucket_name = random_bucket_name() + result = self.client.put_bucket(oss.PutBucketRequest( + bucket=bucket_name, + acl='private', + create_bucket_configuration=oss.CreateBucketConfiguration( + storage_class='IA' + ) + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + self.assertEqual(24, len(result.request_id)) + self.assertEqual(24, len(result.headers.get('x-oss-request-id'))) + + result = self.client.list_objects_v2(oss.ListObjectsV2Request( + bucket=bucket_name, + delimiter='/', + start_after='b', + encoding_type='url', + continuation_token='', + max_keys=10, + prefix='aaa', + fetch_owner=True, + request_payer='requester', + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + self.assertEqual(24, len(result.request_id)) + self.assertEqual(24, len(result.headers.get('x-oss-request-id'))) + + self.assertEqual(bucket_name, result.name) + self.assertEqual('/', result.delimiter) + self.assertEqual('b', result.start_after) + self.assertEqual('url', result.encoding_type) + self.assertEqual(10, result.max_keys) + self.assertEqual('aaa', result.prefix) + + + def test_list_objects_v2_fail(self): + bucket_name = random_bucket_name() + try: + self.invalid_client.list_objects_v2(oss.ListObjectsV2Request( + bucket=bucket_name,)) + self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(404, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('NoSuchBucket', serr.code) + + try: + self.invalid_client.list_objects_v2(oss.ListObjectsV2Request( + bucket=bucket_name, + delimiter='/', + start_after='b', + encoding_type='url', + continuation_token='', + max_keys=10, + prefix='aaa', + fetch_owner=True, + request_payer='requester', + )) + self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(404, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('NoSuchBucket', serr.code) + + 
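+    # Note: every *_fail test in this file exercises the same error contract:
+    # the client raises oss.exceptions.OperationError, and unwrap() returns the
+    # underlying oss.exceptions.ServiceError carrying the HTTP status code, the
+    # 24-character request id and the OSS error code ('NoSuchBucket',
+    # 'InvalidAccessKeyId', ...). A shared assertion helper could condense the
+    # pattern -- a sketch only, not part of this patch:
+    #
+    #   def assert_service_error(self, e, status, code):
+    #       ope = cast(oss.exceptions.OperationError, e)
+    #       serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+    #       self.assertIsInstance(serr, oss.exceptions.ServiceError)
+    #       self.assertEqual(status, serr.status_code)
+    #       self.assertEqual(24, len(serr.request_id))
+    #       self.assertEqual(code, serr.code)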
def test_get_bucket_stat(self): + bucket_name = random_bucket_name() + result = self.client.put_bucket(oss.PutBucketRequest( + bucket=bucket_name, + acl='private', + create_bucket_configuration=oss.CreateBucketConfiguration( + storage_class='IA' + ) + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + self.assertEqual(24, len(result.request_id)) + self.assertEqual(24, len(result.headers.get('x-oss-request-id'))) + + # get bucket stat + result = self.client.get_bucket_stat(oss.models.GetBucketStatRequest( + bucket=bucket_name, + )) + self.assertEqual(200, result.status_code) + self.assertEqual(24, len(result.request_id)) + self.assertEqual(24, len(result.headers.get('x-oss-request-id'))) + self.assertTrue(result.storage==0) + self.assertTrue(result.object_count==0) + self.assertTrue(result.multi_part_upload_count==0) + self.assertTrue(result.live_channel_count==0) + self.assertTrue(result.last_modified_time==0) + self.assertTrue(result.standard_storage==0) + self.assertTrue(result.standard_object_count==0) + self.assertTrue(result.infrequent_access_storage==0) + self.assertTrue(result.infrequent_access_real_storage==0) + self.assertTrue(result.infrequent_access_object_count==0) + self.assertTrue(result.archive_storage==0) + self.assertTrue(result.archive_real_storage==0) + self.assertTrue(result.archive_object_count==0) + self.assertTrue(result.cold_archive_storage==0) + self.assertTrue(result.cold_archive_real_storage==0) + self.assertTrue(result.cold_archive_object_count==0) + self.assertTrue(result.deep_cold_archive_storage==0) + self.assertTrue(result.deep_cold_archive_real_storage==0) + self.assertTrue(result.deep_cold_archive_object_count==0) + self.assertTrue(result.delete_marker_count==0) + + def test_get_bucket_stat_fail(self): + bucket_name = random_bucket_name() + try: + self.invalid_client.get_bucket_stat(oss.models.GetBucketStatRequest( + bucket=bucket_name, + )) + self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(404, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('NoSuchBucket', serr.code) + + def test_bucket_location(self): + bucket_name = random_bucket_name() + result = self.client.put_bucket(oss.PutBucketRequest( + bucket=bucket_name, + acl='private', + create_bucket_configuration=oss.CreateBucketConfiguration( + storage_class='IA' + ) + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + self.assertEqual(24, len(result.request_id)) + self.assertEqual(24, len(result.headers.get('x-oss-request-id'))) + + # get bucket location + result = self.client.get_bucket_location(oss.models.GetBucketLocationRequest( + bucket=bucket_name, + )) + self.assertEqual(200, result.status_code) + self.assertEqual(f'oss-{REGION}', result.location) + self.assertEqual(24, len(result.request_id)) + self.assertEqual(24, len(result.headers.get('x-oss-request-id'))) + + def test_put_bucket_location_fail(self): + bucket_name = random_bucket_name() + try: + self.invalid_client.get_bucket_location(oss.models.GetBucketLocationRequest( + bucket=bucket_name, + )) + self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(404, 
serr.status_code)
+            self.assertEqual(24, len(serr.request_id))
+            self.assertEqual('NoSuchBucket', serr.code)
+
+    def test_get_bucket_info(self):
+        bucket_name = random_bucket_name()
+        result = self.client.put_bucket(oss.PutBucketRequest(
+            bucket=bucket_name,
+            acl='private',
+            create_bucket_configuration=oss.CreateBucketConfiguration(
+                storage_class='IA'
+            )
+        ))
+        self.assertEqual(200, result.status_code)
+        self.assertEqual('OK', result.status)
+        self.assertEqual(24, len(result.request_id))
+        self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
+
+        # get bucket info
+        result = self.client.get_bucket_info(oss.models.GetBucketInfoRequest(
+            bucket=bucket_name,
+        ))
+        self.assertEqual(200, result.status_code)
+        self.assertEqual(24, len(result.request_id))
+        self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
+
+        self.assertEqual('private', result.bucket_info.acl)
+        self.assertEqual('Disabled', result.bucket_info.access_monitor)
+        self.assertEqual(False, result.bucket_info.block_public_access)
+        self.assertEqual('LRS', result.bucket_info.data_redundancy_type)
+        self.assertEqual('Disabled', result.bucket_info.cross_region_replication)
+        self.assertIsNotNone(result.bucket_info.resource_group_id)
+        self.assertIsNotNone(result.bucket_info.creation_date)
+        self.assertIsNotNone(result.bucket_info.extranet_endpoint)
+        self.assertIsNotNone(result.bucket_info.intranet_endpoint)
+        self.assertIsNotNone(result.bucket_info.location)
+        self.assertIsNotNone(result.bucket_info.transfer_acceleration)
+        self.assertEqual('IA', result.bucket_info.storage_class)
+        self.assertIsNotNone(result.bucket_info.owner.id)
+        self.assertIsNotNone(result.bucket_info.owner.display_name)
+        self.assertIsNotNone(result.bucket_info.sse_rule.sse_algorithm)
+
+
+    def test_get_bucket_info_fail(self):
+        bucket_name = random_bucket_name()
+        try:
+            self.invalid_client.get_bucket_info(oss.models.GetBucketInfoRequest(
+                bucket=bucket_name,
+            ))
+            self.fail("should not here")
+        except Exception as e:
+            ope = cast(oss.exceptions.OperationError, e)
+            self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+            serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+            self.assertEqual(404, serr.status_code)
+            self.assertEqual(24, len(serr.request_id))
+            self.assertEqual('NoSuchBucket', serr.code)
+
+
+    def test_bucket_versions(self):
+        bucket_name = random_bucket_name()
+        result = self.client.put_bucket(oss.PutBucketRequest(
+            bucket=bucket_name,
+            acl='private',
+            create_bucket_configuration=oss.CreateBucketConfiguration(
+                storage_class='IA'
+            )
+        ))
+        self.assertEqual(200, result.status_code)
+        self.assertEqual('OK', result.status)
+        self.assertEqual(24, len(result.request_id))
+        self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
+
+        # put bucket versioning
+        result = self.client.put_bucket_versioning(oss.PutBucketVersioningRequest(
+            bucket=bucket_name,
+            versioning_configuration=oss.VersioningConfiguration(
+                status='Enabled'
+            )
+        ))
+        self.assertEqual(200, result.status_code)
+        self.assertEqual(24, len(result.headers.get('x-oss-request-id')))
+
+        # get bucket versioning
+        result = self.client.get_bucket_versioning(oss.GetBucketVersioningRequest(
+            bucket=bucket_name,
+        ))
+        self.assertEqual(200, result.status_code)
+        self.assertEqual('Enabled', result.version_status)
+
+        # list object versions
+        result = self.client.list_object_versions(oss.ListObjectVersionsRequest(
+            bucket=bucket_name,
+        ))
+        self.assertEqual(24, len(result.request_id))
+        self.assertEqual(24,
len(result.headers.get('x-oss-request-id'))) + self.assertEqual(bucket_name, result.name) + self.assertEqual(False, result.is_truncated) + self.assertEqual(100, result.max_keys) + + # list object versions case 2 + result = self.client.list_object_versions(oss.ListObjectVersionsRequest( + bucket=bucket_name, + delimiter='/', + key_marker='MARKER', + max_keys=999, + prefix='AA/a', + encoding_type='url', + )) + self.assertEqual(24, len(result.request_id)) + self.assertEqual(24, len(result.headers.get('x-oss-request-id'))) + self.assertEqual(bucket_name, result.name) + self.assertEqual(False, result.is_truncated) + self.assertEqual(999, result.max_keys) + self.assertEqual('/', result.delimiter) + self.assertEqual('MARKER', result.key_marker) + self.assertEqual('AA/a', result.prefix) + self.assertEqual('url', result.encoding_type) + + def test_put_bucket_versioning_fail(self): + bucket_name = random_bucket_name() + try: + self.invalid_client.put_bucket_versioning(oss.PutBucketVersioningRequest( + bucket=bucket_name, + )) + self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(404, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('NoSuchBucket', serr.code) + + def test_get_bucket_versioning_fail(self): + bucket_name = random_bucket_name() + try: + self.invalid_client.get_bucket_versioning(oss.GetBucketVersioningRequest( + bucket=bucket_name, + )) + self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(404, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('NoSuchBucket', serr.code) + + def test_list_object_versions_fail(self): + bucket_name = random_bucket_name() + try: + self.invalid_client.list_object_versions(oss.ListObjectVersionsRequest( + bucket=bucket_name, + )) + self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(404, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('NoSuchBucket', serr.code) + + +class TestRegion(TestIntegration): + def test_describe_regions(self): + result = self.client.describe_regions(oss.DescribeRegionsRequest( + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + self.assertEqual(24, len(result.request_id)) + self.assertEqual(24, len(result.headers.get('x-oss-request-id'))) + self.assertTrue(result.region_info.__len__()>1) + + + result = self.client.describe_regions(oss.DescribeRegionsRequest( + regions='oss-cn-hangzhou', + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + self.assertEqual(24, len(result.request_id)) + self.assertEqual(24, len(result.headers.get('x-oss-request-id'))) + self.assertTrue(result.region_info.__len__()==1) + self.assertEqual('oss-accelerate.aliyuncs.com', result.region_info[0].accelerate_endpoint) + self.assertEqual('oss-cn-hangzhou-internal.aliyuncs.com', result.region_info[0].internal_endpoint) + self.assertEqual('oss-cn-hangzhou.aliyuncs.com', result.region_info[0].internet_endpoint) + 
self.assertEqual('oss-cn-hangzhou', result.region_info[0].region) + + def test_describe_regions_fail(self): + try: + self.invalid_client.describe_regions(oss.DescribeRegionsRequest()) + self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('InvalidAccessKeyId', serr.code) + + try: + self.invalid_client.describe_regions(oss.DescribeRegionsRequest( + regions='oss-cn-hangzhou', + )) + self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('InvalidAccessKeyId', serr.code) + + +class TestObjectBasic(TestIntegration): + def test_object_basic(self): + len = 1 * 1024 * 1024 + 1234 + #len = 1234 + data = random_str(len) + key = 'test-key' + result = self.client.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key, + body=data, + )) + self.assertIsNotNone(result) + self.assertIsInstance(result, oss.PutObjectResult) + self.assertEqual(200, result.status_code) + + result = self.client.head_object(oss.HeadObjectRequest( + bucket=self.bucket_name, + key=key, + )) + self.assertIsNotNone(result) + self.assertIsInstance(result, oss.HeadObjectResult) + self.assertEqual(200, result.status_code) + self.assertEqual(len, result.content_length) + + result = self.client.get_object(oss.GetObjectRequest( + bucket=self.bucket_name, + key=key + )) + self.assertIsNotNone(result) + self.assertIsInstance(result, oss.GetObjectResult) + self.assertEqual(200, result.status_code) + self.assertEqual(len, result.content_length) + + rdata = b''.join(result.body.iter_bytes()) or b'' + self.assertEqual(data.encode(), rdata) + result.body.close() + + result = self.client.get_object_meta(oss.GetObjectMetaRequest( + bucket=self.bucket_name, + key=key, + )) + self.assertIsNotNone(result) + self.assertIsInstance(result, oss.GetObjectMetaResult) + self.assertEqual(200, result.status_code) + self.assertEqual(len, result.content_length) + + def test_put_object_fail(self): + try: + self.invalid_client.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key='invalid-key', + body=b'hello world', + )) + self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('InvalidAccessKeyId', serr.code) + self.assertIn('PutObject', str(e)) + self.assertIn('Endpoint: PUT', str(e)) + + def test_get_object_fail(self): + try: + self.invalid_client.get_object(oss.GetObjectRequest( + bucket=self.bucket_name, + key='invalid-key', + )) + self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('InvalidAccessKeyId', serr.code) + self.assertIn('GetObject', 
str(e)) + self.assertIn('Endpoint: GET', str(e)) + + def test_head_object_fail(self): + try: + self.invalid_client.head_object(oss.HeadObjectRequest( + bucket=self.bucket_name, + key='invalid-key', + )) + self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('InvalidAccessKeyId', serr.code) + self.assertIn('HeadObject', str(e)) + self.assertIn('Endpoint: HEAD', str(e)) + + def test_get_object_meta_fail(self): + try: + self.invalid_client.get_object_meta(oss.GetObjectMetaRequest( + bucket=self.bucket_name, + key='invalid-key', + )) + self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('InvalidAccessKeyId', serr.code) + self.assertIn('GetObjectMeta', str(e)) + self.assertIn('Endpoint: HEAD', str(e)) + + def test_get_object_range(self): + len = 12345 + step = 2512 + data = random_str(len) + key = 'test-key-range' + result = self.client.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key, + body=data, + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + rdata = b'' + for r in range(0, len, step): + gresult = self.client.get_object(oss.GetObjectRequest( + bucket=self.bucket_name, + key=key, + range_header=f'bytes={r}-{r+step-1}', + range_behavior='standard' + )) + self.assertIsNotNone(gresult) + self.assertEqual(206, gresult.status_code) + self.assertLessEqual(gresult.content_length, step) + got = b''.join(gresult.body.iter_bytes()) or b'' + rdata += got + + self.assertEqual(data.encode(), rdata) + gresult.body.close() + + def test_append_object(self): + data1 = b'hello' + data2 = b' world' + + key = 'append_object' + result = self.client.append_object(oss.AppendObjectRequest( + bucket=self.bucket_name, + key=key, + position=0, + body=data1, + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + self.assertEqual(5, result.next_position) + + result = self.client.append_object(oss.AppendObjectRequest( + bucket=self.bucket_name, + key=key, + position=result.next_position, + body=data2, + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + self.assertEqual(11, result.next_position) + + gresult = self.client.get_object(oss.GetObjectRequest( + bucket=self.bucket_name, + key=key, + )) + + got = b''.join(gresult.body.iter_bytes()) or b'' + self.assertEqual(b'hello world', got) + + def test_append_object_fail(self): + try: + self.invalid_client.append_object(oss.AppendObjectRequest( + bucket=self.bucket_name, + key='invalid-key', + position=0, + body=b'hello world', + )) + self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('InvalidAccessKeyId', serr.code) + + def test_delete_object(self): + length = 1234 + data = random_str(length) + key = f'test-key-delete-object-{random_str(16)}' 
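+        # Flow under test: upload an object, confirm it exists with HeadObject,
+        # delete it, then expect HeadObject to raise NoSuchKey; finally verify
+        # that deleting a non-existent key still returns 204 with no version id
+        # or delete marker.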
+        result = self.client.put_object(oss.PutObjectRequest(
+            bucket=self.bucket_name,
+            key=key,
+            body=data,
+        ))
+        self.assertIsNotNone(result)
+        self.assertEqual(200, result.status_code)
+
+        result = self.client.head_object(oss.HeadObjectRequest(
+            bucket=self.bucket_name,
+            key=key,
+        ))
+        self.assertIsNotNone(result)
+        self.assertEqual(200, result.status_code)
+        self.assertEqual(length, result.content_length)
+
+        result = self.client.delete_object(oss.DeleteObjectRequest(
+            bucket=self.bucket_name,
+            key=key,
+        ))
+        self.assertIsNotNone(result)
+        self.assertEqual(204, result.status_code)
+        self.assertIsInstance(result, oss.DeleteObjectResult)
+
+        try:
+            self.client.head_object(oss.HeadObjectRequest(
+                bucket=self.bucket_name,
+                key=key,
+            ))
+            self.fail("should not here")
+        except Exception as err:
+            self.assertIn('NoSuchKey', str(err))
+
+
+        key = f'test-key-delete-object-no-exist-{random_str(16)}'
+        result = self.client.delete_object(oss.DeleteObjectRequest(
+            bucket=self.bucket_name,
+            key=key,
+        ))
+        self.assertIsNotNone(result)
+        self.assertEqual(204, result.status_code)
+        self.assertIsNone(result.version_id)
+        self.assertIsNone(result.delete_marker)
+        self.assertIsInstance(result, oss.DeleteObjectResult)
+
+
+    def test_delete_object_fail(self):
+        try:
+            self.invalid_client.delete_object(oss.DeleteObjectRequest(
+                bucket=self.bucket_name,
+                key='invalid-key',
+            ))
+            self.fail("should not here")
+        except Exception as e:
+            ope = cast(oss.exceptions.OperationError, e)
+            self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+            serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+            self.assertEqual(403, serr.status_code)
+            self.assertEqual(24, len(serr.request_id))
+            self.assertEqual('InvalidAccessKeyId', serr.code)
+
+
+    def test_delete_multiple_objects(self):
+        length = 1234
+        data = random_str(length)
+        key = OBJECTNAME_PREFIX + random_str(16)
+        result = self.client.put_object(oss.PutObjectRequest(
+            bucket=self.bucket_name,
+            key=key,
+            body=data,
+        ))
+        self.assertIsNotNone(result)
+        self.assertEqual(200, result.status_code)
+
+        result = self.client.delete_multiple_objects(oss.DeleteMultipleObjectsRequest(
+            bucket=self.bucket_name,
+            objects=[oss.DeleteObject(key=key)],
+        ))
+        self.assertIsInstance(result, oss.DeleteMultipleObjectsResult)
+        self.assertEqual(200, result.status_code)
+        self.assertIsNotNone(result.headers.get('x-oss-request-id'))
+        self.assertEqual(1, len(result.deleted_objects))
+        self.assertEqual(key, result.deleted_objects[0].key)
+
+        str1 = b'\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f'
+        key = OBJECTNAME_PREFIX + random_str(16) + str1.decode()
+        result = self.client.put_object(oss.PutObjectRequest(
+            bucket=self.bucket_name,
+            key=key,
+            body=data,
+        ))
+        self.assertIsNotNone(result)
+        self.assertEqual(200, result.status_code)
+
+        result = self.client.head_object(oss.HeadObjectRequest(
+            bucket=self.bucket_name,
+            key=key,
+        ))
+        self.assertIsNotNone(result)
+        self.assertEqual(200, result.status_code)
+        self.assertEqual(length, result.content_length)
+
+        result = self.client.delete_multiple_objects(oss.DeleteMultipleObjectsRequest(
+            bucket=self.bucket_name,
+            encoding_type='url',
+            objects=[oss.DeleteObject(key=key)],
+        ))
+        self.assertIsInstance(result, oss.DeleteMultipleObjectsResult)
+        self.assertEqual(200, result.status_code)
+        self.assertIsNotNone(result.headers.get('x-oss-request-id'))
+        self.assertEqual(1, len(result.deleted_objects))
+        self.assertEqual(key, result.deleted_objects[0].key)
+
+        try:
+            self.client.head_object(oss.HeadObjectRequest(
+                bucket=self.bucket_name,
+                key=key,
+            ))
+            self.fail("should not here")
+        except Exception as err:
+            self.assertIsInstance(err, oss.exceptions.OperationError)
+            err = cast(oss.exceptions.OperationError, err)
+            serr = err.unwrap()
+            self.assertIsInstance(serr, oss.exceptions.ServiceError)
+            serr = cast(oss.exceptions.ServiceError, serr)
+            self.assertIn('NoSuchKey', serr.code)
+
+    def test_restore_object(self):
+        length = 123
+        data = random_str(length)
+        key = OBJECTNAME_PREFIX + random_str(16)
+        result = self.client.put_object(oss.PutObjectRequest(
+            bucket=self.bucket_name,
+            key=key,
+            storage_class=oss.StorageClassType.ARCHIVE,
+            body=data,
+        ))
+        self.assertIsNotNone(result)
+        self.assertEqual(200, result.status_code)
+
+        result = self.client.restore_object(oss.RestoreObjectRequest(
+            bucket=self.bucket_name,
+            key=key,
+        ))
+        self.assertIsNotNone(result)
+        self.assertIsInstance(result, oss.RestoreObjectResult)
+        self.assertEqual(202, result.status_code)
+
+        try:
+            result = self.client.restore_object(oss.RestoreObjectRequest(
+                bucket=self.bucket_name,
+                key=key,
+            ))
+            self.fail("should not here")
+        except Exception as err:
+            self.assertIsInstance(err, oss.exceptions.OperationError)
+            err = cast(oss.exceptions.OperationError, err)
+            serr = err.unwrap()
+            self.assertIsInstance(serr, oss.exceptions.ServiceError)
+            serr = cast(oss.exceptions.ServiceError, serr)
+            self.assertIn('RestoreAlreadyInProgress', serr.code)
+            self.assertIn('The restore operation is in progress.', serr.message)
+
+    def test_object_acl(self):
+        length = 123
+        data = random_str(length)
+        key = OBJECTNAME_PREFIX + random_str(16)
+        result = self.client.put_object(oss.PutObjectRequest(
+            bucket=self.bucket_name,
+            key=key,
+            acl=oss.ObjectACLType.PRIVATE,
+            body=data,
+        ))
+        self.assertIsNotNone(result)
+        self.assertEqual(200, result.status_code)
+
+        result = self.client.get_object_acl(oss.GetObjectAclRequest(
+            bucket=self.bucket_name,
+            key=key,
+        ))
+        self.assertIsNotNone(result)
+        self.assertIsInstance(result, oss.GetObjectAclResult)
+        self.assertEqual(200, result.status_code)
+        self.assertEqual('private', result.acl)
+
+        result = self.client.put_object_acl(oss.PutObjectAclRequest(
+            bucket=self.bucket_name,
+            key=key,
+            acl=oss.ObjectACLType.PUBLICREAD
+        ))
+        self.assertIsNotNone(result)
+        self.assertIsInstance(result, oss.PutObjectAclResult)
+        self.assertEqual(200, result.status_code)
+
+        result = self.client.get_object_acl(oss.GetObjectAclRequest(
+            bucket=self.bucket_name,
+            key=key,
+        ))
+        self.assertIsNotNone(result)
+        self.assertIsInstance(result, oss.GetObjectAclResult)
+        self.assertEqual(200, result.status_code)
+        self.assertEqual('public-read', result.acl)
+
+    def test_get_object_acl_fail(self):
+        try:
+            self.invalid_client.get_object_acl(oss.GetObjectAclRequest(
+                bucket=self.bucket_name,
+                key='invalid-key',
+            ))
+            self.fail("should not here")
+        except Exception as e:
+            ope = cast(oss.exceptions.OperationError, e)
+            self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError)
+            serr = cast(oss.exceptions.ServiceError, ope.unwrap())
+            self.assertEqual(403, serr.status_code)
+            self.assertEqual(24, len(serr.request_id))
+            self.assertEqual('InvalidAccessKeyId', serr.code)
+            self.assertIn('GetObjectAcl', str(e))
+            self.assertIn('Endpoint: GET', str(e))
+
+    def test_put_object_acl_fail(self):
+        try:
+            self.invalid_client.put_object_acl(oss.PutObjectAclRequest(
+                bucket=self.bucket_name,
+                key='invalid-key',
+                acl='private',
+            ))
self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertIn('PutObjectAcl', str(e)) + self.assertIn('Endpoint: PUT', str(e)) + +class TestMultipartUpload(TestIntegration): + def test_multipart_upload_object(self): + length1 = 100*1024 + data1 = random_str(length1) + length2 = 1234 + data2 = random_str(length2) + key = OBJECTNAME_PREFIX + random_str(16) + + result = self.client.initiate_multipart_upload(oss.InitiateMultipartUploadRequest( + bucket=self.bucket_name, + key=key, + )) + self.assertIsNotNone(result) + self.assertIsInstance(result, oss.InitiateMultipartUploadResult) + self.assertEqual(200, result.status_code) + self.assertEqual(self.bucket_name, result.bucket) + self.assertEqual(key, result.key) + self.assertIsNotNone(key, result.upload_id) + + presult1 = self.client.upload_part(oss.UploadPartRequest( + bucket=self.bucket_name, + key=key, + part_number=1, + upload_id=result.upload_id, + body=data1, + )) + self.assertIsNotNone(presult1) + self.assertIsInstance(presult1, oss.UploadPartResult) + self.assertEqual(200, presult1.status_code) + self.assertIsNotNone(presult1.content_md5) + self.assertIsNotNone(presult1.etag) + self.assertIsNotNone(presult1.hash_crc64) + + presult2 = self.client.upload_part(oss.UploadPartRequest( + bucket=self.bucket_name, + key=key, + part_number=2, + upload_id=result.upload_id, + body=data2, + )) + self.assertIsNotNone(presult2) + self.assertIsInstance(presult2, oss.UploadPartResult) + self.assertEqual(200, presult2.status_code) + self.assertIsNotNone(presult2.content_md5) + self.assertIsNotNone(presult2.etag) + self.assertIsNotNone(presult2.hash_crc64) + + lpresult = self.client.list_parts(oss.ListPartsRequest( + bucket=self.bucket_name, + key=key, + upload_id=result.upload_id, + )) + self.assertIsNotNone(lpresult) + self.assertIsInstance(lpresult, oss.ListPartsResult) + self.assertEqual(200, lpresult.status_code) + self.assertEqual(self.bucket_name, lpresult.bucket) + self.assertEqual(key, lpresult.key) + self.assertEqual(2, lpresult.next_part_number_marker) + self.assertEqual(0, lpresult.part_number_marker) + self.assertEqual(False, lpresult.is_truncated) + self.assertEqual(1000, lpresult.max_parts) + self.assertEqual('Standard', lpresult.storage_class) + self.assertEqual(2, len(lpresult.parts)) + self.assertEqual(1, lpresult.parts[0].part_number) + self.assertEqual(length1, lpresult.parts[0].size) + self.assertEqual(presult1.etag, lpresult.parts[0].etag) + self.assertEqual(presult1.hash_crc64, lpresult.parts[0].hash_crc64) + self.assertEqual(2, lpresult.parts[1].part_number) + self.assertEqual(length2, lpresult.parts[1].size) + self.assertEqual(presult2.etag, lpresult.parts[1].etag) + self.assertEqual(presult2.hash_crc64, lpresult.parts[1].hash_crc64) + + cresult = self.client.complete_multipart_upload(oss.CompleteMultipartUploadRequest( + bucket=self.bucket_name, + key=key, + upload_id=result.upload_id, + body=data2, + complete_multipart_upload=oss.CompleteMultipartUpload( + parts=[ + oss.UploadPart(part_number=1, etag=presult1.etag), + oss.UploadPart(part_number=2, etag=presult2.etag), + ] + ) + )) + self.assertIsNotNone(cresult) + self.assertIsInstance(cresult, oss.CompleteMultipartUploadResult) + self.assertEqual(200, cresult.status_code) + self.assertEqual(self.bucket_name, cresult.bucket) + self.assertEqual(key, cresult.key) 
+ self.assertIsNotNone(cresult.etag) + self.assertIsNotNone(cresult.hash_crc64) + + gresult = self.client.get_object(oss.GetObjectRequest( + bucket=self.bucket_name, + key=key, + )) + self.assertIsNotNone(gresult) + self.assertEqual(200, gresult.status_code) + self.assertEqual(length1 + length2, gresult.content_length) + rdata = b''.join(gresult.body.iter_bytes()) or b'' + self.assertEqual(data1 + data2, rdata.decode()) + + def test_multipart_upload_object_special_key(self): + length1 = 100*1024 + data1 = random_str(length1) + length2 = 1234 + data2 = random_str(length2) + str1 = b'\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f' + key = OBJECTNAME_PREFIX + random_str(16) + str1.decode() + + result = self.client.initiate_multipart_upload(oss.InitiateMultipartUploadRequest( + bucket=self.bucket_name, + key=key, + )) + self.assertIsNotNone(result) + self.assertIsInstance(result, oss.InitiateMultipartUploadResult) + self.assertEqual(200, result.status_code) + self.assertEqual(self.bucket_name, result.bucket) + self.assertEqual(key, result.key) + self.assertIsNotNone(key, result.upload_id) + + presult1 = self.client.upload_part(oss.UploadPartRequest( + bucket=self.bucket_name, + key=key, + part_number=1, + upload_id=result.upload_id, + body=data1, + )) + self.assertIsNotNone(presult1) + self.assertIsInstance(presult1, oss.UploadPartResult) + self.assertEqual(200, presult1.status_code) + self.assertIsNotNone(presult1.content_md5) + self.assertIsNotNone(presult1.etag) + self.assertIsNotNone(presult1.hash_crc64) + + presult2 = self.client.upload_part(oss.UploadPartRequest( + bucket=self.bucket_name, + key=key, + part_number=2, + upload_id=result.upload_id, + body=data2, + )) + self.assertIsNotNone(presult2) + self.assertIsInstance(presult2, oss.UploadPartResult) + self.assertEqual(200, presult2.status_code) + self.assertIsNotNone(presult2.content_md5) + self.assertIsNotNone(presult2.etag) + self.assertIsNotNone(presult2.hash_crc64) + + cresult = self.client.complete_multipart_upload(oss.CompleteMultipartUploadRequest( + bucket=self.bucket_name, + key=key, + upload_id=result.upload_id, + body=data2, + complete_multipart_upload=oss.CompleteMultipartUpload( + parts=[ + oss.UploadPart(part_number=1, etag=presult1.etag), + oss.UploadPart(part_number=2, etag=presult2.etag), + ] + ) + )) + self.assertIsNotNone(cresult) + self.assertIsInstance(cresult, oss.CompleteMultipartUploadResult) + self.assertEqual(200, cresult.status_code) + self.assertEqual(self.bucket_name, cresult.bucket) + self.assertEqual(key, cresult.key) + self.assertIsNotNone(cresult.etag) + self.assertIsNotNone(cresult.hash_crc64) + + gresult = self.client.get_object(oss.GetObjectRequest( + bucket=self.bucket_name, + key=key, + )) + self.assertIsNotNone(gresult) + self.assertEqual(200, gresult.status_code) + self.assertEqual(length1 + length2, gresult.content_length) + rdata = b''.join(gresult.body.iter_bytes()) or b'' + self.assertEqual(data1 + data2, rdata.decode()) + + + def test_multipart_upload_object_encoding_type(self): + str1 = b'\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10' + key = OBJECTNAME_PREFIX + random_str(16) + str1.decode() + + result = self.client.initiate_multipart_upload(oss.InitiateMultipartUploadRequest( + bucket=self.bucket_name, + key=key, + )) + self.assertIsNotNone(result) + self.assertIsInstance(result, oss.InitiateMultipartUploadResult) + self.assertEqual(200, result.status_code) + self.assertEqual(self.bucket_name, result.bucket) + 
self.assertEqual(key, result.key) + self.assertIsNotNone(key, result.upload_id) + + presult1 = self.client.upload_part(oss.UploadPartRequest( + bucket=self.bucket_name, + key=key, + part_number=1, + upload_id=result.upload_id, + body='hello world', + )) + self.assertIsNotNone(presult1) + self.assertIsInstance(presult1, oss.UploadPartResult) + self.assertEqual(200, presult1.status_code) + self.assertIsNotNone(presult1.content_md5) + self.assertIsNotNone(presult1.etag) + self.assertIsNotNone(presult1.hash_crc64) + + lpresult = self.client.list_parts(oss.ListPartsRequest( + bucket=self.bucket_name, + key=key, + upload_id=result.upload_id, + )) + self.assertIsNotNone(lpresult) + self.assertIsInstance(lpresult, oss.ListPartsResult) + self.assertEqual(200, lpresult.status_code) + self.assertEqual(self.bucket_name, lpresult.bucket) + self.assertEqual(key, lpresult.key) + + luresult = self.client.list_multipart_uploads(oss.ListMultipartUploadsRequest( + bucket=self.bucket_name, + )) + self.assertIsNotNone(luresult) + self.assertIsInstance(luresult, oss.ListMultipartUploadsResult) + self.assertEqual(200, luresult.status_code) + self.assertEqual(self.bucket_name, luresult.bucket) + self.assertEqual(False, luresult.is_truncated) + self.assertEqual(None, luresult.key_marker) + self.assertEqual(key, luresult.next_key_marker) + self.assertEqual(1, len(luresult.uploads)) + self.assertEqual(key, luresult.uploads[0].key) + self.assertEqual(result.upload_id, luresult.uploads[0].upload_id) + + abresult = self.client.abort_multipart_upload(oss.AbortMultipartUploadRequest( + bucket=self.bucket_name, + key=key, + upload_id=result.upload_id, + )) + self.assertIsNotNone(abresult) + self.assertIsInstance(abresult, oss.AbortMultipartUploadResult) + self.assertEqual(204, abresult.status_code) + + def test_multipart_upload_from_file(self): + part_size = 100 * 1024 + data_size = 3 * part_size + 1245 + data = random_str(data_size).encode() + key = 'multipart-file.bin' + + #init + initresult = self.client.initiate_multipart_upload(oss.InitiateMultipartUploadRequest( + bucket=self.bucket_name, + key=key, + )) + self.assertIsNotNone(initresult) + self.assertIsInstance(initresult, oss.InitiateMultipartUploadResult) + self.assertEqual(200, initresult.status_code) + + #upload part + part_number = 1 + upload_parts = [] + with tempfile.TemporaryFile('w+b') as f: + f.write(data) + for start in range(0, data_size, part_size): + n = part_size + if start + n > data_size: + n = data_size - start + reader = oss.io_utils.SectionReader(oss.io_utils.ReadAtReader(f), start, n) + upresult = self.client.upload_part(oss.UploadPartRequest( + bucket=self.bucket_name, + key=key, + upload_id=initresult.upload_id, + part_number=part_number, + body=reader + )) + self.assertIsNotNone(upresult) + self.assertIsInstance(upresult, oss.UploadPartResult) + self.assertEqual(200, upresult.status_code) + upload_parts.append(oss.UploadPart(part_number=part_number, etag=upresult.etag)) + part_number += 1 + + self.assertEqual(4, len(upload_parts)) + + #listpart + lpresult = self.client.list_parts(oss.ListPartsRequest( + bucket=self.bucket_name, + key=key, + upload_id=initresult.upload_id + )) + self.assertIsNotNone(lpresult) + self.assertIsInstance(lpresult, oss.ListPartsResult) + self.assertEqual(200, lpresult.status_code) + + #complete + parts = sorted(upload_parts, key=lambda p: p.part_number) + cmresult = self.client.complete_multipart_upload(oss.CompleteMultipartUploadRequest( + bucket=self.bucket_name, + key=key, + upload_id=initresult.upload_id, + 
complete_multipart_upload=oss.CompleteMultipartUpload( + parts=parts + ) + )) + self.assertIsNotNone(cmresult) + self.assertIsInstance(cmresult, oss.CompleteMultipartUploadResult) + self.assertEqual(200, cmresult.status_code) + + # get object and check + gowresult = self.client.get_object(oss.GetObjectRequest( + bucket=self.bucket_name, + key=key + )) + self.assertIsNotNone(gowresult) + self.assertIsInstance(gowresult, oss.GetObjectResult) + self.assertEqual(200, gowresult.status_code) + self.assertEqual(data_size, len(gowresult.body.content)) + self.assertEqual(data, gowresult.body.content) + + def test_initiate_multipart_upload_fail(self): + try: + self.invalid_client.initiate_multipart_upload(oss.InitiateMultipartUploadRequest( + bucket=self.bucket_name, + key='invalid-key', + )) + self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('InvalidAccessKeyId', serr.code) + self.assertIn('InitiateMultipartUpload', str(e)) + self.assertIn('Endpoint: POST', str(e)) + + def test_upload_part_fail(self): + try: + self.invalid_client.upload_part(oss.UploadPartRequest( + bucket=self.bucket_name, + key='invalid-key', + upload_id='upload-id', + part_number=1, + body='hello world' + )) + self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('InvalidAccessKeyId', serr.code) + self.assertIn('UploadPart', str(e)) + self.assertIn('Endpoint: PUT', str(e)) + + def test_upload_part_copy_fail(self): + try: + self.invalid_client.upload_part_copy(oss.UploadPartCopyRequest( + bucket=self.bucket_name, + key='invalid-key', + source_key='source-invalid-key', + upload_id='upload-id', + part_number=1, + body='hello world' + )) + self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('InvalidAccessKeyId', serr.code) + self.assertIn('UploadPartCopy', str(e)) + self.assertIn('Endpoint: PUT', str(e)) + + def test_complete_multipart_upload_fail(self): + try: + self.invalid_client.complete_multipart_upload(oss.CompleteMultipartUploadRequest( + bucket=self.bucket_name, + key='invalid-key', + upload_id='upload-id', + complete_all='yes' + )) + self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('InvalidAccessKeyId', serr.code) + self.assertIn('CompleteMultipartUpload', str(e)) + self.assertIn('Endpoint: POST', str(e)) + + def test_abort_multipart_upload_fail(self): + try: + self.invalid_client.abort_multipart_upload(oss.AbortMultipartUploadRequest( + bucket=self.bucket_name, + key='invalid-key', + upload_id='upload-id', + )) + 
self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('InvalidAccessKeyId', serr.code) + self.assertIn('AbortMultipartUpload', str(e)) + self.assertIn('Endpoint: DELETE', str(e)) + + def test_list_multipart_uploads_fail(self): + try: + self.invalid_client.list_multipart_uploads(oss.ListMultipartUploadsRequest( + bucket=self.bucket_name, + )) + self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('InvalidAccessKeyId', serr.code) + self.assertIn('ListMultipartUploads', str(e)) + self.assertIn('Endpoint: GET', str(e)) + + def test_list_parts_fail(self): + try: + self.invalid_client.list_parts(oss.ListPartsRequest( + bucket=self.bucket_name, + key='invalid-key', + upload_id='upload-id', + )) + self.fail("should not here") + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual(24, len(serr.request_id)) + self.assertEqual('InvalidAccessKeyId', serr.code) + self.assertIn('ListParts', str(e)) + self.assertIn('Endpoint: GET', str(e)) + + +class TestPresign(TestIntegration): + def test_presign_get_object(self): + len = 1234 + data = random_str(len) + key = 'presign-get-test-key' + result = self.client.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key, + body=data, + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + preresult = self.client.presign(oss.GetObjectRequest( + bucket=self.bucket_name, + key=key, + )) + + with requests.get(preresult.url) as resp: + self.assertEqual(data.encode(), resp.content) + self.assertEqual(200, resp.status_code) + + def test_presign_put_object(self): + len = 1234 + data = random_str(len) + key = 'presign-put-test-key' + preresult = self.client.presign(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key, + content_type='text/txt' + )) + + with requests.put(preresult.url, headers=preresult.signed_headers, data=data) as resp: + self.assertEqual(200, resp.status_code) + + result = self.client.get_object(oss.GetObjectRequest( + bucket=self.bucket_name, + key=key, + )) + + result.body.read() + self.assertEqual(200, result.status_code) + self.assertEqual(data.encode(), result.body.content) + self.assertEqual('text/txt', result.headers.get('content-type')) + + def test_presign_without_signed_headers(self): + len = 1234 + data = random_str(len) + key = 'presign-put-test-key-fail' + preresult = self.client.presign(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key, + content_type='text/txt' + )) + + with requests.put(preresult.url, data=data) as resp: + self.assertEqual(403, resp.status_code) + + def test_presign_fail(self): + # unsupport request + request = oss.ListObjectsV2Request( + bucket='bucket' + ) + + try: + self.client.presign(request) + self.fail("should not here") + except Exception as err: + self.assertIsInstance(err, oss.exceptions.ParamInvalidError) + + # 
greater than 7 days + request = oss.GetObjectRequest( + bucket='bucket', + key='key+123', + version_id='versionId' + ) + try: + timedelta = datetime.timedelta(days=8) + datetime_now = datetime.datetime.now(datetime.timezone.utc) + expiration = datetime_now + timedelta + self.client.presign(request, expiration=expiration) + self.fail("should not here") + except Exception as err: + self.assertIsInstance(err, oss.exceptions.PresignExpirationError) + + def test_presign_head_object(self): + len = 1234 + data = random_str(len) + key = 'presign-head-test-key' + result = self.client.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key, + body=data, + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + preresult = self.client.presign(oss.HeadObjectRequest( + bucket=self.bucket_name, + key=key, + )) + + with requests.head(preresult.url) as resp: + self.assertEqual(200, resp.status_code) + self.assertEqual('1234', resp.headers.get('content-length')) + + def test_presign_initiate_multipart_upload(self): + + key = 'presign-initiate-multipart-upload-test-key' + preresult = self.client.presign(oss.InitiateMultipartUploadRequest( + bucket=self.bucket_name, + key=key, + )) + + with requests.post(preresult.url) as resp: + self.assertEqual(200, resp.status_code) + obj = oss.InitiateMultipartUploadResult() + oss.serde.deserialize_xml(xml_data=resp.content, obj=obj) + self.assertEqual(self.bucket_name, obj.bucket) + self.assertEqual(key, obj.key) + self.assertIsNotNone(obj.upload_id) + + def test_presign_upload_part(self): + len = 1234 + data = random_str(len) + key = 'presign-upload-part-test-key' + init_pre_result = self.client.presign(oss.InitiateMultipartUploadRequest( + bucket=self.bucket_name, + key=key, + )) + + with requests.post(init_pre_result.url) as resp: + self.assertEqual(200, resp.status_code) + obj = oss.InitiateMultipartUploadResult() + oss.serde.deserialize_xml(xml_data=resp.content, obj=obj) + self.assertEqual(self.bucket_name, obj.bucket) + self.assertEqual(key, obj.key) + self.assertIsNotNone(obj.upload_id) + + for i in range(1, 4): + up_pre_result = self.client.presign(oss.UploadPartRequest( + bucket=self.bucket_name, + key=key, + upload_id=obj.upload_id, + part_number=i, + content_type='text/txt' + )) + + with requests.put(up_pre_result.url, headers=up_pre_result.signed_headers, data=data) as resp: + self.assertEqual(200, resp.status_code) + + + + def test_presign_complete_multipart_upload(self): + len = 200 * 1024 + data = random_str(len) + key = 'presign-complete-multipart-upload-test-key' + init_pre_result = self.client.presign(oss.InitiateMultipartUploadRequest( + bucket=self.bucket_name, + key=key, + content_type='text/txt' + )) + + with requests.post(init_pre_result.url, headers=init_pre_result.signed_headers) as resp: + self.assertEqual(200, resp.status_code) + obj = oss.InitiateMultipartUploadResult() + oss.serde.deserialize_xml(xml_data=resp.content, obj=obj) + self.assertEqual(self.bucket_name, obj.bucket) + self.assertEqual(key, obj.key) + self.assertIsNotNone(obj.upload_id) + + for i in range(1, 4): + up_pre_result = self.client.presign(oss.UploadPartRequest( + bucket=self.bucket_name, + key=key, + upload_id=obj.upload_id, + part_number=i, + )) + + with requests.put(up_pre_result.url, headers=up_pre_result.signed_headers, data=data) as resp: + self.assertEqual(200, resp.status_code) + + complete_pre_result = self.client.presign(oss.CompleteMultipartUploadRequest( + bucket=self.bucket_name, + key=key, + upload_id=obj.upload_id, + 
complete_all='yes' + )) + + with requests.post(complete_pre_result.url, headers=complete_pre_result.signed_headers) as resp: + self.assertEqual(200, resp.status_code) + obj = oss.CompleteMultipartUploadResult() + oss.serde.deserialize_xml(xml_data=resp.content, obj=obj) + self.assertEqual(self.bucket_name, obj.bucket) + self.assertEqual(key, obj.key) + + result = self.client.get_object(oss.GetObjectRequest( + bucket=self.bucket_name, + key=key, + )) + + result.body.read() + self.assertEqual(200, result.status_code) + self.assertEqual(3 * 200 * 1024, result.content_length) + self.assertEqual('text/txt', result.headers.get('content-type')) + + def test_presign_abort_multipart_upload(self): + len = 200 * 1024 + data = random_str(len) + key = 'presign-abort-multipart-upload-test-key' + + init_pre_result = self.client.presign(oss.InitiateMultipartUploadRequest( + bucket=self.bucket_name, + key=key, + content_type='text/txt' + )) + + with requests.post(init_pre_result.url, headers=init_pre_result.signed_headers) as resp: + self.assertEqual(200, resp.status_code) + obj = oss.InitiateMultipartUploadResult() + oss.serde.deserialize_xml(xml_data=resp.content, obj=obj) + self.assertEqual(self.bucket_name, obj.bucket) + self.assertEqual(key, obj.key) + self.assertIsNotNone(obj.upload_id) + + for i in range(1, 4): + up_pre_result = self.client.presign(oss.UploadPartRequest( + bucket=self.bucket_name, + key=key, + upload_id=obj.upload_id, + part_number=i, + )) + + with requests.put(up_pre_result.url, headers=up_pre_result.signed_headers, data=data) as resp: + self.assertEqual(200, resp.status_code) + + + list_result = self.client.list_parts(oss.ListPartsRequest( + bucket=self.bucket_name, + key=key, + upload_id=obj.upload_id, + )) + self.assertIsNotNone(list_result) + self.assertEqual(200, list_result.status_code) + self.assertEqual(self.bucket_name, list_result.bucket) + self.assertEqual(key, list_result.key) + self.assertEqual(3, list_result.parts.__len__()) + + + abort_result = self.client.presign(oss.AbortMultipartUploadRequest( + bucket=self.bucket_name, + key=key, + upload_id=obj.upload_id, + )) + + with requests.delete(abort_result.url, headers=abort_result.signed_headers) as resp: + self.assertEqual(204, resp.status_code) + + try: + self.client.list_parts(oss.ListPartsRequest( + bucket=self.bucket_name, + key=key, + upload_id=obj.upload_id, + )) + except Exception as e: + ope = cast(oss.exceptions.OperationError, e) + self.assertIsInstance(ope.unwrap(), oss.exceptions.ServiceError) + serr = cast(oss.exceptions.ServiceError, ope.unwrap()) + self.assertEqual(404, serr.status_code) + self.assertEqual('NoSuchUpload', serr.code) + + +class TestPaginator(TestIntegration): + def test_list_objects_paginator(self): + bucket_name = random_bucket_name() + self.client.put_bucket(oss.PutBucketRequest(bucket=bucket_name)) + + for i in range(9): + result = self.client.put_object(oss.PutObjectRequest( + bucket=bucket_name, + key=f'key-{i}' + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + paginator = self.client.list_objects_paginator(limit=1) + request = oss.ListObjectsRequest(bucket=bucket_name) + iterator = paginator.iter_page(request) + j = 0 + for p in iterator: + self.assertEqual(1, len(p.contents)) + self.assertEqual(f'key-{j}', p.contents[0].key) + self.assertEqual(0, p.contents[0].size) + j += 1 + self.assertIsNone(request.marker) + + iterator = paginator.iter_page(request, limit=3) + j = 0 + for p in iterator: + self.assertEqual(3, p.max_keys) + self.assertEqual(3, 
len(p.contents)) + self.assertEqual(f'key-{j*3}', p.contents[0].key) + self.assertEqual(0, p.contents[0].size) + j += 1 + self.assertIsNone(request.marker) + + paginator = self.client.list_objects_paginator() + request = oss.ListObjectsRequest(bucket=bucket_name, prefix='key-1') + iterator = paginator.iter_page(request) + j = 0 + for p in iterator: + self.assertEqual(100, p.max_keys) + self.assertEqual(1, len(p.contents)) + self.assertEqual(f'key-1', p.contents[0].key) + j += 1 + self.assertEqual(1, j) + + #encoding + for i in range(3): + result = self.client.put_object(oss.PutObjectRequest( + bucket=bucket_name, + key=f'url-%123-/?#:key-{i}' + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + request = oss.ListObjectsRequest(bucket=bucket_name, prefix='url-') + iterator = paginator.iter_page(request) + j = 0 + for p in iterator: + self.assertEqual(100, p.max_keys) + self.assertEqual(3, len(p.contents)) + self.assertEqual(f'url-%123-/?#:key-0', p.contents[0].key) + self.assertEqual(f'url-%123-/?#:key-1', p.contents[1].key) + self.assertEqual(f'url-%123-/?#:key-2', p.contents[2].key) + j += 1 + self.assertEqual(1, j) + + def test_list_v2_objects_paginator(self): + bucket_name = random_bucket_name() + self.client.put_bucket(oss.PutBucketRequest(bucket=bucket_name)) + + for i in range(9): + result = self.client.put_object(oss.PutObjectRequest( + bucket=bucket_name, + key=f'key-{i}' + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + paginator = self.client.list_objects_v2_paginator(limit=1) + request = oss.ListObjectsV2Request(bucket=bucket_name) + iterator = paginator.iter_page(request) + j = 0 + for p in iterator: + self.assertEqual(1, len(p.contents)) + self.assertEqual(f'key-{j}', p.contents[0].key) + self.assertEqual(0, p.contents[0].size) + j += 1 + self.assertIsNone(request.continuation_token) + + iterator = paginator.iter_page(request, limit=3) + j = 0 + for p in iterator: + self.assertEqual(3, p.max_keys) + self.assertEqual(3, len(p.contents)) + self.assertEqual(f'key-{j*3}', p.contents[0].key) + self.assertEqual(0, p.contents[0].size) + j += 1 + self.assertIsNone(request.continuation_token) + + paginator = self.client.list_objects_v2_paginator() + request = oss.ListObjectsV2Request(bucket=bucket_name, prefix='key-1') + iterator = paginator.iter_page(request) + j = 0 + for p in iterator: + self.assertEqual(100, p.max_keys) + self.assertEqual(1, len(p.contents)) + self.assertEqual(f'key-1', p.contents[0].key) + j += 1 + self.assertEqual(1, j) + + #encoding + for i in range(3): + result = self.client.put_object(oss.PutObjectRequest( + bucket=bucket_name, + key=f'url-%123-/?#:key-{i}' + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + request = oss.ListObjectsV2Request(bucket=bucket_name, prefix='url-') + iterator = paginator.iter_page(request) + j = 0 + for p in iterator: + self.assertEqual(100, p.max_keys) + self.assertEqual(3, len(p.contents)) + self.assertEqual(f'url-%123-/?#:key-0', p.contents[0].key) + self.assertEqual(f'url-%123-/?#:key-1', p.contents[1].key) + self.assertEqual(f'url-%123-/?#:key-2', p.contents[2].key) + j += 1 + self.assertEqual(1, j) + + def test_list_object_versions_paginator(self): + bucket_name = random_bucket_name() + self.client.put_bucket(oss.PutBucketRequest(bucket=bucket_name)) + + for i in range(9): + result = self.client.put_object(oss.PutObjectRequest( + bucket=bucket_name, + key=f'key-{i}' + )) + self.assertIsNotNone(result) + self.assertEqual(200, 
result.status_code) + + paginator = self.client.list_object_versions_paginator(limit=1) + request = oss.ListObjectVersionsRequest(bucket=bucket_name) + iterator = paginator.iter_page(request) + j = 0 + for p in iterator: + self.assertEqual(1, len(p.version)) + self.assertEqual(f'key-{j}', p.version[0].key) + self.assertEqual(0, p.version[0].size) + j += 1 + self.assertIsNone(request.key_marker) + self.assertIsNone(request.version_id_marker) + + iterator = paginator.iter_page(request, limit=3) + j = 0 + for p in iterator: + self.assertEqual(3, p.max_keys) + self.assertEqual(3, len(p.version)) + self.assertEqual(f'key-{j*3}', p.version[0].key) + self.assertEqual(0, p.version[0].size) + j += 1 + self.assertIsNone(request.key_marker) + self.assertIsNone(request.version_id_marker) + + paginator = self.client.list_object_versions_paginator() + request = oss.ListObjectVersionsRequest(bucket=bucket_name, prefix='key-1') + iterator = paginator.iter_page(request) + j = 0 + for p in iterator: + self.assertEqual(100, p.max_keys) + self.assertEqual(1, len(p.version)) + self.assertEqual(f'key-1', p.version[0].key) + j += 1 + self.assertEqual(1, j) + + # encoding + for i in range(3): + result = self.client.put_object(oss.PutObjectRequest( + bucket=bucket_name, + key=f'url-%123-/?#:key-{i}' + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + request = oss.ListObjectVersionsRequest(bucket=bucket_name, prefix='url-') + iterator = paginator.iter_page(request) + j = 0 + for p in iterator: + self.assertEqual(100, p.max_keys) + self.assertEqual(3, len(p.version)) + self.assertEqual(f'url-%123-/?#:key-0', p.version[0].key) + self.assertEqual(f'url-%123-/?#:key-1', p.version[1].key) + self.assertEqual(f'url-%123-/?#:key-2', p.version[2].key) + j += 1 + self.assertEqual(1, j) + + result = self.client.put_bucket_versioning(oss.PutBucketVersioningRequest( + bucket=bucket_name, + versioning_configuration=oss.VersioningConfiguration( + status='Enabled' + ) + )) + self.assertEqual(200, result.status_code) + self.assertEqual(24, len(result.headers.get('x-oss-request-id'))) + + # version_id + for i in range(3): + result = self.client.put_object(oss.PutObjectRequest( + bucket=bucket_name, + key=f'version_id-%123-/?#:key-{i}' + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + request = oss.ListObjectVersionsRequest(bucket=bucket_name, prefix='version_id-') + iterator = paginator.iter_page(request) + j = 0 + for p in iterator: + self.assertEqual(100, p.max_keys) + self.assertEqual(3, len(p.version)) + self.assertEqual(f'version_id-%123-/?#:key-0', p.version[0].key) + self.assertEqual(f'version_id-%123-/?#:key-1', p.version[1].key) + self.assertEqual(f'version_id-%123-/?#:key-2', p.version[2].key) + self.assertIsNotNone(p.version[j].version_id) + j += 1 + self.assertEqual(1, j) + + # delete all version files + request = oss.ListObjectVersionsRequest(bucket=bucket_name) + iterator = paginator.iter_page(request, limit=1) + delete_object = [] + for p in iterator: + delete_object.append(oss.DeleteObject(key=p.version[0].key, version_id=p.version[0].version_id)) + + result = self.client.delete_multiple_objects(oss.DeleteMultipleObjectsRequest( + bucket=bucket_name, + objects=delete_object)) + self.assertEqual(200, result.status_code) + + + def test_list_buckets_paginator(self): + bucket_name_prefix = random_bucket_name() + bucket_name1 = bucket_name_prefix + '-1' + self.client.put_bucket(oss.PutBucketRequest(bucket=bucket_name1)) + bucket_name2 = bucket_name_prefix 
+ '-2' + self.client.put_bucket(oss.PutBucketRequest(bucket=bucket_name2)) + bucket_name3 = bucket_name_prefix + '-3' + self.client.put_bucket(oss.PutBucketRequest(bucket=bucket_name3)) + + paginator = self.client.list_buckets_paginator(limit=1) + request = oss.ListBucketsRequest( + prefix=bucket_name_prefix + ) + iterator = paginator.iter_page(request) + j = 0 + for p in iterator: + self.assertEqual(1, len(p.buckets)) + j += 1 + self.assertIsNone(request.marker) + + iterator = paginator.iter_page(request, limit=3) + for p in iterator: + if p.is_truncated: + self.assertEqual(3, p.max_keys) + self.assertEqual(3, len(p.buckets)) + + self.assertIsNone(request.marker) + + paginator = self.client.list_buckets_paginator() + request = oss.ListBucketsRequest(prefix=bucket_name_prefix) + iterator = paginator.iter_page(request) + j = 0 + for p in iterator: + self.assertEqual(3, len(p.buckets)) + j += 1 + self.assertEqual(1, j) + + + def test_list_parts_paginator(self): + bucket_name = random_bucket_name() + self.client.put_bucket(oss.PutBucketRequest(bucket=bucket_name)) + + key = OBJECTNAME_PREFIX + random_str(16) + + init_result = self.client.initiate_multipart_upload(oss.InitiateMultipartUploadRequest( + bucket=bucket_name, + key=key, + )) + + for i in range(1,4): + result = self.client.upload_part(oss.UploadPartRequest( + bucket=bucket_name, + key=key, + part_number=i, + upload_id=init_result.upload_id, + body="data-test", + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + paginator = self.client.list_parts_paginator(limit=1) + request = oss.ListPartsRequest( + bucket=bucket_name, + key=key, + upload_id=init_result.upload_id) + iterator = paginator.iter_page(request) + j = 0 + for p in iterator: + self.assertEqual(1, len(p.parts)) + self.assertEqual(j+1, p.parts[0].part_number) + self.assertEqual(9, p.parts[0].size) + j += 1 + self.assertIsNone(request.part_mumber_marker) + + iterator = paginator.iter_page(request, limit=3) + j = 0 + for p in iterator: + self.assertEqual(3, p.max_parts) + self.assertEqual(3, len(p.parts)) + self.assertEqual(1, p.parts[0].part_number) + self.assertEqual(9, p.parts[0].size) + j += 1 + self.assertIsNone(request.part_mumber_marker) + + #encoding + encoding_key = 'url-%123-/?#:key-' + init_result2 = self.client.initiate_multipart_upload(oss.InitiateMultipartUploadRequest( + bucket=bucket_name, + key=encoding_key, + )) + + for i in range(1,4): + result = self.client.upload_part(oss.UploadPartRequest( + bucket=bucket_name, + key=encoding_key, + part_number=i, + upload_id=init_result2.upload_id, + body=f'data{i}', + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + request = oss.ListPartsRequest( + bucket=bucket_name, + key=encoding_key, + upload_id=init_result2.upload_id) + paginator = self.client.list_parts_paginator() + iterator = paginator.iter_page(request) + j = 0 + for p in iterator: + self.assertEqual(1000, p.max_parts) + self.assertEqual(3, len(p.parts)) + self.assertEqual(j+1, p.parts[0].part_number) + self.assertEqual(5, p.parts[0].size) + j += 1 + self.assertEqual(1, j) + + def test_list_multipart_uploads_paginator(self): + bucket_name = random_bucket_name() + self.client.put_bucket(oss.PutBucketRequest(bucket=bucket_name)) + + for i in range(3): + result = self.client.initiate_multipart_upload(oss.InitiateMultipartUploadRequest( + bucket=bucket_name, + key=f'key-{i}', + )) + self.assertIsNotNone(result) + self.assertEqual(200, result.status_code) + + paginator = 
self.client.list_multipart_uploads_paginator(limit=1)
+        request = oss.ListMultipartUploadsRequest(bucket=bucket_name)
+        iterator = paginator.iter_page(request)
+        j = 0
+        for p in iterator:
+            self.assertEqual(1, len(p.uploads))
+            self.assertEqual(f'key-{j}', p.uploads[0].key)
+            j += 1
+        self.assertIsNone(request.key_marker)
+        self.assertIsNone(request.upload_id_marker)
+
+        iterator = paginator.iter_page(request, limit=3)
+        j = 0
+        for p in iterator:
+            self.assertEqual(3, p.max_uploads)
+            self.assertEqual(3, len(p.uploads))
+            self.assertEqual(f'key-{j*3}', p.uploads[0].key)
+            j += 1
+        self.assertIsNone(request.key_marker)
+        self.assertIsNone(request.upload_id_marker)
+
+        paginator = self.client.list_multipart_uploads_paginator()
+        request = oss.ListMultipartUploadsRequest(bucket=bucket_name, prefix='key-1')
+        iterator = paginator.iter_page(request)
+        j = 0
+        for p in iterator:
+            self.assertEqual(1000, p.max_uploads)
+            self.assertEqual(1, len(p.uploads))
+            self.assertEqual(f'key-1', p.uploads[0].key)
+            j += 1
+        self.assertEqual(1, j)
+
+        #encoding
+        for i in range(3):
+            result = self.client.initiate_multipart_upload(oss.InitiateMultipartUploadRequest(
+                bucket=bucket_name,
+                key=f'upload-%123-/?#:key-{i}'
+            ))
+            self.assertIsNotNone(result)
+            self.assertEqual(200, result.status_code)
+
+        request = oss.ListMultipartUploadsRequest(bucket=bucket_name, prefix='upload-%123-')
+        iterator = paginator.iter_page(request)
+        j = 0
+        for p in iterator:
+            self.assertEqual(1000, p.max_uploads)
+            self.assertEqual(3, len(p.uploads))
+            self.assertEqual(f'upload-%123-/?#:key-0', p.uploads[0].key)
+            self.assertEqual(f'upload-%123-/?#:key-1', p.uploads[1].key)
+            self.assertEqual(f'upload-%123-/?#:key-2', p.uploads[2].key)
+            j += 1
+        self.assertEqual(1, j)
+
+
+class TestExtension(TestIntegration):
+    def test_is_bucket_exist(self):
+        no_perm_client = self.invalid_client
+        err_client = get_client("", "")
+
+        bucket_name_no_exist = self.bucket_name + "-no-exist"
+
+        exist = self.client.is_bucket_exist(self.bucket_name)
+        self.assertTrue(exist)
+
+        exist = self.client.is_bucket_exist(bucket_name_no_exist)
+        self.assertFalse(exist)
+
+        exist = no_perm_client.is_bucket_exist(self.bucket_name)
+        self.assertTrue(exist)
+
+        exist = no_perm_client.is_bucket_exist(bucket_name_no_exist)
+        self.assertFalse(exist)
+
+        try:
+            exist = err_client.is_bucket_exist(self.bucket_name)
+            self.fail("should not here")
+        except oss.exceptions.OperationError as err:
+            self.assertIn('invalid field, endpoint', str(err))
+
+    def test_is_object_exist(self):
+        bucket_name_no_exist = self.bucket_name + "-no-exist"
+        object_name = 'object-exist'
+        object_name_no_exist = "object-no-exist"
+        no_perm_client = self.invalid_client
+        err_client = get_client("", "")
+
+        result = self.client.put_object(oss.PutObjectRequest(
+            bucket=self.bucket_name,
+            key=object_name,
+        ))
+        self.assertEqual(200, result.status_code)
+        self.assertEqual('OK', result.status)
+
+        exist = self.client.is_object_exist(self.bucket_name, object_name)
+        self.assertTrue(exist)
+
+        exist = self.client.is_object_exist(self.bucket_name, object_name_no_exist)
+        self.assertFalse(exist)
+
+        try:
+            exist = self.client.is_object_exist(bucket_name_no_exist, object_name)
+            self.fail("should not here")
+        except oss.exceptions.OperationError as err:
+            self.assertIn('NoSuchBucket', str(err))
+
+        try:
+            exist = self.client.is_object_exist(bucket_name_no_exist, object_name_no_exist)
+            self.fail("should not here")
+        except oss.exceptions.OperationError as err:
+            self.assertIn('NoSuchBucket', str(err))
+
+
+        try:
+            exist = no_perm_client.is_object_exist(self.bucket_name, object_name)
+            self.fail("should not here")
+        except oss.exceptions.OperationError as err:
+            self.assertIn('InvalidAccessKeyId', str(err))
+
+        try:
+            exist = no_perm_client.is_object_exist(bucket_name_no_exist, object_name_no_exist)
+            self.fail("should not here")
+        except oss.exceptions.OperationError as err:
+            self.assertIn('NoSuchBucket', str(err))
+
+        try:
+            exist = err_client.is_object_exist(self.bucket_name, object_name)
+            self.fail("should not here")
+        except oss.exceptions.OperationError as err:
+            self.assertIn('invalid field, endpoint', str(err))
+
+    def test_put_object_from_file(self):
+        example_data = b''
+        with open("./tests/data/example.jpg", 'rb') as f:
+            example_data = f.read()
+
+        key = 'object_from_file.jpg'
+        result = self.client.put_object_from_file(oss.PutObjectRequest(
+            bucket=self.bucket_name,
+            key=key
+        ), "./tests/data/example.jpg")
+        self.assertEqual(200, result.status_code)
+        self.assertEqual('OK', result.status)
+
+        gresult = self.client.get_object(oss.GetObjectRequest(
+            bucket=self.bucket_name,
+            key=key
+        ))
+        self.assertEqual(200, gresult.status_code)
+        self.assertEqual('OK', gresult.status)
+        self.assertEqual(example_data, gresult.body.content)
+
+    def test_put_object_from_file_fail(self):
+        key = 'object_from_file.jpg'
+        try:
+            self.client.put_object_from_file(oss.PutObjectRequest(
+                bucket=self.bucket_name,
+                key=key
+            ), "./tests/data/invalid-example.jpg")
+            self.fail("should not here")
+        except FileNotFoundError as err:
+            self.assertIn("No such file or directory", str(err))
+
+    def test_get_object_to_file(self):
+        key = 'get_object_to_file.jpg'
+        result = self.client.put_object(oss.PutObjectRequest(
+            bucket=self.bucket_name,
+            key=key,
+            body='hello world'
+        ))
+        self.assertEqual(200, result.status_code)
+        self.assertEqual('OK', result.status)
+        filename = ''
+        # NamedTemporaryFile is required here: tempfile.TemporaryFile accepts
+        # no 'delete' argument and has no usable file name on POSIX.
+        with tempfile.NamedTemporaryFile('w+b', delete=False) as f:
+            filename = f.name
+
+            gresult = self.client.get_object_to_file(oss.GetObjectRequest(
+                bucket=self.bucket_name,
+                key=key,
+            ), f.name)
+            self.assertEqual(200, gresult.status_code)
+            self.assertEqual('OK', gresult.status)
+
+        os.remove(filename)
+
+    def test_get_object_to_file_fail(self):
+        key = 'get_object_to_file_fail.jpg'
+        result = self.client.put_object(oss.PutObjectRequest(
+            bucket=self.bucket_name,
+            key=key,
+            body='hello world'
+        ))
+        self.assertEqual(200, result.status_code)
+        self.assertEqual('OK', result.status)
+
+        filename = ''
+        with tempfile.NamedTemporaryFile('w+b', delete=False) as f:
+            filename = f.name
+            try:
+                self.invalid_client.get_object_to_file(oss.GetObjectRequest(
+                    bucket=self.bucket_name,
+                    key=key,
+                ), filename)
+                self.fail("should not here")
+            except Exception as err:
+                self.assertIn("InvalidAccessKeyId", str(err))
+
+        os.remove(filename)
diff --git a/tests/integration/test_client_filelike.py b/tests/integration/test_client_filelike.py
new file mode 100644
index 0000000..15e84cd
--- /dev/null
+++ b/tests/integration/test_client_filelike.py
@@ -0,0 +1,984 @@
+
+# pylint: skip-file
+import os
+import time
+import random
+import io
+import alibabacloud_oss_v2 as oss
+from . 
import TestIntegration, random_str
+
+class TestAppendOnlyFile(TestIntegration):
+    def test_append_file_bytes_only(self):
+        key = 'append_file-' + random_str(6)
+        data1 = "hello world"
+        data2 = random_str(12345)
+        data3 = random_str(100*1024*5 + 1234)
+        mix_data = data1 + data2 + data3
+
+        self.assertFalse(self.client.is_object_exist(self.bucket_name, key))
+
+        # open empty
+        append_f: oss.AppendOnlyFile = None
+        with self.client.appen_file(self.bucket_name, key) as f:
+            append_f = f
+            self.assertEqual(0, f.tell())
+            self.assertEqual(True, f.writable())
+            self.assertEqual('ab', f.mode)
+            self.assertEqual(f'oss://{self.bucket_name}/{key}', f.name)
+            self.assertEqual(False, f.closed)
+
+            n = f.write(data1.encode())
+            self.assertEqual(len(data1), n)
+            self.assertEqual(len(data1), f.tell())
+
+            n = f.write(data2.encode())
+            self.assertEqual(len(data2), n)
+            self.assertEqual(len(data1) + len(data2), f.tell())
+
+            n = f.write(data3.encode())
+            self.assertEqual(len(data3), n)
+            self.assertEqual(len(data1) + len(data2) + len(data3), f.tell())
+
+        self.assertIsNotNone(append_f)
+        self.assertIsInstance(append_f, oss.AppendOnlyFile)
+        self.assertEqual(True, append_f.closed)
+        # can close many times
+        append_f.close()
+        append_f.close()
+
+        result = self.client.get_object(oss.GetObjectRequest(
+            bucket=self.bucket_name,
+            key=key
+        ))
+        self.assertEqual(len(mix_data), result.content_length)
+        self.assertEqual('Appendable', result.object_type)
+        self.assertEqual(mix_data.encode(), result.body.content)
+
+        # open exist
+        append_f: oss.AppendOnlyFile = None
+        with self.client.appen_file(self.bucket_name, key) as f:
+            self.assertEqual(len(mix_data), f.tell())
+            f.write(b'123')
+            append_f = f
+            self.assertEqual(False, append_f.closed)
+            f.flush()
+
+        self.assertIsNotNone(append_f)
+        self.assertEqual(True, append_f.closed)
+        result = self.client.get_object(oss.GetObjectRequest(
+            bucket=self.bucket_name,
+            key=key
+        ))
+        self.assertEqual(len(mix_data) + len('123'), result.content_length)
+        self.assertEqual('Appendable', result.object_type)
+        self.assertEqual((mix_data + '123').encode(), result.body.content)
+
+        # flush
+        key = 'append_file-flush-' + random_str(6)
+        self.assertEqual(False, self.client.is_object_exist(self.bucket_name, key))
+        append_f: oss.AppendOnlyFile = None
+        with self.client.appen_file(self.bucket_name, key) as f:
+            append_f = f
+            self.assertEqual(0, f.tell())
+            self.assertEqual(False, append_f.closed)
+            f.flush()
+            self.assertIsNotNone(f._hash_crc64)
+        self.assertEqual(True, self.client.is_object_exist(self.bucket_name, key))
+        result = self.client.get_object(oss.GetObjectRequest(
+            bucket=self.bucket_name,
+            key=key
+        ))
+        self.assertEqual('Appendable', result.object_type)
+        self.assertEqual(0, result.content_length)
+
+        # open-close
+        key = 'append_file-open-close-' + random_str(6)
+        self.assertEqual(False, self.client.is_object_exist(self.bucket_name, key))
+        append_f = self.client.appen_file(self.bucket_name, key)
+        self.assertIsNotNone(append_f)
+        self.assertEqual(False, append_f.closed)
+        self.assertEqual(0, append_f.tell())
+        append_f.close()
+        self.assertEqual(True, append_f.closed)
+        self.assertEqual(False, self.client.is_object_exist(self.bucket_name, key))
+
+
+    def test_append_file_fail(self):
+        # open normal file
+        key = 'normal_file-' + random_str(6)
+        data = "hello world"
+        result = self.client.put_object(oss.PutObjectRequest(
+            bucket=self.bucket_name,
+            key=key,
+            body=data
+        ))
+        self.assertEqual(200, result.status_code)
+        self.assertEqual('OK', result.status)
+        self.assertEqual(True, self.client.is_object_exist(self.bucket_name, key))
+
+        try:
+            self.client.appen_file(self.bucket_name, key)
+            self.fail('should not here')
+        except Exception as err:
+            self.assertIn('Not a appendable file', str(err))
+
+        # access closed file
+        append_f: oss.AppendOnlyFile = None
+        key = 'new-append_file-' + random_str(6)
+        self.assertFalse(self.client.is_object_exist(self.bucket_name, key))
+        with self.client.appen_file(self.bucket_name, key) as f:
+            self.assertEqual(0, f.tell())
+            f.write(b'123')
+            append_f = f
+            self.assertEqual(False, append_f.closed)
+
+        self.assertTrue(self.client.is_object_exist(self.bucket_name, key))
+        self.assertIsNotNone(append_f)
+
+        # flush
+        try:
+            append_f.flush()
+            self.fail('should not here')
+        except Exception as err:
+            self.assertIn('I/O operation on closed file.', str(err))
+
+        # tell
+        try:
+            append_f.tell()
+            self.fail('should not here')
+        except Exception as err:
+            self.assertIn('I/O operation on closed file.', str(err))
+
+        # writable
+        try:
+            append_f.writable()
+            self.fail('should not here')
+        except Exception as err:
+            self.assertIn('I/O operation on closed file.', str(err))
+
+        # write
+        try:
+            append_f.write(b'123')
+            self.fail('should not here')
+        except Exception as err:
+            self.assertIn('I/O operation on closed file.', str(err))
+
+        # write non bytes data
+        append_f: oss.AppendOnlyFile = None
+        key = 'new-append_file-' + random_str(6)
+        self.assertFalse(self.client.is_object_exist(self.bucket_name, key))
+        with self.client.appen_file(self.bucket_name, key) as f:
+            self.assertEqual(0, f.tell())
+            try:
+                f.write('123')
+                self.fail('should not here')
+            except Exception as err:
+                self.assertIn('Not a bytes type, got ', str(err))
+
+    def test_append_file_write_from(self):
+        key = 'append_file-write_from' + random_str(6)
+        data1 = random_str(12345)
+        data2 = random_str(23456)
+        data3 = random_str(34567)
+        data4 = random_str(200*1024 + 45678)
+        data5 = random_str(1111)
+        all_data = data1 + data2 + data3 + data4 + data5
+
+        str_data = data1
+        bytes_data = data2.encode()
+        bytesio_data = io.BytesIO(data3.encode())
+        stringio_data = io.StringIO(data4)
+
+        self.assertFalse(self.client.is_object_exist(self.bucket_name, key))
+
+        with self.client.appen_file(self.bucket_name, key) as f:
+            n = f.write_from(str_data)
+            self.assertEqual(len(data1), n)
+
+            n = f.write_from(bytes_data)
+            self.assertEqual(len(data2), n)
+
+            n = f.write_from(bytesio_data)
+            self.assertEqual(len(data3), n)
+
+            n = f.write_from(stringio_data)
+            self.assertEqual(len(data4), n)
+
+            n = f.write(data5.encode())
+            self.assertEqual(len(data5), n)
+
+        result = self.client.get_object(oss.GetObjectRequest(
+            bucket=self.bucket_name,
+            key=key
+        ))
+        self.assertEqual(len(all_data), result.content_length)
+        self.assertEqual('Appendable', result.object_type)
+        self.assertEqual(all_data.encode(), result.body.content)
+
+
+    def test_append_file_write_from_file(self):
+        key = 'append_file-write_from_file' + random_str(6)
+        filepath = "./tests/data/example.jpg"
+
+        example_data = b''
+        with open(filepath, 'rb') as f:
+            example_data = f.read()
+
+        with open(filepath, 'rb') as f:
+            with self.client.appen_file(self.bucket_name, key) as ff:
+                n = ff.write_from(f)
+                self.assertEqual(len(example_data), n)
+
+        result = self.client.get_object(oss.GetObjectRequest(
+            bucket=self.bucket_name,
+            key=key
+        ))
+        self.assertEqual(len(example_data), result.content_length)
+        self.assertEqual('Appendable', result.object_type)
+        self.assertEqual(example_data, result.body.content)
+
+
+    def 
test_append_file_with_metadata(self):
+        key = 'append_file-with_metadata' + random_str(6)
+        data = b'hello world'
+        data1 = b'hello oss'
+        data2 = b'just for test'
+        with self.client.appen_file(self.bucket_name, key, create_parameter=oss.AppendObjectRequest(
+            acl='public-read',
+            storage_class='IA',
+            content_type='plain/txt',
+            metadata={'user':"test"}
+        )) as f:
+            n = f.write(data)
+            self.assertEqual(len(data), n)
+
+            n = f.write(data1)
+            self.assertEqual(len(data1), n)
+
+        result = self.client.head_object(oss.HeadObjectRequest(
+            bucket=self.bucket_name,
+            key=key
+        ))
+        self.assertEqual(len(data + data1), result.content_length)
+        self.assertEqual('Appendable', result.object_type)
+        self.assertEqual('IA', result.storage_class)
+        self.assertEqual('test', result.metadata.get('user'))
+        self.assertEqual('plain/txt', result.content_type)
+
+        # create_parameter only applies when the object is created; appending
+        # again leaves the original content type and metadata in place.
+        with self.client.appen_file(self.bucket_name, key, create_parameter=oss.AppendObjectRequest(
+            acl='public-read',
+            storage_class='IA',
+            content_type='plain/js',
+            metadata={'user':"test"}
+        )) as f:
+            n = f.write(data2)
+            self.assertEqual(len(data2), n)
+
+        result = self.client.head_object(oss.HeadObjectRequest(
+            bucket=self.bucket_name,
+            key=key
+        ))
+        self.assertEqual(len(data + data1 + data2), result.content_length)
+        self.assertEqual('Appendable', result.object_type)
+        self.assertEqual('IA', result.storage_class)
+        self.assertEqual('test', result.metadata.get('user'))
+        self.assertEqual('plain/txt', result.content_type)
+
+
+
+class TestReadOnlyFile(TestIntegration):
+    def test_open_file_basic(self):
+        data_size = 1234
+        key = 'check_member-' + random_str(6)
+        data = random_str(data_size).encode()
+        result = self.client.put_object(oss.PutObjectRequest(
+            bucket=self.bucket_name,
+            key=key,
+            body=data
+        ))
+        self.assertEqual(200, result.status_code)
+        self.assertEqual('OK', result.status)
+
+        rf: oss.ReadOnlyFile = None
+        with self.client.open_file(self.bucket_name, key) as f:
+            self.assertIsNotNone(f)
+            rf = f
+            self.assertEqual(0, f.tell())
+            self.assertEqual(True, f.seekable())
+            self.assertEqual(True, f.readable())
+            self.assertEqual(False, f.closed)
+            self.assertEqual(f'oss://{self.bucket_name}/{key}', f.name)
+            self.assertEqual('rb', f.mode)
+
+            #seek, tell
+            f.seek(0, os.SEEK_SET)
+            self.assertEqual(0, f.tell())
+            offset = f.tell()
+            b = f.read(2)
+            self.assertEqual(data[offset:offset + 2], b)
+
+            f.seek(1, os.SEEK_SET)
+            self.assertEqual(1, f.tell())
+            offset = f.tell()
+            b = f.read(2)
+            self.assertEqual(data[offset:offset + 2], b)
+
+            f.seek(data_size, os.SEEK_SET)
+            self.assertEqual(data_size, f.tell())
+            offset = f.tell()
+            b = f.read(2)
+            self.assertEqual(data[offset:offset + 2], b)
+
+            f.seek(-data_size, os.SEEK_END)
+            self.assertEqual(0, f.tell())
+            offset = f.tell()
+            b = f.read(2)
+            self.assertEqual(data[offset:offset + 2], b)
+
+            f.seek(-1, os.SEEK_END)
+            self.assertEqual(data_size - 1, f.tell())
+            offset = f.tell()
+            b = f.read(2)
+            self.assertEqual(data[data_size - 1:], b)
+
+            f.seek(0, os.SEEK_END)
+            self.assertEqual(data_size, f.tell())
+            offset = f.tell()
+            b = f.read(2)
+            self.assertEqual(b'', b)
+
+            f.seek(123, os.SEEK_SET)
+            self.assertEqual(123, f.tell())
+            offset = f.tell()
+            b = f.read(2)
+            self.assertEqual(data[offset:offset + 2], b)
+
+            f.seek(123, os.SEEK_CUR)
+            self.assertEqual(248, f.tell())
+            offset = f.tell()
+            b = f.read(2)
+            self.assertEqual(data[offset:offset + 2], b)
+
+        self.assertEqual(True, rf.closed)
+        self.assertEqual(None, rf._read_buf)
+        self.assertEqual(None, rf._stream_reader)
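+        # leaving the 'with' block closes the file; these checks verify the
+        # internal buffer and stream state are released on close. +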
self.assertEqual(None, rf._stream_iter) + + #call close many times + rf.close() + rf.close() + + rf = self.client.open_file(self.bucket_name, key) + self.assertIsNotNone(rf) + self.assertEqual(False, rf.closed) + with rf as f: + self.assertEqual(False, f.closed) + self.assertEqual(True, rf.closed) + + + def test_open_file_read_size(self): + key = 'read_size-' + random_str(6) + data = random_str(100*1024*5 + 1234).encode() + + result = self.client.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key, + body=data + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + + # read with size + rf: oss.ReadOnlyFile = None + with self.client.open_file(self.bucket_name, key) as f: + self.assertIsNotNone(f) + rf = f + end = 129 + for i in range(0, end): + size = 200*1024 + 12345 - i + f.seek(i, 0) + self.assertEqual(i, f.tell()) + got = f.read(size) + self.assertEqual(size, len(got)) + self.assertEqual(data[i:i+size], got) + self.assertEqual(i + size, f.tell()) + + self.assertEqual(True, rf.closed) + + def test_open_file_readall(self): + key = 'readall-' + random_str(6) + data = random_str(100*1024*5 + 1234).encode() + + result = self.client.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key, + body=data + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + + # read all + with self.client.open_file(self.bucket_name, key) as f: + self.assertIsNotNone(f) + f.seek(123) + self.assertEqual(123, f.tell()) + got = f.readall() + self.assertEqual(data[123:], got) + self.assertEqual(len(data), f.tell()) + + f.seek(1234) + got1 = f.read(17) + self.assertEqual(1234 + 17, f.tell()) + self.assertEqual(data[1234:1234 + 17], got1) + got2 = f.read() + self.assertEqual(len(data), f.tell()) + self.assertEqual(data[1234 + 17:], got2) + self.assertEqual(data[1234:], got1 + got2) + + f.seek(12345) + got1 = f.read(172) + self.assertEqual(12345 + 172, f.tell()) + self.assertEqual(data[12345:12345 + 172], got1) + got2 = f.readall() + self.assertEqual(len(data), f.tell()) + self.assertEqual(data[12345 + 172:], got2) + self.assertEqual(data[12345:], got1 + got2) + + def test_open_file_readinto(self): + key = 'readinto-' + random_str(6) + data = random_str(100*1024*5 + 1234).encode() + + result = self.client.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key, + body=data + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + + # read into bytearray + with self.client.open_file(self.bucket_name, key) as f: + self.assertIsNotNone(f) + b = bytearray(11) + self.assertEqual(0, f.tell()) + n = f.readinto(b) + self.assertEqual(11, n) + self.assertEqual(data[0:11], b) + self.assertEqual(11, f.tell()) + + b = bytearray(9) + n = f.readinto(b) + self.assertEqual(9, n) + self.assertEqual(data[11:20], b) + self.assertEqual(20, f.tell()) + + b = bytearray(len(data)) + f.seek(12345) + n = f.readinto(b) + self.assertEqual(len(data) - 12345, n) + self.assertEqual(len(data), f.tell()) + + b = bytearray(len(data) * 2) + f.seek(1234) + n = f.readinto(b) + self.assertEqual(len(data) - 1234, n) + self.assertEqual(len(data), f.tell()) + self.assertEqual(data[1234:], b[:len(data)-1234]) + + # read into blob = memoryview(bytearray(size)) + with self.client.open_file(self.bucket_name, key) as f: + self.assertIsNotNone(f) + blob = memoryview(bytearray(len(data))) + self.assertEqual(0, f.tell()) + n = f.readinto(blob[0:11]) + self.assertEqual(11, n) + self.assertEqual(data[0:11], blob[0:11]) + 
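# readinto() fills the caller's buffer in place; slicing a memoryview + # lets each chunk land at a chosen offset without an extra copy. +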
self.assertEqual(11, f.tell()) + + n = f.readinto(blob[11:20]) + self.assertEqual(9, n) + self.assertEqual(data[11:20], blob[11:20]) + self.assertEqual(20, f.tell()) + + #remains + n = f.readinto(blob) + self.assertEqual(len(data) - 20, n) + self.assertEqual(data[20:], blob[0:n]) + self.assertEqual(len(data), f.tell()) + + def test_open_file_fail(self): + key = 'fail-test-' + random_str(6) + nokey = key + 'no-key' + data = random_str(1234).encode() + + result = self.client.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key, + body=data + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + + # open fail + try: + with self.client.open_file(self.bucket_name, nokey) as f: + self.assertIsNotNone(f) + self.fail('should not here') + except oss.PathError as err: + self.assertIn('stat_object', str(err)) + self.assertIn(f'oss://{self.bucket_name}/{nokey}', str(err)) + + # seek fail + try: + with self.client.open_file(self.bucket_name, key) as f: + f.seek(len(data) + 1, os.SEEK_SET) + self.fail('should not here') + except oss.PathError as err: + self.assertIn('seek', str(err)) + self.assertIn('offset is unavailable', str(err)) + + try: + with self.client.open_file(self.bucket_name, key) as f: + f.seek(-1, os.SEEK_SET) + self.fail('should not here') + except oss.PathError as err: + self.assertIn('seek', str(err)) + self.assertIn('negative seek position', str(err)) + + try: + with self.client.open_file(self.bucket_name, key) as f: + f.seek(0, 3) + self.fail('should not here') + except oss.PathError as err: + self.assertIn('seek', str(err)) + self.assertIn('unsupported whence value', str(err)) + + try: + with self.client.open_file(self.bucket_name, key) as f: + f.seek('123', 3) + self.fail('should not here') + except oss.PathError as err: + self.assertIn('seek', str(err)) + self.assertIn('is not an integer', str(err)) + + # call after close + rf: oss.ReadOnlyFile = None + with self.client.open_file(self.bucket_name, key) as f: + self.assertIsNotNone(f) + rf = f + + try: + rf.read() + except oss.PathError as err: + self.assertIn('read', str(err)) + self.assertIn('I/O operation on closed file.', str(err)) + + try: + rf.readall() + except oss.PathError as err: + self.assertIn('read', str(err)) + self.assertIn('I/O operation on closed file.', str(err)) + + try: + rf.readinto(bytearray(123)) + except oss.PathError as err: + self.assertIn('read', str(err)) + self.assertIn('I/O operation on closed file.', str(err)) + + try: + rf.seek(0, os.SEEK_CUR) + except oss.PathError as err: + self.assertIn('seek', str(err)) + self.assertIn('I/O operation on closed file.', str(err)) + + try: + rf.tell() + except oss.PathError as err: + self.assertIn('tell', str(err)) + self.assertIn('I/O operation on closed file.', str(err)) + + try: + rf.readable() + except oss.PathError as err: + self.assertIn('readable', str(err)) + self.assertIn('I/O operation on closed file.', str(err)) + + try: + rf.seekable() + except oss.PathError as err: + self.assertIn('seekable', str(err)) + self.assertIn('I/O operation on closed file.', str(err)) + + + try: + with rf as f: + pass + except oss.PathError as err: + self.assertIn('enter', str(err)) + self.assertIn('I/O operation on closed file.', str(err)) + + def test_open_file_resume_read(self): + key = 'resume_read-' + random_str(6) + data = random_str(200*1024 + 1234).encode() + + result = self.client.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key, + body=data + )) + self.assertEqual(200, result.status_code) + 
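# the block below deliberately drops the underlying stream iterator to + # mimic a broken connection; the following read() is expected to reopen + # the stream and resume from the current offset. +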
self.assertEqual('OK', result.status) + + with self.client.open_file(self.bucket_name, key) as f: + b1 = f.read(1234) + # wait stream close + f._stream_iter = None + #time.sleep(120) + b2 = f.read() + self.assertEqual(data, b1 + b2) + + def test_open_file_source_changed(self): + key = 'source_changed-' + random_str(6) + data1 = random_str(200*1024 + 1234).encode() + data2 = random_str(201*1024 + 1234).encode() + result = self.client.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key, + body=data1 + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + + with self.client.open_file(self.bucket_name, key) as f: + b1 = f.read(1234) + self.assertEqual(data1[0:len(b1)], b1) + + #change file + result = self.client.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key, + body=data2 + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + + try: + f.seek(0) + f.readall() + except oss.PathError as err: + self.assertIn('get_object', str(err)) + self.assertIn('Source file is changed, origin info', str(err)) + + def test_open_file_prefetch_read(self): + key = 'prefetch-' + random_str(6) + data_len = 11*200*1024 + 1234 + data = random_str(data_len).encode() + result = self.client.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key, + body=data + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + + rf: oss.ReadOnlyFile = None + with self.client.open_file( + self.bucket_name, key, + enable_prefetch=True, + prefetch_num=3, + chunk_size=2*200*1024, + prefetch_threshold = 0) as f: + + rf = f + self.assertEqual(True, f._enable_prefetch) + self.assertEqual(3, f._prefetch_num) + self.assertEqual(2*200*1024, f._chunk_size) + self.assertEqual(0, f._prefetch_threshold) + + # size + start = f.seek(0, os.SEEK_SET) + end = f.seek(0, os.SEEK_END) + self.assertEqual(data_len, end - start) + + #print('\nreadall') + # readall + f.seek(0, os.SEEK_SET) + b = f.readall() + self.assertEqual(data, b) + self.assertIsNone(f._stream_reader) + self.assertIsNone(f._stream_iter) + self.assertIsNotNone(f._generator) + self.assertIsNotNone(f._executor) + self.assertEqual(3, f._executor._max_workers) + + b = f.readall() + self.assertEqual(b'', b) + + #print('seek readN') + # seek readN + for _ in range(0, 64): + offset = random.randint(0, data_len // 5) + n = random.randint(0, data_len // 4) + 3*200*1024 + begin = f.seek(offset, os.SEEK_SET) + f._num_ooo_read = 0 + self.assertEqual(offset, begin) + #print(f'seek readN {offset} {n}') + b = f.read(n) + self.assertEqual(n, len(b)) + self.assertEqual(data[offset:offset + n], b) + if n % f._chunk_size > 0: + self.assertGreater(len(f._prefetch_readers), 1) + self.assertIsNone(f._stream_reader) + self.assertIsNone(f._stream_iter) + self.assertIsNotNone(f._generator) + + #print('seek read from offset to end') + # seek read from offset to end + for _ in range(0, 64): + offset = random.randint(0, data_len // 5) + begin = f.seek(offset, os.SEEK_SET) + f._num_ooo_read = 0 + self.assertEqual(offset, begin) + b = f.read() + self.assertEqual(data_len - offset, len(b)) + self.assertEqual(data[offset:], b) + self.assertEqual(0, len(f._prefetch_readers)) + self.assertIsNone(f._stream_reader) + self.assertIsNone(f._stream_iter) + self.assertIsNotNone(f._generator) + + #print('seek readInto N') + # seek readInto N + for _ in range(0, 64): + offset = random.randint(0, data_len // 5) + n = random.randint(0, data_len // 4) + 3*200*1024 + begin = 
f.seek(offset, os.SEEK_SET) + f._num_ooo_read = 0 + self.assertEqual(offset, begin) + blob = memoryview(bytearray(n)) + got = f.readinto(blob) + self.assertEqual(n, got) + self.assertEqual(data[offset:offset + n], blob[0:]) + if n % f._chunk_size > 0: + self.assertGreater(len(f._prefetch_readers), 1) + self.assertIsNotNone(f._generator) + self.assertIsNone(f._stream_reader) + self.assertIsNone(f._stream_iter) + + #print('seek readInto from offset to end') + # seek readInto from offset to end + bloball = memoryview(bytearray(data_len)) + for _ in range(0, 64): + offset = random.randint(0, data_len // 5) + begin = f.seek(offset, os.SEEK_SET) + f._num_ooo_read = 0 + self.assertEqual(offset, begin) + got = f.readinto(bloball) + self.assertEqual(data_len - offset, got) + self.assertEqual(data[offset:], bloball[0:got]) + self.assertEqual(0, len(f._prefetch_readers)) + self.assertIsNone(f._stream_reader) + self.assertIsNone(f._stream_iter) + self.assertIsNotNone(f._generator) + + self.assertEqual(None, rf._read_buf) + self.assertEqual(None, rf._stream_reader) + self.assertEqual(None, rf._prefetch_readers) + self.assertEqual(None, rf._generator) + self.assertEqual(None, rf._executor) + + #call close many times + rf.close() + rf.close() + + def test_open_file_mix_read(self): + key = 'mix-' + random_str(6) + data_len = 11*200*1024 + 12345 + data = random_str(data_len).encode() + result = self.client.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key, + body=data + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + + rf: oss.ReadOnlyFile = None + with self.client.open_file( + self.bucket_name, key, + enable_prefetch=True, + prefetch_num=3, + chunk_size=1*200*1024, + prefetch_threshold = 5*200*1024) as f: + + rf = f + self.assertEqual(True, f._enable_prefetch) + self.assertEqual(3, f._prefetch_num) + self.assertEqual(1*200*1024, f._chunk_size) + self.assertEqual(5*200*1024, f._prefetch_threshold) + + # read some + some1 = 3 * 100 * 1024 + 123 + f.seek(0, os.SEEK_SET) + b1 = f.read(some1) + self.assertEqual(data[0:some1], b1) + self.assertIsNotNone(f._stream_reader) + self.assertIsNotNone(f._stream_iter) + self.assertIsNone(f._generator) + self.assertIsNone(f._executor) + + # read some + some2 = 8 * 100 * 1024 + 123 + self.assertGreater(some1 + some2, f._prefetch_threshold) + b2 = f.read(some2) + self.assertEqual(data[some1:some1 + some2], b2) + self.assertIsNone(f._stream_reader) + self.assertIsNone(f._stream_iter) + self.assertIsNotNone(f._generator) + self.assertIsNotNone(f._executor) + self.assertEqual(3, f._executor._max_workers) + + # read last + b3 = f.readall() + self.assertEqual(data, b1 + b2 + b3) + + self.assertEqual(None, rf._read_buf) + self.assertEqual(None, rf._stream_reader) + self.assertEqual(None, rf._prefetch_readers) + self.assertEqual(None, rf._generator) + self.assertEqual(None, rf._executor) + + # seq read, seek, read all + with self.client.open_file( + self.bucket_name, key, + enable_prefetch=True, + prefetch_num=3, + chunk_size=1*200*1024, + prefetch_threshold = 5*200*1024) as f: + + self.assertEqual(True, f._enable_prefetch) + self.assertEqual(3, f._prefetch_num) + self.assertEqual(1*200*1024, f._chunk_size) + self.assertEqual(5*200*1024, f._prefetch_threshold) + + # read some + off1 = 1 + some1 = 3 * 100 * 1024 + 123 + f.seek(off1, os.SEEK_SET) + b1 = f.read(some1) + self.assertEqual(data[off1:off1 + some1], b1) + self.assertIsNotNone(f._stream_reader) + self.assertIsNotNone(f._stream_iter) + 
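# while cumulative reads stay under prefetch_threshold the plain streaming + # reader serves the data; once the threshold is crossed the file switches + # to the prefetch generator and its worker pool, as asserted here. +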
self.assertIsNone(f._generator) + self.assertIsNone(f._executor) + + # read some + off2 = 100 + some2 = 15 * 100 * 1024 + 123 + self.assertGreater(some2, f._prefetch_threshold) + f.seek(off2, os.SEEK_SET) + b2 = f.read(some2) + self.assertEqual(data[off2:off2 + some2], b2) + self.assertIsNone(f._stream_reader) + self.assertIsNone(f._stream_iter) + self.assertIsNotNone(f._generator) + self.assertIsNotNone(f._executor) + self.assertEqual(3, f._executor._max_workers) + + def test_open_file_prefetch_source_changed(self): + key = 'prefetch_source_changed-' + random_str(6) + data1 = random_str(11*200*1024 + 12345).encode() + data2 = random_str(11*200*1024 + 12345).encode() + result = self.client.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key, + body=data1 + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + + with self.client.open_file( + self.bucket_name, key, + enable_prefetch=True, + prefetch_num=3, + chunk_size=2*200*1024, + prefetch_threshold = 0) as f: + + len1 = 3 * 200 * 1024 + 12 + b1 = f.read(len1) + self.assertIsNone(f._stream_reader) + self.assertIsNone(f._stream_iter) + self.assertIsNotNone(f._generator) + self.assertIsNotNone(f._executor) + self.assertEqual(data1[0:len1], b1) + + #change file + result = self.client.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key, + body=data2 + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + + # read data saved in the buffer + len2 = 1 * 200 * 1024 + b2 = f.read(len2) + self.assertEqual(data1[len1:len1 + len2], b2) + + # read remains + try: + f.readall() + except oss.PathError as err: + self.assertIn('get_object', str(err)) + self.assertIn('Source file is changed, origin info', str(err)) + + def test_open_file_mix_read2(self): + key = 'prefetch-' + random_str(6) + data_len = 6*100*1024 + 1234 + data = random_str(data_len).encode() + result = self.client.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key, + body=data + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + + with self.client.open_file( + self.bucket_name, key, + enable_prefetch=True, + prefetch_num=2, + chunk_size=1*200*1024, + prefetch_threshold = 1*200*1024) as f: + + self.assertEqual(True, f._enable_prefetch) + self.assertEqual(2, f._prefetch_num) + self.assertEqual(1*200*1024, f._chunk_size) + self.assertEqual(1*200*1024, f._prefetch_threshold) + + len1 = 12345 + b1 = f.read(len1) + self.assertEqual(data[0:len1], b1) + self.assertIsNotNone(f._stream_reader) + self.assertIsNotNone(f._stream_iter) + self.assertIsNone(f._generator) + self.assertIsNone(f._executor) + + len2 = 1*200*1024 + b2 = f.read(len2) + self.assertEqual(data[len1:len1 + len2], b2) + self.assertIsNone(f._stream_reader) + self.assertIsNone(f._stream_iter) + self.assertIsNotNone(f._generator) + self.assertIsNotNone(f._executor) + + # set reader fail + f._prefetch_readers[0]._failed = True + len3 = 1*100*1024 + b3 = f.read(len3) + self.assertEqual(data[:len1 + len2 + len3], b1 + b2 + b3) + self.assertIsNotNone(f._stream_reader) + self.assertIsNotNone(f._stream_iter) + self.assertIsNone(f._generator) + self.assertIsNotNone(f._executor) diff --git a/tests/integration/test_encryption_client.py b/tests/integration/test_encryption_client.py new file mode 100644 index 0000000..2d04f1c --- /dev/null +++ b/tests/integration/test_encryption_client.py @@ -0,0 +1,470 @@ +# pylint: skip-file +import os +import tempfile +from typing import cast +import 
alibabacloud_oss_v2 as oss
+from . import TestIntegration, random_bucket_name, random_str, REGION, OBJECTNAME_PREFIX
+
+RSA_PUBLIC_KEY = """-----BEGIN PUBLIC KEY-----
+MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCokfiAVXXf5ImFzKDw+XO/UByW
+6mse2QsIgz3ZwBtMNu59fR5zttSx+8fB7vR4CN3bTztrP9A6bjoN0FFnhlQ3vNJC
+5MFO1PByrE/MNd5AAfSVba93I6sx8NSk5MzUCA4NJzAUqYOEWGtGBcom6kEF6MmR
+1EKib1Id8hpooY5xaQIDAQAB
+-----END PUBLIC KEY-----"""
+
+RSA_PRIVATE_KEY = """-----BEGIN PRIVATE KEY-----
+MIICdQIBADANBgkqhkiG9w0BAQEFAASCAl8wggJbAgEAAoGBAKiR+IBVdd/kiYXM
+oPD5c79QHJbqax7ZCwiDPdnAG0w27n19HnO21LH7x8Hu9HgI3dtPO2s/0DpuOg3Q
+UWeGVDe80kLkwU7U8HKsT8w13kAB9JVtr3cjqzHw1KTkzNQIDg0nMBSpg4RYa0YF
+yibqQQXoyZHUQqJvUh3yGmihjnFpAgMBAAECgYA49RmCQ14QyKevDfVTdvYlLmx6
+kbqgMbYIqk+7w611kxoCTMR9VMmJWgmk/Zic9mIAOEVbd7RkCdqT0E+xKzJJFpI2
+ZHjrlwb21uqlcUqH1Gn+wI+jgmrafrnKih0kGucavr/GFi81rXixDrGON9KBE0FJ
+cPVdc0XiQAvCBnIIAQJBANXu3htPH0VsSznfqcDE+w8zpoAJdo6S/p30tcjsDQnx
+l/jYV4FXpErSrtAbmI013VYkdJcghNSLNUXppfk2e8UCQQDJt5c07BS9i2SDEXiz
+byzqCfXVzkdnDj9ry9mba1dcr9B9NCslVelXDGZKvQUBqNYCVxg398aRfWlYDTjU
+IoVVAkAbTyjPN6R4SkC4HJMg5oReBmvkwFCAFsemBk0GXwuzD0IlJAjXnAZ+/rIO
+ItewfwXIL1Mqz53lO/gK+q6TR585AkB304KUIoWzjyF3JqLP3IQOxzns92u9EV6l
+V2P+CkbMPXiZV6sls6I4XppJXX2i3bu7iidN3/dqJ9izQK94fMU9AkBZvgsIPCot
+y1/POIbv9LtnviDKrmpkXgVQSU4BmTPvXwTJm8APC7P/horSh3SVf1zgmnsyjm9D
+hO92gGc+4ajL
+-----END PRIVATE KEY-----"""
+class TestEncryptionClient(TestIntegration):
+
+    def test_put_object(self):
+        mc = oss.crypto.MasterRsaCipher(
+            mat_desc={"tag": "value"},
+            public_key=RSA_PUBLIC_KEY,
+            private_key=RSA_PRIVATE_KEY
+        )
+        eclient = oss.EncryptionClient(self.client, mc)
+
+        # str
+        body = 'hello world'
+        key = 'object-str.bin'
+        result = eclient.put_object(oss.PutObjectRequest(
+            bucket=self.bucket_name,
+            key=key,
+            body=body
+        ))
+        self.assertIsNotNone(result)
+        self.assertIsInstance(result, oss.PutObjectResult)
+        self.assertEqual(200, result.status_code)
+
+        gresult = eclient.unwrap().get_object(oss.GetObjectRequest(
+            bucket=self.bucket_name,
+            key=key,
+        ))
+        self.assertIsNotNone(gresult)
+        self.assertIsInstance(gresult, oss.GetObjectResult)
+        self.assertEqual(200, gresult.status_code)
+        self.assertIsNotNone(gresult.headers.get('x-oss-meta-client-side-encryption-start', None))
+        self.assertIsNotNone(gresult.headers.get('x-oss-meta-client-side-encryption-key', None))
+        self.assertEqual('{"tag": "value"}', gresult.headers.get('x-oss-meta-client-side-encryption-matdesc', None))
+        self.assertEqual('AES/CTR/NoPadding', gresult.headers.get('x-oss-meta-client-side-encryption-cek-alg', None))
+        self.assertEqual('RSA/NONE/PKCS1Padding', gresult.headers.get('x-oss-meta-client-side-encryption-wrap-alg', None))
+        self.assertIsNone(gresult.headers.get('x-oss-meta-client-side-encryption-unencrypted-content-md5', None))
+        self.assertIsNone(gresult.headers.get('x-oss-meta-client-side-encryption-unencrypted-content-length', None))
+
+        self.assertEqual(len(body), len(gresult.body.content))
+        self.assertNotEqual(body.encode(), gresult.body.content)
+
+        egresult = eclient.get_object(oss.GetObjectRequest(
+            bucket=self.bucket_name,
+            key=key,
+        ))
+        self.assertIsNotNone(egresult)
+        self.assertIsInstance(egresult, oss.GetObjectResult)
+        self.assertEqual(200, egresult.status_code)
+        self.assertIsNotNone(egresult.headers.get('x-oss-meta-client-side-encryption-start', None))
+        self.assertIsNotNone(egresult.headers.get('x-oss-meta-client-side-encryption-key', None))
+        self.assertEqual('{"tag": "value"}', egresult.headers.get('x-oss-meta-client-side-encryption-matdesc', None))
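+        # the x-oss-meta-client-side-encryption-* headers carry the envelope:
+        # the wrapped data key, the encryption start (IV), the material
+        # description, and the cipher names, so any client holding the
+        # matching private key can decrypt the object later. +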
self.assertEqual('AES/CTR/NoPadding', egresult.headers.get('x-oss-meta-client-side-encryption-cek-alg', None)) + self.assertEqual('RSA/NONE/PKCS1Padding', egresult.headers.get('x-oss-meta-client-side-encryption-wrap-alg', None)) + self.assertIsNone(egresult.headers.get('x-oss-meta-client-side-encryption-unencrypted-content-md5', None)) + self.assertIsNone(egresult.headers.get('x-oss-meta-client-side-encryption-unencrypted-content-length', None)) + + self.assertEqual(len(body), len(egresult.body.content)) + self.assertEqual(body.encode(), egresult.body.content) + + # bytes + body = b'hello world 123' + key = 'object-bytes.bin' + result = eclient.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key, + content_length=str(len(body)), + body=body + )) + self.assertIsNotNone(result) + self.assertIsInstance(result, oss.PutObjectResult) + self.assertEqual(200, result.status_code) + + egresult = eclient.get_object(oss.GetObjectRequest( + bucket=self.bucket_name, + key=key, + )) + self.assertIsNotNone(egresult) + self.assertIsInstance(egresult, oss.GetObjectResult) + self.assertEqual(200, egresult.status_code) + self.assertIsNotNone(egresult.headers.get('x-oss-meta-client-side-encryption-start', None)) + self.assertIsNotNone(egresult.headers.get('x-oss-meta-client-side-encryption-key', None)) + self.assertEqual('{"tag": "value"}', egresult.headers.get('x-oss-meta-client-side-encryption-matdesc', None)) + self.assertEqual('AES/CTR/NoPadding', egresult.headers.get('x-oss-meta-client-side-encryption-cek-alg', None)) + self.assertEqual('RSA/NONE/PKCS1Padding', egresult.headers.get('x-oss-meta-client-side-encryption-wrap-alg', None)) + self.assertIsNone(egresult.headers.get('x-oss-meta-client-side-encryption-unencrypted-content-md5', None)) + self.assertEqual(str(len(body)), egresult.headers.get('x-oss-meta-client-side-encryption-unencrypted-content-length', None)) + + self.assertEqual(len(body), len(egresult.body.content)) + self.assertEqual(b'hello world 123', egresult.body.content) + + # file + example_data = b'' + key = 'object-file.bin' + with open("./tests/data/example.jpg", 'rb') as f: + result = eclient.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key, + body=f + )) + self.assertIsNotNone(result) + self.assertIsInstance(result, oss.PutObjectResult) + self.assertEqual(200, result.status_code) + + f.seek(0, os.SEEK_SET) + example_data = f.read() + + egresult = eclient.get_object(oss.GetObjectRequest( + bucket=self.bucket_name, + key=key, + )) + self.assertIsNotNone(egresult) + self.assertIsInstance(egresult, oss.GetObjectResult) + self.assertEqual(200, egresult.status_code) + self.assertEqual(21839, len(example_data)) + self.assertEqual(example_data, egresult.body.content) + + #iterable + key = 'object-iterable.bin' + result = eclient.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=key, + body=iter(example_data) + )) + self.assertIsNotNone(result) + self.assertIsInstance(result, oss.PutObjectResult) + self.assertEqual(200, result.status_code) + + egresult = eclient.get_object(oss.GetObjectRequest( + bucket=self.bucket_name, + key=key, + )) + self.assertIsNotNone(egresult) + self.assertIsInstance(egresult, oss.GetObjectResult) + self.assertEqual(200, egresult.status_code) + self.assertEqual(21839, len(example_data)) + self.assertEqual(example_data, egresult.body.content) + + def test_multipart_from_bytes(self): + part_size = 100 * 1024 + data_size = 3 * part_size + 1245 + data = random_str(data_size).encode() + + key = 'multipart-bytes.bin' + 
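# client-side encryption declares the part size (and, optionally, total + # size) up front via cse_part_size/cse_data_size so each part can be + # encrypted independently at the right counter offset; list_parts echoes + # both values back later in this test. +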
mc = oss.crypto.MasterRsaCipher( + mat_desc={"tag": "value"}, + public_key=RSA_PUBLIC_KEY, + private_key=RSA_PRIVATE_KEY + ) + eclient = oss.EncryptionClient(self.client, mc) + + #init + initresult = eclient.initiate_multipart_upload(oss.InitiateMultipartUploadRequest( + bucket=self.bucket_name, + key=key, + cse_part_size=part_size, + cse_data_size=data_size + )) + self.assertIsNotNone(initresult) + self.assertIsInstance(initresult, oss.InitiateMultipartUploadResult) + self.assertEqual(200, initresult.status_code) + self.assertIsNotNone(initresult.cse_multipart_context) + self.assertIsInstance(initresult.cse_multipart_context, oss.EncryptionMultiPartContext) + cse_context = cast(oss.EncryptionMultiPartContext, initresult.cse_multipart_context) + self.assertEqual(part_size, cse_context.part_size) + self.assertEqual(data_size, cse_context.data_size) + self.assertIsInstance(cse_context.content_cipher, oss.crypto.ContentCipher) + + #upload part + part_number = 1 + upload_parts = [] + for start in range(0, data_size, part_size): + end = start + part_size + if end > data_size: + end = data_size + upresult = eclient.upload_part(oss.UploadPartRequest( + bucket=self.bucket_name, + key=key, + upload_id=initresult.upload_id, + part_number=part_number, + cse_multipart_context=cse_context, + body=data[start:end] + )) + self.assertIsNotNone(upresult) + self.assertIsInstance(upresult, oss.UploadPartResult) + self.assertEqual(200, upresult.status_code) + upload_parts.append(oss.UploadPart(part_number=part_number, etag=upresult.etag)) + part_number += 1 + + self.assertEqual(4, len(upload_parts)) + + #listpart + lpresult = eclient.list_parts(oss.ListPartsRequest( + bucket=self.bucket_name, + key=key, + upload_id=initresult.upload_id + )) + self.assertIsNotNone(lpresult) + self.assertIsInstance(lpresult, oss.ListPartsResult) + self.assertEqual(200, lpresult.status_code) + self.assertIsNotNone(lpresult.client_encryption_key) + self.assertIsNotNone(lpresult.client_encryption_start) + self.assertEqual(part_size, lpresult.client_encryption_part_size) + self.assertEqual(data_size, lpresult.client_encryption_data_size) + self.assertEqual("RSA/NONE/PKCS1Padding", lpresult.client_encryption_wrap_alg) + self.assertEqual("AES/CTR/NoPadding", lpresult.client_encryption_cek_alg) + + #complete + parts = sorted(upload_parts, key=lambda p: p.part_number) + cmresult = eclient.complete_multipart_upload(oss.CompleteMultipartUploadRequest( + bucket=self.bucket_name, + key=key, + upload_id=initresult.upload_id, + complete_multipart_upload=oss.CompleteMultipartUpload( + parts=parts + ) + )) + self.assertIsNotNone(cmresult) + self.assertIsInstance(cmresult, oss.CompleteMultipartUploadResult) + self.assertEqual(200, cmresult.status_code) + + # get object and check + rawresult = eclient.unwrap().get_object(oss.GetObjectRequest( + bucket=self.bucket_name, + key=key + )) + self.assertIsNotNone(rawresult) + self.assertIsInstance(rawresult, oss.GetObjectResult) + self.assertEqual(200, rawresult.status_code) + self.assertIsNotNone(rawresult.headers.get('x-oss-meta-client-side-encryption-start', None)) + self.assertIsNotNone(rawresult.headers.get('x-oss-meta-client-side-encryption-key', None)) + self.assertEqual('{"tag": "value"}', rawresult.headers.get('x-oss-meta-client-side-encryption-matdesc', None)) + self.assertEqual('AES/CTR/NoPadding', rawresult.headers.get('x-oss-meta-client-side-encryption-cek-alg', None)) + self.assertEqual('RSA/NONE/PKCS1Padding', rawresult.headers.get('x-oss-meta-client-side-encryption-wrap-alg', None)) + 
self.assertIsNone(rawresult.headers.get('x-oss-meta-client-side-encryption-unencrypted-content-md5', None)) + self.assertEqual(data_size, len(rawresult.body.content)) + self.assertNotEqual(data, rawresult.body.content) + + goresult = eclient.get_object(oss.GetObjectRequest( + bucket=self.bucket_name, + key=key + )) + self.assertIsNotNone(goresult) + self.assertIsInstance(goresult, oss.GetObjectResult) + self.assertEqual(200, goresult.status_code) + self.assertIsNotNone(goresult.headers.get('x-oss-meta-client-side-encryption-start', None)) + self.assertIsNotNone(goresult.headers.get('x-oss-meta-client-side-encryption-key', None)) + self.assertEqual('{"tag": "value"}', goresult.headers.get('x-oss-meta-client-side-encryption-matdesc', None)) + self.assertEqual('AES/CTR/NoPadding', goresult.headers.get('x-oss-meta-client-side-encryption-cek-alg', None)) + self.assertEqual('RSA/NONE/PKCS1Padding', goresult.headers.get('x-oss-meta-client-side-encryption-wrap-alg', None)) + self.assertIsNone(goresult.headers.get('x-oss-meta-client-side-encryption-unencrypted-content-md5', None)) + self.assertEqual(data_size, len(goresult.body.content)) + self.assertEqual(data, goresult.body.content) + + + def test_multipart_from_file(self): + part_size = 100 * 1024 + data_size = 3 * part_size + 1245 + data = random_str(data_size).encode() + key = 'multipart-file.bin' + mc = oss.crypto.MasterRsaCipher( + mat_desc={"tag": "value"}, + public_key=RSA_PUBLIC_KEY, + private_key=RSA_PRIVATE_KEY + ) + eclient = oss.EncryptionClient(self.client, mc) + + #init + initresult = eclient.initiate_multipart_upload(oss.InitiateMultipartUploadRequest( + bucket=self.bucket_name, + key=key, + cse_part_size=part_size, + cse_data_size=data_size + )) + self.assertIsNotNone(initresult) + self.assertIsInstance(initresult, oss.InitiateMultipartUploadResult) + self.assertEqual(200, initresult.status_code) + self.assertIsNotNone(initresult.cse_multipart_context) + self.assertIsInstance(initresult.cse_multipart_context, oss.EncryptionMultiPartContext) + cse_context = cast(oss.EncryptionMultiPartContext, initresult.cse_multipart_context) + self.assertEqual(part_size, cse_context.part_size) + self.assertEqual(data_size, cse_context.data_size) + self.assertIsInstance(cse_context.content_cipher, oss.crypto.ContentCipher) + + #upload part + part_number = 1 + upload_parts = [] + with tempfile.TemporaryFile('w+b') as f: + f.write(data) + for start in range(0, data_size, part_size): + n = part_size + if start + n > data_size: + n = data_size - start + reader = oss.io_utils.SectionReader(oss.io_utils.ReadAtReader(f), start, n) + upresult = eclient.upload_part(oss.UploadPartRequest( + bucket=self.bucket_name, + key=key, + upload_id=initresult.upload_id, + part_number=part_number, + cse_multipart_context=cse_context, + body=reader + )) + self.assertIsNotNone(upresult) + self.assertIsInstance(upresult, oss.UploadPartResult) + self.assertEqual(200, upresult.status_code) + upload_parts.append(oss.UploadPart(part_number=part_number, etag=upresult.etag)) + part_number += 1 + + self.assertEqual(4, len(upload_parts)) + + #listpart + lpresult = eclient.list_parts(oss.ListPartsRequest( + bucket=self.bucket_name, + key=key, + upload_id=initresult.upload_id + )) + self.assertIsNotNone(lpresult) + self.assertIsInstance(lpresult, oss.ListPartsResult) + self.assertEqual(200, lpresult.status_code) + self.assertIsNotNone(lpresult.client_encryption_key) + self.assertIsNotNone(lpresult.client_encryption_start) + self.assertEqual(part_size, 
lpresult.client_encryption_part_size) + self.assertEqual(data_size, lpresult.client_encryption_data_size) + self.assertEqual("RSA/NONE/PKCS1Padding", lpresult.client_encryption_wrap_alg) + self.assertEqual("AES/CTR/NoPadding", lpresult.client_encryption_cek_alg) + + #complete + parts = sorted(upload_parts, key=lambda p: p.part_number) + cmresult = eclient.complete_multipart_upload(oss.CompleteMultipartUploadRequest( + bucket=self.bucket_name, + key=key, + upload_id=initresult.upload_id, + complete_multipart_upload=oss.CompleteMultipartUpload( + parts=parts + ) + )) + self.assertIsNotNone(cmresult) + self.assertIsInstance(cmresult, oss.CompleteMultipartUploadResult) + self.assertEqual(200, cmresult.status_code) + + # get object and check + rawresult = eclient.unwrap().get_object(oss.GetObjectRequest( + bucket=self.bucket_name, + key=key + )) + self.assertIsNotNone(rawresult) + self.assertIsInstance(rawresult, oss.GetObjectResult) + self.assertEqual(200, rawresult.status_code) + self.assertIsNotNone(rawresult.headers.get('x-oss-meta-client-side-encryption-start', None)) + self.assertIsNotNone(rawresult.headers.get('x-oss-meta-client-side-encryption-key', None)) + self.assertEqual('{"tag": "value"}', rawresult.headers.get('x-oss-meta-client-side-encryption-matdesc', None)) + self.assertEqual('AES/CTR/NoPadding', rawresult.headers.get('x-oss-meta-client-side-encryption-cek-alg', None)) + self.assertEqual('RSA/NONE/PKCS1Padding', rawresult.headers.get('x-oss-meta-client-side-encryption-wrap-alg', None)) + self.assertIsNone(rawresult.headers.get('x-oss-meta-client-side-encryption-unencrypted-content-md5', None)) + self.assertEqual(data_size, len(rawresult.body.content)) + self.assertNotEqual(data, rawresult.body.content) + + goresult = eclient.get_object(oss.GetObjectRequest( + bucket=self.bucket_name, + key=key + )) + self.assertIsNotNone(goresult) + self.assertIsInstance(goresult, oss.GetObjectResult) + self.assertEqual(200, goresult.status_code) + self.assertIsNotNone(goresult.headers.get('x-oss-meta-client-side-encryption-start', None)) + self.assertIsNotNone(goresult.headers.get('x-oss-meta-client-side-encryption-key', None)) + self.assertEqual('{"tag": "value"}', goresult.headers.get('x-oss-meta-client-side-encryption-matdesc', None)) + self.assertEqual('AES/CTR/NoPadding', goresult.headers.get('x-oss-meta-client-side-encryption-cek-alg', None)) + self.assertEqual('RSA/NONE/PKCS1Padding', goresult.headers.get('x-oss-meta-client-side-encryption-wrap-alg', None)) + self.assertIsNone(goresult.headers.get('x-oss-meta-client-side-encryption-unencrypted-content-md5', None)) + self.assertEqual(data_size, len(goresult.body.content)) + self.assertEqual(data, goresult.body.content) + + + def test_compatibility(self): + + rsa_private = """-----BEGIN RSA PRIVATE KEY----- + MIICWwIBAAKBgQCokfiAVXXf5ImFzKDw+XO/UByW6mse2QsIgz3ZwBtMNu59fR5z + ttSx+8fB7vR4CN3bTztrP9A6bjoN0FFnhlQ3vNJC5MFO1PByrE/MNd5AAfSVba93 + I6sx8NSk5MzUCA4NJzAUqYOEWGtGBcom6kEF6MmR1EKib1Id8hpooY5xaQIDAQAB + AoGAOPUZgkNeEMinrw31U3b2JS5sepG6oDG2CKpPu8OtdZMaAkzEfVTJiVoJpP2Y + nPZiADhFW3e0ZAnak9BPsSsySRaSNmR465cG9tbqpXFKh9Rp/sCPo4Jq2n65yood + JBrnGr6/xhYvNa14sQ6xjjfSgRNBSXD1XXNF4kALwgZyCAECQQDV7t4bTx9FbEs5 + 36nAxPsPM6aACXaOkv6d9LXI7A0J8Zf42FeBV6RK0q7QG5iNNd1WJHSXIITUizVF + 6aX5NnvFAkEAybeXNOwUvYtkgxF4s28s6gn11c5HZw4/a8vZm2tXXK/QfTQrJVXp + VwxmSr0FAajWAlcYN/fGkX1pWA041CKFVQJAG08ozzekeEpAuByTIOaEXgZr5MBQ + gBbHpgZNBl8Lsw9CJSQI15wGfv6yDiLXsH8FyC9TKs+d5Tv4Cvquk0efOQJAd9OC + lCKFs48hdyaiz9yEDsc57PdrvRFepVdj/gpGzD14mVerJbOiOF6aSV19ot27u4on + 
Td/3aifYs0CveHzFPQJAWb4LCDwqLctfzziG7/S7Z74gyq5qZF4FUElOAZkz718E + yZvADwuz/4aK0od0lX9c4Jp7Mo5vQ4TvdoBnPuGoyw== + -----END RSA PRIVATE KEY-----""" + + objectname = 'enc-example.jpg' + example_data = b'' + with open("./tests/data/example.jpg", 'rb') as f: + example_data = f.read() + self.assertTrue(len(example_data) > 0) + + with open("./tests/data/enc-example.jpg", 'rb') as f: + result = self.client.put_object(oss.PutObjectRequest( + bucket=self.bucket_name, + key=objectname, + body=f, + metadata= { + "client-side-encryption-key": "nyXOp7delQ/MQLjKQMhHLaT0w7u2yQoDLkSnK8MFg/MwYdh4na4/LS8LLbLcM18m8I/ObWUHU775I50sJCpdv+f4e0jLeVRRiDFWe+uo7Puc9j4xHj8YB3QlcIOFQiTxHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=", + "client-side-encryption-start": "De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwox4WhLGng5DK2vNXxULmulMUUpYkdc9umqmDilgSy5Z3Foafw+v4JJThfw68T/9G2gxZLrQTbAlvFPFfPM9Ehk6cY4+8WpY32uN8w5vrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=", + "client-side-encryption-cek-alg": "AES/CTR/NoPadding", + "client-side-encryption-wrap-alg": "RSA/NONE/PKCS1Padding", + } + )) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + self.assertEqual(24, len(result.request_id)) + self.assertEqual(24, len(result.headers.get('x-oss-request-id'))) + + mc = oss.crypto.MasterRsaCipher(mat_desc={"tag": "value"}, private_key=rsa_private) + eclient = oss.EncryptionClient(self.client, mc) + + # read all + result = eclient.get_object(oss.GetObjectRequest( + bucket=self.bucket_name, + key=objectname + )) + self.assertEqual(example_data, result.body.content) + result.body.close() + + #range read and iter_bytes + for i in range(0, 33): + result = eclient.get_object(oss.GetObjectRequest( + bucket=self.bucket_name, + key=objectname, + range_header=f'bytes={i}-12345' + )) + size = 12345 - i + 1 + with result.body as f: + data = b'' + for d in f.iter_bytes(block_size=8*1024): + self.assertTrue(len(d) <= 8 *1024) + data += d + self.assertEqual(example_data[i:12346], data) + self.assertEqual(size, len(data)) + diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 0000000..9c43df7 --- /dev/null +++ b/tests/unit/__init__.py @@ -0,0 +1,109 @@ +# pylint: skip-file +from typing import Any +from alibabacloud_oss_v2 import _client +from alibabacloud_oss_v2 import config, credentials +from alibabacloud_oss_v2.types import HttpRequest, HttpResponse, HttpClient + +class MockHttpResponse(HttpResponse): + def __init__(self, **kwargs) -> None: + super(MockHttpResponse, self).__init__() + self._status_code = kwargs.pop("status_code", None) + self._reason = kwargs.pop("reason", None) + self._headers = kwargs.pop("headers", None) + self._body = kwargs.pop("body", None) + self._is_closed = False + self._is_stream_consumed = False + self._request: HttpRequest = None + + @property + def request(self) -> HttpRequest: + return self._request + + @property + def is_closed(self) -> bool: + return self._is_closed + + @property + def is_stream_consumed(self) -> bool: + return self._is_stream_consumed + + @property + def status_code(self) -> int: + return self._status_code or 0 + + @property + def headers(self): + return self._headers or {} + + @property + def reason(self) -> str: + return self._reason or '' + + @property + def content(self) -> bytes: + return self._body + + def __repr__(self) -> str: + return 'MockHttpResponse' + + def __enter__(self) -> "MockHttpResponse": + return self + + def __exit__(self, *args) -> None: + self.close() + + def close(self) -> None: + if not self.is_closed: + self._is_closed = 
True + + def read(self) -> bytes: + return self.content + + def iter_bytes(self, **kwargs): + data = b'' + block_size = kwargs.get('block_size', 8*1024) + if self._body is not None: + data = self._body + if not isinstance(self._body, (bytes, str)): + raise TypeError(f"unsupported type {type(self._body)}") + if isinstance(self._body, str): + data = self._body.encode() + + for i in range(0, len(data), block_size): + yield data[i : i + block_size] + +class MockHttpClient(HttpClient): + + def __init__(self, request_fn, response_fn, **kwargs) -> None: + super(MockHttpClient, self).__init__() + self._request_fn = request_fn + self._response_fn = response_fn + + def send(self, request: HttpRequest, **kwargs: Any) -> HttpResponse: + if self._request_fn is not None: + self._request_fn(request) + + if self._response_fn is not None: + response = self._response_fn() + response._request = request + return response + + raise NotImplementedError() + + def open(self) -> None: + return + + def close(self) -> None: + return + + +def mock_client(request_fn, response_fn, **kwargs): + cfg = config.load_default() + cfg.region = 'cn-hangzhou' + cfg.credentials_provider = credentials.AnonymousCredentialsProvider() + cfg.http_client = MockHttpClient( + request_fn=request_fn, + response_fn=response_fn, + **kwargs + ) + return _client._SyncClientImpl(cfg) \ No newline at end of file diff --git a/tests/unit/crypto/__init__.py b/tests/unit/crypto/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/crypto/test_aes_ctr.py b/tests/unit/crypto/test_aes_ctr.py new file mode 100644 index 0000000..f5fb69c --- /dev/null +++ b/tests/unit/crypto/test_aes_ctr.py @@ -0,0 +1,337 @@ +# pylint: skip-file +import unittest +import io +import base64 +from typing import cast, Any, Iterator +from alibabacloud_oss_v2.types import StreamBody +import alibabacloud_oss_v2.crypto.master_rsa_cipher as rsa_cipher +import alibabacloud_oss_v2.crypto.aes_ctr as aes_ctr +from alibabacloud_oss_v2.crypto.types import CipherData + + +rsa_private = """-----BEGIN RSA PRIVATE KEY----- +MIICWwIBAAKBgQCokfiAVXXf5ImFzKDw+XO/UByW6mse2QsIgz3ZwBtMNu59fR5z +ttSx+8fB7vR4CN3bTztrP9A6bjoN0FFnhlQ3vNJC5MFO1PByrE/MNd5AAfSVba93 +I6sx8NSk5MzUCA4NJzAUqYOEWGtGBcom6kEF6MmR1EKib1Id8hpooY5xaQIDAQAB +AoGAOPUZgkNeEMinrw31U3b2JS5sepG6oDG2CKpPu8OtdZMaAkzEfVTJiVoJpP2Y +nPZiADhFW3e0ZAnak9BPsSsySRaSNmR465cG9tbqpXFKh9Rp/sCPo4Jq2n65yood +JBrnGr6/xhYvNa14sQ6xjjfSgRNBSXD1XXNF4kALwgZyCAECQQDV7t4bTx9FbEs5 +36nAxPsPM6aACXaOkv6d9LXI7A0J8Zf42FeBV6RK0q7QG5iNNd1WJHSXIITUizVF +6aX5NnvFAkEAybeXNOwUvYtkgxF4s28s6gn11c5HZw4/a8vZm2tXXK/QfTQrJVXp +VwxmSr0FAajWAlcYN/fGkX1pWA041CKFVQJAG08ozzekeEpAuByTIOaEXgZr5MBQ +gBbHpgZNBl8Lsw9CJSQI15wGfv6yDiLXsH8FyC9TKs+d5Tv4Cvquk0efOQJAd9OC +lCKFs48hdyaiz9yEDsc57PdrvRFepVdj/gpGzD14mVerJbOiOF6aSV19ot27u4on +Td/3aifYs0CveHzFPQJAWb4LCDwqLctfzziG7/S7Z74gyq5qZF4FUElOAZkz718E +yZvADwuz/4aK0od0lX9c4Jp7Mo5vQ4TvdoBnPuGoyw== +-----END RSA PRIVATE KEY-----""" + + +class StubStreamBody(StreamBody): + def __init__( + self, + data: bytes, + ) -> None: + self._data = data + def __enter__(self) -> "StubStreamBody": + return self + + def __exit__(self, *args: Any) -> None: + pass + + @property + def is_closed(self) -> bool: + return False + + @property + def is_stream_consumed(self) -> bool: + return False + + @property + def content(self) -> bytes: + return self._data + + + def read(self) -> bytes: + return self._data + + def close(self) -> None: + pass + + def iter_bytes(self, **kwargs: Any) -> Iterator[bytes]: + block_size =
kwargs.get("block_size", 4 * 1024) + for d in range(0, len(self._data), block_size): + end = d + block_size + if end > len(self._data): + end = len(self._data) + yield self._data[d:end] + +class TestAesCtr(unittest.TestCase): + def test_constructor(self): + cipher = rsa_cipher.MasterRsaCipher(None, None, rsa_private) + encrypted_key = "nyXOp7delQ/MQLjKQMhHLaT0w7u2yQoDLkSnK8MFg/MwYdh4na4/LS8LLbLcM18m8I/ObWUHU775I50sJCpdv+f4e0jLeVRRiDFWe+uo7Puc9j4xHj8YB3QlcIOFQiTxHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=" + encrypted_iv = "De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwox4WhLGng5DK2vNXxULmulMUUpYkdc9umqmDilgSy5Z3Foafw+v4JJThfw68T/9G2gxZLrQTbAlvFPFfPM9Ehk6cY4+8WpY32uN8w5vrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=" + key = cipher.decrypt(base64.b64decode(encrypted_key)) + iv = cipher.decrypt(base64.b64decode(encrypted_iv)) + cipher_data = CipherData( + iv = iv, + key=key, + encrypted_iv=base64.b64decode(encrypted_iv), + encrypted_key=base64.b64decode(encrypted_key), + mat_desc='{"key": "value"}', + wrap_algorithm='RSA/NONE/PKCS1Padding', + cek_algorithm='AES/CTR/NoPadding' + ) + + cipher = aes_ctr._AesCtr( + cipher_data=cipher_data, + offset=0 + ) + + self.assertEqual(iv, cipher.cipher_data.iv) + self.assertEqual(key, cipher.cipher_data.key) + self.assertEqual('{"key": "value"}', cipher.cipher_data.mat_desc) + self.assertEqual('RSA/NONE/PKCS1Padding', cipher.cipher_data.wrap_algorithm) + self.assertEqual('AES/CTR/NoPadding', cipher.cipher_data.cek_algorithm) + self.assertEqual(0, cipher.offset) + + + try: + cipher = aes_ctr._AesCtr( + cipher_data=cipher_data, + offset=11 + ) + self.fail('should not here') + except ValueError as err: + self.assertIn("offset is not align to encrypt block", str(err)) + + def test_encrypt(self): + cipher = rsa_cipher.MasterRsaCipher(None, None, rsa_private) + encrypted_key = "nyXOp7delQ/MQLjKQMhHLaT0w7u2yQoDLkSnK8MFg/MwYdh4na4/LS8LLbLcM18m8I/ObWUHU775I50sJCpdv+f4e0jLeVRRiDFWe+uo7Puc9j4xHj8YB3QlcIOFQiTxHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=" + encrypted_iv = "De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwox4WhLGng5DK2vNXxULmulMUUpYkdc9umqmDilgSy5Z3Foafw+v4JJThfw68T/9G2gxZLrQTbAlvFPFfPM9Ehk6cY4+8WpY32uN8w5vrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=" + key = cipher.decrypt(base64.b64decode(encrypted_key)) + iv = cipher.decrypt(base64.b64decode(encrypted_iv)) + cipher_data = CipherData( + iv = iv, + key=key, + encrypted_iv=base64.b64decode(encrypted_iv), + encrypted_key=base64.b64decode(encrypted_key), + mat_desc='', + wrap_algorithm='RSA/NONE/PKCS1Padding', + cek_algorithm='AES/CTR/NoPadding' + ) + + cipher = aes_ctr._AesCtr( + cipher_data=cipher_data, + offset=0 + ) + + example_data = b'' + with open("./tests/data/example.jpg", 'rb') as f: + example_data = f.read() + self.assertTrue(len(example_data) > 0) + + enc_example_data = b'' + with open("./tests/data/enc-example.jpg", 'rb') as f: + enc_example_data = f.read() + + # encrypt bytes + edata = cipher.encrypt(example_data) + self.assertIsInstance(edata, bytes) + self.assertEqual(enc_example_data, edata) + + # encrypt str + edata = cipher.encrypt('hello world') + self.assertIsInstance(edata, bytes) + self.assertNotEqual('hello world'.encode(), edata) + eedata = cipher.decrypt(edata) + self.assertEqual('hello world'.encode(), eedata) + + # file-like + edata = b'' + with open("./tests/data/example.jpg", 'rb') as f: + encf = cipher.encrypt(f) + self.assertIsInstance(encf, aes_ctr.FileLikeEncryptor) + encf = cast(aes_ctr.FileLikeEncryptor, encf) + self.assertEqual(0, len(edata)) + while True: + d = encf.read(8*1024) + edata += d + if len(d) < 
8*1024: + break + self.assertEqual(enc_example_data, edata) + + #seek + self.assertEqual(len(enc_example_data), encf.tell()) + encf.seek(16, io.SEEK_SET) + self.assertEqual(16, encf.tell()) + rlen = 8*1024 + edata = encf.read(8*1024) + self.assertEqual(enc_example_data[16:16 + rlen], edata) + + encf.seek(128, io.SEEK_SET) + self.assertEqual(128, encf.tell()) + edata = encf.read() + self.assertEqual(enc_example_data[128:], edata) + + # iterator bytes + cipher.no_bytes = True + eiter = cipher.encrypt(example_data) + self.assertIsInstance(eiter, aes_ctr.IteratorEncryptor) + edata = b'' + for d in eiter: + edata += d + self.assertEqual(enc_example_data, edata) + + # iterator str + cipher.no_str = True + eiter = cipher.encrypt('1234567890abcdefghijklmnopqrstuvwxyz') + self.assertIsInstance(eiter, aes_ctr.IteratorEncryptor) + edata = b'' + for d in eiter: + edata += d + + cipher.no_str = False + edata1 = cipher.encrypt('1234567890abcdefghijklmnopqrstuvwxyz') + self.assertEqual(edata1, edata) + self.assertEqual(len(edata1), len('1234567890abcdefghijklmnopqrstuvwxyz')) + + # file-like + offset + edata = b'' + with open("./tests/data/example.jpg", 'rb') as f: + # seek ahead offset + cipher = aes_ctr._AesCtr( + cipher_data=cipher_data, + offset=128 + ) + f.seek(128, io.SEEK_SET) + encf = cipher.encrypt(f) + edata = encf.read() + self.assertEqual(enc_example_data[128:], edata) + + + def test_encrypt_exception(self): + cipher = rsa_cipher.MasterRsaCipher(None, None, rsa_private) + encrypted_key = "nyXOp7delQ/MQLjKQMhHLaT0w7u2yQoDLkSnK8MFg/MwYdh4na4/LS8LLbLcM18m8I/ObWUHU775I50sJCpdv+f4e0jLeVRRiDFWe+uo7Puc9j4xHj8YB3QlcIOFQiTxHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=" + encrypted_iv = "De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwox4WhLGng5DK2vNXxULmulMUUpYkdc9umqmDilgSy5Z3Foafw+v4JJThfw68T/9G2gxZLrQTbAlvFPFfPM9Ehk6cY4+8WpY32uN8w5vrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=" + key = cipher.decrypt(base64.b64decode(encrypted_key)) + iv = cipher.decrypt(base64.b64decode(encrypted_iv)) + cipher_data = CipherData( + iv = iv, + key=key, + encrypted_iv=base64.b64decode(encrypted_iv), + encrypted_key=base64.b64decode(encrypted_key), + mat_desc='', + wrap_algorithm='RSA/NONE/PKCS1Padding', + cek_algorithm='AES/CTR/NoPadding' + ) + + # unsupport type + cipher = aes_ctr._AesCtr( + cipher_data=cipher_data, + offset=0 + ) + try: + cipher.encrypt(123) + self.fail('should not here') + except TypeError as err: + self.assertIn("src is not str/bytes/file-like/Iterable type, got", str(err)) + + + # file-like + edata = b'' + with open("./tests/data/example.jpg", 'rb') as f: + encf = cipher.encrypt(f) + self.assertIsInstance(encf, aes_ctr.FileLikeEncryptor) + encf = cast(aes_ctr.FileLikeEncryptor, encf) + self.assertEqual(0, len(edata)) + + # offset not align to encrypt block + encf.seek(17, io.SEEK_SET) + try: + edata = encf.read(8*1024) + self.fail('should not here') + except ValueError as err: + self.assertIn("offset is not align to encrypt block", str(err)) + + # offset not align to encrypt block + encf.seek(32, io.SEEK_SET) + try: + edata = encf.read(1234) + self.fail('should not here') + except ValueError as err: + self.assertIn("n is not align to encrypt block", str(err)) + + # seek ahead offset + cipher = aes_ctr._AesCtr( + cipher_data=cipher_data, + offset=128 + ) + f.seek(128, io.SEEK_SET) + encf = cipher.encrypt(f) + try: + encf.seek(0, io.SEEK_SET) + self.fail('should not here') + except ValueError as err: + self.assertIn(", can not creates cipher.", str(err)) + + + def test_decrypt(self): + cipher = 
rsa_cipher.MasterRsaCipher(None, None, rsa_private) + encrypted_key = "nyXOp7delQ/MQLjKQMhHLaT0w7u2yQoDLkSnK8MFg/MwYdh4na4/LS8LLbLcM18m8I/ObWUHU775I50sJCpdv+f4e0jLeVRRiDFWe+uo7Puc9j4xHj8YB3QlcIOFQiTxHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=" + encrypted_iv = "De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwox4WhLGng5DK2vNXxULmulMUUpYkdc9umqmDilgSy5Z3Foafw+v4JJThfw68T/9G2gxZLrQTbAlvFPFfPM9Ehk6cY4+8WpY32uN8w5vrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=" + key = cipher.decrypt(base64.b64decode(encrypted_key)) + iv = cipher.decrypt(base64.b64decode(encrypted_iv)) + cipher_data = CipherData( + iv = iv, + key=key, + encrypted_iv=base64.b64decode(encrypted_iv), + encrypted_key=base64.b64decode(encrypted_key), + mat_desc='', + wrap_algorithm='RSA/NONE/PKCS1Padding', + cek_algorithm='AES/CTR/NoPadding' + ) + + cipher = aes_ctr._AesCtr( + cipher_data=cipher_data, + offset=0 + ) + + example_data = b'' + with open("./tests/data/example.jpg", 'rb') as f: + example_data = f.read() + self.assertTrue(len(example_data) > 0) + + enc_example_data = b'' + with open("./tests/data/enc-example.jpg", 'rb') as f: + enc_example_data = f.read() + + # decrypt bytes + data = cipher.decrypt(enc_example_data) + self.assertIsInstance(data, bytes) + self.assertEqual(example_data, data) + + # decrypt StreamBody + dataf = cipher.decrypt(StubStreamBody(data=enc_example_data)) + self.assertIsInstance(dataf, aes_ctr.StreamBodyDecryptor) + dataf = cast(aes_ctr.StreamBodyDecryptor, dataf) + self.assertEqual(example_data, dataf.content) + self.assertEqual(example_data, dataf.read()) + + data = b'' + check_once = True + for d in dataf.iter_bytes(): + if check_once: + self.assertEqual(4*1024, len(d)) + check_once = False + data += d + self.assertEqual(example_data, data) + + data = b'' + check_once = True + for d in dataf.iter_bytes(block_size=8*1024): + if check_once: + self.assertEqual(8*1024, len(d)) + check_once = False + data += d + self.assertEqual(example_data, data) + diff --git a/tests/unit/crypto/test_master_rsa_cipher.py b/tests/unit/crypto/test_master_rsa_cipher.py new file mode 100644 index 0000000..7437748 --- /dev/null +++ b/tests/unit/crypto/test_master_rsa_cipher.py @@ -0,0 +1,116 @@ +# pylint: skip-file +import unittest +import alibabacloud_oss_v2.crypto.master_rsa_cipher as rsa_cipher + +RSA_PUBLIC_KEY = """-----BEGIN PUBLIC KEY----- +MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCokfiAVXXf5ImFzKDw+XO/UByW +6mse2QsIgz3ZwBtMNu59fR5zttSx+8fB7vR4CN3bTztrP9A6bjoN0FFnhlQ3vNJC +5MFO1PByrE/MNd5AAfSVba93I6sx8NSk5MzUCA4NJzAUqYOEWGtGBcom6kEF6MmR +1EKib1Id8hpooY5xaQIDAQAB +-----END PUBLIC KEY-----""" + +RSA_PRIVATE_KEY = """-----BEGIN PRIVATE KEY----- +MIICdQIBADANBgkqhkiG9w0BAQEFAASCAl8wggJbAgEAAoGBAKiR+IBVdd/kiYXM +oPD5c79QHJbqax7ZCwiDPdnAG0w27n19HnO21LH7x8Hu9HgI3dtPO2s/0DpuOg3Q +UWeGVDe80kLkwU7U8HKsT8w13kAB9JVtr3cjqzHw1KTkzNQIDg0nMBSpg4RYa0YF +yibqQQXoyZHUQqJvUh3yGmihjnFpAgMBAAECgYA49RmCQ14QyKevDfVTdvYlLmx6 +kbqgMbYIqk+7w611kxoCTMR9VMmJWgmk/Zic9mIAOEVbd7RkCdqT0E+xKzJJFpI2 +ZHjrlwb21uqlcUqH1Gn+wI+jgmrafrnKih0kGucavr/GFi81rXixDrGON9KBE0FJ +cPVdc0XiQAvCBnIIAQJBANXu3htPH0VsSznfqcDE+w8zpoAJdo6S/p30tcjsDQnx +l/jYV4FXpErSrtAbmI013VYkdJcghNSLNUXppfk2e8UCQQDJt5c07BS9i2SDEXiz +byzqCfXVzkdnDj9ry9mba1dcr9B9NCslVelXDGZKvQUBqNYCVxg398aRfWlYDTjU +IoVVAkAbTyjPN6R4SkC4HJMg5oReBmvkwFCAFsemBk0GXwuzD0IlJAjXnAZ+/rIO +ItewfwXIL1Mqz53lO/gK+q6TR585AkB304KUIoWzjyF3JqLP3IQOxzns92u9EV6l +V2P+CkbMPXiZV6sls6I4XppJXX2i3bu7iidN3/dqJ9izQK94fMU9AkBZvgsIPCot +y1/POIbv9LtnviDKrmpkXgVQSU4BmTPvXwTJm8APC7P/horSh3SVf1zgmnsyjm9D +hO92gGc+4ajL +-----END PRIVATE
KEY-----""" + +RSA_PUBLIC_KEY_PKS1 = """-----BEGIN RSA PUBLIC KEY----- +MIGJAoGBAKiR+IBVdd/kiYXMoPD5c79QHJbqax7ZCwiDPdnAG0w27n19HnO21LH7 +x8Hu9HgI3dtPO2s/0DpuOg3QUWeGVDe80kLkwU7U8HKsT8w13kAB9JVtr3cjqzHw +1KTkzNQIDg0nMBSpg4RYa0YFyibqQQXoyZHUQqJvUh3yGmihjnFpAgMBAAE= +-----END RSA PUBLIC KEY-----""" + +RSA_PRIVATE_KEY_PKS1 = """-----BEGIN RSA PRIVATE KEY----- +MIICWwIBAAKBgQCokfiAVXXf5ImFzKDw+XO/UByW6mse2QsIgz3ZwBtMNu59fR5z +ttSx+8fB7vR4CN3bTztrP9A6bjoN0FFnhlQ3vNJC5MFO1PByrE/MNd5AAfSVba93 +I6sx8NSk5MzUCA4NJzAUqYOEWGtGBcom6kEF6MmR1EKib1Id8hpooY5xaQIDAQAB +AoGAOPUZgkNeEMinrw31U3b2JS5sepG6oDG2CKpPu8OtdZMaAkzEfVTJiVoJpP2Y +nPZiADhFW3e0ZAnak9BPsSsySRaSNmR465cG9tbqpXFKh9Rp/sCPo4Jq2n65yood +JBrnGr6/xhYvNa14sQ6xjjfSgRNBSXD1XXNF4kALwgZyCAECQQDV7t4bTx9FbEs5 +36nAxPsPM6aACXaOkv6d9LXI7A0J8Zf42FeBV6RK0q7QG5iNNd1WJHSXIITUizVF +6aX5NnvFAkEAybeXNOwUvYtkgxF4s28s6gn11c5HZw4/a8vZm2tXXK/QfTQrJVXp +VwxmSr0FAajWAlcYN/fGkX1pWA041CKFVQJAG08ozzekeEpAuByTIOaEXgZr5MBQ +gBbHpgZNBl8Lsw9CJSQI15wGfv6yDiLXsH8FyC9TKs+d5Tv4Cvquk0efOQJAd9OC +lCKFs48hdyaiz9yEDsc57PdrvRFepVdj/gpGzD14mVerJbOiOF6aSV19ot27u4on +Td/3aifYs0CveHzFPQJAWb4LCDwqLctfzziG7/S7Z74gyq5qZF4FUElOAZkz718E +yZvADwuz/4aK0od0lX9c4Jp7Mo5vQ4TvdoBnPuGoyw== +-----END RSA PRIVATE KEY-----""" + +class TestMasterRsaCipher(unittest.TestCase): + def test_normal(self): + cipher = rsa_cipher.MasterRsaCipher(None, None, None) + self.assertIsNotNone(cipher) + self.assertEqual('', cipher.get_mat_desc()) + self.assertEqual('RSA/NONE/PKCS1Padding', cipher.get_wrap_algorithm()) + + cipher = rsa_cipher.MasterRsaCipher({'key':'value'}, RSA_PUBLIC_KEY, RSA_PRIVATE_KEY) + self.assertIsNotNone(cipher) + self.assertEqual('{"key": "value"}', cipher.get_mat_desc()) + self.assertEqual('RSA/NONE/PKCS1Padding', cipher.get_wrap_algorithm()) + data = 'hello world'.encode() + edata = cipher.encrypt(data) + self.assertNotEqual(edata, data) + eedata = cipher.decrypt(edata) + self.assertEqual(eedata, data) + + cipher = rsa_cipher.MasterRsaCipher({'key1':'value'}, RSA_PUBLIC_KEY_PKS1, RSA_PRIVATE_KEY_PKS1) + self.assertIsNotNone(cipher) + self.assertEqual('{"key1": "value"}', cipher.get_mat_desc()) + self.assertEqual('RSA/NONE/PKCS1Padding', cipher.get_wrap_algorithm()) + data = 'hello world 123'.encode() + edata = cipher.encrypt(data) + self.assertNotEqual(edata, data) + eedata = cipher.decrypt(edata) + self.assertEqual(eedata, data) + + cipher_mix = rsa_cipher.MasterRsaCipher({'key':'value'}, RSA_PUBLIC_KEY, RSA_PRIVATE_KEY_PKS1) + data = 'hello world 123 mix'.encode() + edata = cipher_mix.encrypt(data) + self.assertNotEqual(edata, data) + eedata = cipher_mix.decrypt(edata) + self.assertEqual(eedata, data) + + def test_error(self): + try: + cipher = rsa_cipher.MasterRsaCipher({'key':'value'}, 'RSA_PUBLIC_KEY', RSA_PRIVATE_KEY) + self.assertIsNotNone(cipher) + self.fail('should not here') + except Exception as err: + self.assertIn("RSA key format is not supported", str(err)) + + try: + cipher = rsa_cipher.MasterRsaCipher({'key':'value'}, RSA_PUBLIC_KEY, 'RSA_PRIVATE_KEY') + self.assertIsNotNone(cipher) + self.fail('should not here') + except Exception as err: + self.assertIn("RSA key format is not supported", str(err)) + + cipher = rsa_cipher.MasterRsaCipher({'key':'value'}, None, None) + self.assertIsNotNone(cipher) + + try: + data = 'hello world 123'.encode() + _ = cipher.encrypt(data) + self.fail('should not here') + except Exception as err: + self.assertIn("RSA public key is none or invalid.", str(err)) + + + try: + data = 'hello world 123'.encode() + _ = cipher.decrypt(data) + 
self.fail('should not here') + except Exception as err: + self.assertIn("RSA private key is none or invalid.", str(err)) diff --git a/tests/unit/models/__init__.py b/tests/unit/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/models/test_bucket_basic.py b/tests/unit/models/test_bucket_basic.py new file mode 100644 index 0000000..14f9034 --- /dev/null +++ b/tests/unit/models/test_bucket_basic.py @@ -0,0 +1,1639 @@ +# pylint: skip-file +import unittest +import xml.etree.ElementTree as ET +from alibabacloud_oss_v2 import serde +from alibabacloud_oss_v2.models import bucket_basic as model +from alibabacloud_oss_v2.types import OperationInput, OperationOutput, CaseInsensitiveDict +from .. import MockHttpResponse +import datetime + +class TestPutBucket(unittest.TestCase): + def test_constructor_types(self): + cfg = model.CreateBucketConfiguration() + self.assertIsNone(cfg.storage_class) + self.assertIsNone(cfg.data_redundancy_type) + self.assertIsInstance(cfg, serde.Model) + + cfg = model.CreateBucketConfiguration( + storage_class='IA', + data_redundancy_type='LZR' + ) + self.assertEqual('IA', cfg.storage_class) + self.assertEqual('LZR', cfg.data_redundancy_type) + + cfg = model.CreateBucketConfiguration( + storage_class='Cold', + invalid_field='invalid_field' + ) + self.assertTrue(hasattr(cfg, 'storage_class')) + self.assertEqual('Cold', cfg.storage_class) + self.assertFalse(hasattr(cfg, 'invalid_field')) + + def test_serialize_types(self): + cfg = model.CreateBucketConfiguration() + xml_data = serde.serialize_xml(cfg) + root = ET.fromstring(xml_data) + self.assertEqual('CreateBucketConfiguration', root.tag) + self.assertEqual(None, root.findtext('StorageClass')) + self.assertEqual(None, root.findtext('DataRedundancyType')) + + cfg = model.CreateBucketConfiguration( + storage_class='Standard', + ) + xml_data = serde.serialize_xml(cfg) + root = ET.fromstring(xml_data) + self.assertEqual('CreateBucketConfiguration', root.tag) + self.assertEqual('Standard', root.findtext('StorageClass')) + self.assertEqual(None, root.findtext('DataRedundancyType')) + + cfg = model.CreateBucketConfiguration( + storage_class='Standard', + data_redundancy_type='LRS' + ) + xml_data = serde.serialize_xml(cfg) + root = ET.fromstring(xml_data) + self.assertEqual('CreateBucketConfiguration', root.tag) + self.assertEqual('Standard', root.findtext('StorageClass')) + self.assertEqual('LRS', root.findtext('DataRedundancyType')) + + def test_deserialize_types(self): + xml_data = r'''<?xml version="1.0" encoding="UTF-8"?> + <CreateBucketConfiguration> + <StorageClass>Standard</StorageClass> + <DataRedundancyType>LRS</DataRedundancyType> + </CreateBucketConfiguration>''' + + cfg = model.CreateBucketConfiguration() + serde.deserialize_xml(xml_data=xml_data, obj=cfg) + self.assertEqual('Standard', cfg.storage_class) + self.assertEqual('LRS', cfg.data_redundancy_type) + + def test_constructor_request(self): + request = model.PutBucketRequest(bucket=None) + self.assertIsNone(request.bucket) + self.assertIsNone(request.acl) + self.assertIsNone(request.resource_group_id) + self.assertIsNone(request.create_bucket_configuration) + self.assertFalse(hasattr(request, 'headers')) + self.assertFalse(hasattr(request, 'parameters')) + self.assertFalse(hasattr(request, 'payload')) + self.assertIsInstance(request, serde.RequestModel) + + request = model.PutBucketRequest( + bucket='bucket', + acl='acl', + resource_group_id='rg-id', + create_bucket_configuration=model.CreateBucketConfiguration( + storage_class='Standard' + ), + ) + self.assertEqual('bucket', request.bucket) + self.assertEqual('acl', request.acl) + self.assertEqual('rg-id', request.resource_group_id) +
self.assertEqual( + 'Standard', request.create_bucket_configuration.storage_class) + self.assertEqual( + None, request.create_bucket_configuration.data_redundancy_type) + + # common headers & parameters & payload + request = model.PutBucketRequest( + bucket='bucket', + acl='acl', + headers={'key1': 'value1'}, + parameters={'parm1': 'value1'}, + payload='hello world', + ) + self.assertEqual('bucket', request.bucket) + self.assertEqual('acl', request.acl) + self.assertDictEqual({'key1': 'value1'}, request.headers) + self.assertDictEqual({'parm1': 'value1'}, request.parameters) + self.assertEqual('hello world', request.payload) + + def test_serialize_request(self): + request = model.PutBucketRequest( + bucket='bucket', + acl='acl', + resource_group_id='rg-id', + create_bucket_configuration=model.CreateBucketConfiguration( + storage_class='Standard' + ), + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='PutBucket', + method='PUT', + bucket=request.bucket, + )) + + self.assertEqual('PutBucket', op_input.op_name) + self.assertEqual('PUT', op_input.method) + self.assertEqual('bucket', op_input.bucket) + + self.assertEqual('acl', op_input.headers.get('x-oss-acl')) + self.assertEqual( + 'rg-id', op_input.headers.get('x-oss-resource-group-id')) + self.assertEqual(0, len(op_input.parameters.items())) + + root = ET.fromstring(op_input.body) + self.assertEqual('CreateBucketConfiguration', root.tag) + self.assertEqual('Standard', root.findtext('StorageClass')) + self.assertEqual(None, root.findtext('DataRedundancyType')) + + def test_constructor_result(self): + result = model.PutBucketResult() + self.assertIsInstance(result, serde.ResultModel) + + def test_deserialize_result(self): + xml_data = None + result = model.PutBucketResult() + serde.deserialize_output( + result, + OperationOutput( + status='OK', + status_code=200, + headers=CaseInsensitiveDict({ + 'x-oss-request-id': '123' + }), + http_response=MockHttpResponse( + status_code=200, + reason='OK', + headers={'x-oss-request-id': 'id-1234'}, + body=xml_data, + ) + ) + ) + self.assertEqual('OK', result.status) + self.assertEqual(200, result.status_code) + self.assertEqual('123', result.request_id) + + +class TestPutBucketAcl(unittest.TestCase): + def test_constructor_request(self): + request = model.PutBucketAclRequest(bucket='example-bucket') + self.assertIsNotNone(request.bucket) + self.assertIsNone(request.acl) + self.assertIsInstance(request, serde.Model) + + request = model.PutBucketAclRequest( + bucket='example-bucket', + acl='private' + ) + self.assertEqual('example-bucket', request.bucket) + self.assertEqual('private', request.acl) + + request = model.PutBucketAclRequest( + bucket='example-bucket', + acl='public-read-write', + invalid_field='invalid_field' + ) + self.assertTrue(hasattr(request, 'bucket')) + self.assertEqual('example-bucket', request.bucket) + self.assertTrue(hasattr(request, 'acl')) + self.assertEqual('public-read-write', request.acl) + self.assertFalse(hasattr(request, 'invalid_field')) + + + def test_serialize_request(self): + request = model.PutBucketAclRequest( + bucket='example-bucket', + acl='public-read-write', + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='PutBucketAcl', + method='PUT', + bucket=request.bucket, + )) + + self.assertEqual('PutBucketAcl', op_input.op_name) + self.assertEqual('PUT', op_input.method) + self.assertEqual('example-bucket', op_input.bucket) + self.assertEqual('public-read-write', op_input.headers.get('x-oss-acl')) + self.assertEqual(0, 
len(op_input.parameters.items())) + + def test_constructor_result(self): + result = model.PutBucketAclResult() + self.assertIsInstance(result, serde.ResultModel) + + def test_deserialize_result(self): + xml_data = None + result = model.PutBucketAclResult() + serde.deserialize_output( + result, + OperationOutput( + status='OK', + status_code=200, + headers=CaseInsensitiveDict({ + 'x-oss-request-id': '123' + }), + http_response=MockHttpResponse( + status_code=200, + reason='OK', + headers={'x-oss-request-id': 'id-1234'}, + body=xml_data, + ) + ) + ) + self.assertEqual('OK', result.status) + self.assertEqual(200, result.status_code) + self.assertEqual('123', result.request_id) + +class TestGetBucketAcl(unittest.TestCase): + def test_constructor_request(self): + request = model.GetBucketAclRequest(bucket='example-bucket') + self.assertIsNotNone(request.bucket) + self.assertIsInstance(request, serde.Model) + + request = model.GetBucketAclRequest( + bucket='example-bucket', + ) + self.assertEqual('example-bucket', request.bucket) + + request = model.GetBucketAclRequest( + bucket='example-bucket', + invalid_field='invalid_field' + ) + self.assertTrue(hasattr(request, 'bucket')) + self.assertEqual('example-bucket', request.bucket) + self.assertFalse(hasattr(request, 'invalid_field')) + + def test_serialize_request(self): + request = model.GetBucketAclRequest( + bucket='example-bucket', + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='GetBucketAcl', + method='GET', + bucket=request.bucket, + )) + + self.assertEqual('GetBucketAcl', op_input.op_name) + self.assertEqual('GET', op_input.method) + self.assertEqual('example-bucket', op_input.bucket) + self.assertEqual(0, len(op_input.parameters.items())) + + def test_constructor_result(self): + result = model.GetBucketAclResult() + self.assertIsNone(result.acl) + self.assertIsInstance(result, serde.Model) + + result = model.GetBucketAclResult( + acl='public-read-write', + owner=model.Owner( + id='0022012****', + display_name='user_example', + ), + ) + self.assertEqual('public-read-write', result.acl) + self.assertEqual('0022012****', result.owner.id) + self.assertEqual('user_example', result.owner.display_name) + + result = model.GetBucketAclResult( + acl='public-read-write', + invalid_field='invalid_field' + ) + self.assertTrue(hasattr(result, 'acl')) + self.assertEqual('public-read-write', result.acl) + self.assertFalse(hasattr(result, 'invalid_field')) + + def test_deserialize_result(self): + xml_data = r'''<?xml version="1.0" encoding="UTF-8"?> + <AccessControlPolicy> + <Owner> + <ID>0022012****</ID> + <DisplayName>user_example</DisplayName> + </Owner> + <AccessControlList> + <Grant>public-read</Grant> + </AccessControlList> + </AccessControlPolicy> + ''' + + result = model.GetBucketAclResult() + serde.deserialize_xml(xml_data=xml_data, obj=result) + self.assertEqual('0022012****', result.owner.id) + self.assertEqual('user_example', result.owner.display_name) + self.assertEqual('public-read', result.acl) + + +class TestListObjectsV2(unittest.TestCase): + def test_constructor_request(self): + request = model.ListObjectsV2Request(bucket='example-bucket') + self.assertIsNotNone(request.bucket) + self.assertIsInstance(request, serde.Model) + + request = model.ListObjectsV2Request( + bucket='example-bucket', + delimiter='/', + start_after='b', + encoding_type='url', + continuation_token='ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', + max_keys=10, + prefix='aaa', + fetch_owner=True, + request_payer='requester', + ) + self.assertEqual('example-bucket', request.bucket) + self.assertEqual('/', request.delimiter) + self.assertEqual('b', request.start_after) + self.assertEqual('url', request.encoding_type) +
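(Aside: the continuation_token/max_keys fields exercised by the assertions around here are what drive paging. A hedged sketch of the usual loop; the client method name list_objects_v2 is an assumption here, not shown in this file:)

token = None
while True:
    page = client.list_objects_v2(model.ListObjectsV2Request(
        bucket='example-bucket',
        continuation_token=token,
        max_keys=10,
    ))
    for obj in page.contents or []:
        print(obj.key)  # each entry is an ObjectProperties
    if not page.is_truncated:
        break
    token = page.next_continuation_token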
self.assertEqual('ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', request.continuation_token) + self.assertEqual(10, request.max_keys) + self.assertEqual('aaa', request.prefix) + self.assertEqual(True, request.fetch_owner) + self.assertEqual('requester', request.request_payer) + + request = model.ListObjectsV2Request( + bucket='example-bucket', + delimiter='/', + start_after='b', + encoding_type='url', + continuation_token='ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', + max_keys=10, + prefix='aaa', + fetch_owner=True, + request_payer='requester', + invalid_field='invalid_field' + ) + self.assertTrue(hasattr(request, 'bucket')) + self.assertTrue(hasattr(request, 'delimiter')) + self.assertTrue(hasattr(request, 'start_after')) + self.assertTrue(hasattr(request, 'encoding_type')) + self.assertTrue(hasattr(request, 'continuation_token')) + self.assertTrue(hasattr(request, 'max_keys')) + self.assertTrue(hasattr(request, 'prefix')) + self.assertTrue(hasattr(request, 'fetch_owner')) + self.assertTrue(hasattr(request, 'request_payer')) + self.assertFalse(hasattr(request, 'invalid_field')) + self.assertEqual('example-bucket', request.bucket) + + + + def test_serialize_request(self): + request = model.ListObjectsV2Request( + bucket='example-bucket', + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='ListObjectsV2', + method='GET', + bucket=request.bucket, + )) + + self.assertEqual('ListObjectsV2', op_input.op_name) + self.assertEqual('GET', op_input.method) + self.assertEqual('example-bucket', op_input.bucket) + self.assertEqual(0, len(op_input.parameters.items())) + + request = model.ListObjectsV2Request( + bucket='example-bucket', + delimiter='/', + start_after='b', + encoding_type='url', + continuation_token='ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', + max_keys=10, + prefix='aaa', + fetch_owner=True, + request_payer='requester' + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='ListObjectsV2', + method='GET', + bucket=request.bucket, + )) + + self.assertEqual('ListObjectsV2', op_input.op_name) + self.assertEqual('GET', op_input.method) + self.assertEqual('example-bucket', op_input.bucket) + self.assertEqual(7, len(op_input.parameters.items())) + + def test_constructor_result(self): + result = model.ListObjectsV2Result() + self.assertIsNone(result.name) + self.assertIsNone(result.prefix) + self.assertIsNone(result.continuation_token) + self.assertIsNone(result.max_keys) + self.assertIsNone(result.delimiter) + self.assertIsNone(result.is_truncated) + self.assertIsNone(result.next_continuation_token) + self.assertIsNone(result.encoding_type) + self.assertIsNone(result.contents) + self.assertIsNone(result.common_prefixes) + self.assertIsNone(result.key_count) + self.assertIsNone(result.start_after) + self.assertIsInstance(result, serde.Model) + + result = model.ListObjectsV2Result( + name='example-bucket', + prefix='aaa', + continuation_token='ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', + max_keys=998, + delimiter='/', + is_truncated=True, + next_continuation_token='NextChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', + encoding_type='url', + contents=[model.ObjectProperties( + key='fun/movie/001.avi', + object_type='Normal', + size=344606, + etag='5B3C1A2E053D763E1B002CC607C5A0FE1****', + last_modified=datetime.datetime.fromtimestamp(1702743657), + storage_class='ColdArchive', + owner=model.Owner( + id='0022012****', + display_name='user_example', + ), + restore_info='ongoing-request="false", expiry-date="Sat, 05 Nov 2022 07:38:08 GMT"', + )], + common_prefixes=[model.CommonPrefix( + 
prefix='fun/movie/', + )], + start_after='b', + key_count=20, + ) + self.assertEqual('example-bucket', result.name) + self.assertEqual('aaa', result.prefix) + self.assertEqual('ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', result.continuation_token) + self.assertEqual(998, result.max_keys) + self.assertEqual('/', result.delimiter) + self.assertEqual(True, result.is_truncated) + self.assertEqual('NextChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', result.next_continuation_token) + self.assertEqual('url', result.encoding_type) + self.assertEqual('b', result.start_after) + self.assertEqual(20, result.key_count) + self.assertEqual('fun/movie/001.avi', result.contents[0].key) + self.assertEqual('2023-12-17T00:20:57.000Z', result.contents[0].last_modified.strftime('%Y-%m-%dT%H:%M:%S.000Z')) + self.assertEqual("5B3C1A2E053D763E1B002CC607C5A0FE1****", result.contents[0].etag) + self.assertEqual('Normal', result.contents[0].object_type) + self.assertEqual(344606, result.contents[0].size) + self.assertEqual('ColdArchive', result.contents[0].storage_class) + self.assertEqual('ongoing-request="false", expiry-date="Sat, 05 Nov 2022 07:38:08 GMT"', result.contents[0].restore_info) + self.assertEqual('0022012****', result.contents[0].owner.id) + self.assertEqual('user_example', result.contents[0].owner.display_name) + self.assertEqual('fun/movie/', result.common_prefixes[0].prefix) + + result = model.ListObjectsV2Result( + name='example-bucket', + invalid_field='invalid_field' + ) + self.assertTrue(hasattr(result, 'name')) + self.assertEqual('example-bucket', result.name) + self.assertFalse(hasattr(result, 'invalid_field')) + + def test_deserialize_result(self): + xml_data = r'''<?xml version="1.0" encoding="UTF-8"?> + <ListBucketResult> + <Name>examplebucket</Name> + <Prefix>aaa</Prefix> + <ContinuationToken>CgJiYw--</ContinuationToken> + <MaxKeys>100</MaxKeys> + <Delimiter>/</Delimiter> + <StartAfter>b</StartAfter> + <EncodingType>url</EncodingType> + <IsTruncated>false</IsTruncated> + <NextContinuationToken>NextChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA</NextContinuationToken> + <Contents> + <Key>exampleobject11.txt</Key> + <LastModified>2020-06-22T11:42:32.000Z</LastModified> + <ETag>"5B3C1A2E053D763E1B002CC607C5A0FE1****"</ETag> + <Type>Normal</Type> + <Size>344606</Size> + <StorageClass>ColdArchive</StorageClass> + <Owner> + <ID>0022012****</ID> + <DisplayName>user-example</DisplayName> + </Owner> + <RestoreInfo>ongoing-request="true"</RestoreInfo> + </Contents> + <Contents> + <Key>exampleobject2.txt</Key> + <LastModified>2023-12-08T08:12:20.000Z</LastModified> + <ETag>"5B3C1A2E053D763E1B002CC607C5A0FE1****"</ETag> + <Type>Normal2</Type> + <Size>344607</Size> + <StorageClass>DeepColdArchive</StorageClass> + <Owner> + <ID>0022012****22</ID> + <DisplayName>user-example22</DisplayName> + </Owner> + <RestoreInfo>ongoing-request="false", expiry-date="Sat, 05 Nov 2022 07:38:08 GMT"</RestoreInfo> + </Contents> + <CommonPrefixes> + <Prefix>a/b/</Prefix> + </CommonPrefixes> + <KeyCount>3</KeyCount> +</ListBucketResult>''' + + result = model.ListObjectsV2Result() + serde.deserialize_xml(xml_data=xml_data, obj=result) + + self.assertEqual('examplebucket', result.name) + self.assertEqual('aaa', result.prefix) + self.assertEqual('CgJiYw--', result.continuation_token) + self.assertEqual(100, result.max_keys) + self.assertEqual('/', result.delimiter) + self.assertEqual(False, result.is_truncated) + self.assertEqual('NextChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', result.next_continuation_token) + self.assertEqual('url', result.encoding_type) + self.assertEqual('b', result.start_after) + self.assertEqual(3, result.key_count) + self.assertEqual('exampleobject11.txt', result.contents[0].key) + self.assertEqual('2020-06-22T11:42:32.000Z', result.contents[0].last_modified.strftime('%Y-%m-%dT%H:%M:%S.000Z')) + self.assertEqual('"5B3C1A2E053D763E1B002CC607C5A0FE1****"', result.contents[0].etag) + self.assertEqual('Normal', result.contents[0].object_type) + self.assertEqual(344606, result.contents[0].size) + self.assertEqual('ColdArchive', result.contents[0].storage_class) + self.assertEqual('ongoing-request="true"', result.contents[0].restore_info) + self.assertEqual('0022012****', result.contents[0].owner.id) + self.assertEqual('user-example', result.contents[0].owner.display_name) + self.assertEqual('exampleobject2.txt', result.contents[1].key) +
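(Aside: the LastModified assertions above and below all rely on one ISO 8601 shape; a standalone sketch of the round trip the tests assume — the deserializer's exact parsing lives in serde and is not shown here:)

import datetime
raw = '2020-06-22T11:42:32.000Z'
dt = datetime.datetime.strptime(raw, '%Y-%m-%dT%H:%M:%S.%fZ')  # parse the XML timestamp
assert dt.strftime('%Y-%m-%dT%H:%M:%S.000Z') == raw            # format it back, as the tests do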
self.assertEqual('2023-12-08T08:12:20.000Z', result.contents[1].last_modified.strftime('%Y-%m-%dT%H:%M:%S.000Z')) + self.assertEqual('"5B3C1A2E053D763E1B002CC607C5A0FE1****"', result.contents[1].etag) + self.assertEqual('Normal2', result.contents[1].object_type) + self.assertEqual(344607, result.contents[1].size) + self.assertEqual('DeepColdArchive', result.contents[1].storage_class) + self.assertEqual('ongoing-request="false", expiry-date="Sat, 05 Nov 2022 07:38:08 GMT"', result.contents[1].restore_info) + self.assertEqual('0022012****22', result.contents[1].owner.id) + self.assertEqual('user-example22', result.contents[1].owner.display_name) + + +class TestGetBucketStat(unittest.TestCase): + def test_constructor_request(self): + request = model.GetBucketStatRequest( + bucket='example-bucket' + ) + self.assertIsNotNone(request.bucket) + self.assertIsInstance(request, serde.Model) + self.assertEqual('example-bucket', request.bucket) + + request = model.GetBucketStatRequest( + bucket='example-bucket', + invalid_field='invalid_field' + ) + self.assertTrue(hasattr(request, 'bucket')) + self.assertEqual('example-bucket', request.bucket) + self.assertFalse(hasattr(request, 'invalid_field')) + + def test_serialize_request(self): + request = model.GetBucketStatRequest( + bucket='example-bucket', + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='GetBucketStat', + method='GET', + bucket=request.bucket, + )) + + self.assertEqual('GetBucketStat', op_input.op_name) + self.assertEqual('GET', op_input.method) + self.assertEqual('example-bucket', op_input.bucket) + self.assertEqual(0, len(op_input.parameters.items())) + + def test_constructor_result(self): + result = model.GetBucketStatResult() + self.assertIsNone(result.storage) + self.assertIsInstance(result, serde.Model) + + result = model.GetBucketStatResult( + storage=1600, + object_count=230, + multi_part_upload_count=40, + live_channel_count=4, + last_modified_time=1643341269, + standard_storage=430, + standard_object_count=66, + infrequent_access_storage=2359296, + infrequent_access_real_storage=360, + infrequent_access_object_count=54, + archive_storage=2949120, + archive_real_storage=450, + archive_object_count=74, + cold_archive_storage=2359496, + cold_archive_real_storage=3610, + cold_archive_object_count=36, + deep_cold_archive_storage=23594961, + deep_cold_archive_real_storage=10, + deep_cold_archive_object_count=16, + delete_marker_count=1234355467575856878, + ) + self.assertEqual(1600, result.storage) + self.assertEqual(230, result.object_count) + self.assertEqual(40, result.multi_part_upload_count) + self.assertEqual(4, result.live_channel_count) + self.assertEqual(1643341269, result.last_modified_time) + self.assertEqual(430, result.standard_storage) + self.assertEqual(66, result.standard_object_count) + self.assertEqual(2359296, result.infrequent_access_storage) + self.assertEqual(360, result.infrequent_access_real_storage) + self.assertEqual(54, result.infrequent_access_object_count) + self.assertEqual(2949120, result.archive_storage) + self.assertEqual(450, result.archive_real_storage) + self.assertEqual(74, result.archive_object_count) + self.assertEqual(2359496, result.cold_archive_storage) + self.assertEqual(3610, result.cold_archive_real_storage) + self.assertEqual(36, result.cold_archive_object_count) + self.assertEqual(23594961, result.deep_cold_archive_storage) + self.assertEqual(10, result.deep_cold_archive_real_storage) + self.assertEqual(16, result.deep_cold_archive_object_count) + 
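(Aside: a hedged sketch of the call site these stat fields serve; a client method named get_bucket_stat is assumed here rather than shown in this file:)

stat = client.get_bucket_stat(model.GetBucketStatRequest(bucket='example-bucket'))
print(stat.storage, stat.object_count, stat.delete_marker_count)  # byte count, object count, delete markers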
self.assertEqual(1234355467575856878, result.delete_marker_count) + + result = model.GetBucketStatResult( + storage=1600, + object_count=230, + invalid_field='invalid_field' + ) + self.assertTrue(hasattr(result, 'storage')) + self.assertTrue(hasattr(result, 'object_count')) + self.assertEqual(1600, result.storage) + self.assertEqual(230, result.object_count) + self.assertFalse(hasattr(result, 'invalid_field')) + + def test_deserialize_result(self): + xml_data = r''' + + 1600 + 230 + 40 + 4 + 1643341269 + 430 + 66 + 2359296 + 360 + 54 + 2949120 + 450 + 74 + 2359296 + 3610 + 36 + 2359296 + 23594961 + 10 + 16 + 1234355467575856878 +''' + + result = model.GetBucketStatResult() + serde.deserialize_xml(xml_data=xml_data, obj=result) + self.assertEqual(1600, result.storage) + self.assertEqual(230, result.object_count) + self.assertEqual(40, result.multi_part_upload_count) + self.assertEqual(4, result.live_channel_count) + self.assertEqual(1643341269, result.last_modified_time) + self.assertEqual(430, result.standard_storage) + self.assertEqual(66, result.standard_object_count) + self.assertEqual(2359296, result.infrequent_access_storage) + self.assertEqual(360, result.infrequent_access_real_storage) + self.assertEqual(54, result.infrequent_access_object_count) + self.assertEqual(2949120, result.archive_storage) + self.assertEqual(450, result.archive_real_storage) + self.assertEqual(74, result.archive_object_count) + self.assertEqual(2359296, result.cold_archive_storage) + self.assertEqual(3610, result.cold_archive_real_storage) + self.assertEqual(36, result.cold_archive_object_count) + self.assertEqual(23594961, result.deep_cold_archive_storage) + self.assertEqual(10, result.deep_cold_archive_real_storage) + self.assertEqual(16, result.deep_cold_archive_object_count) + self.assertEqual(1234355467575856878, result.delete_marker_count) + + +class TestListObjects(unittest.TestCase): + def test_constructor_request(self): + request = model.ListObjectsRequest(bucket='example-bucket') + self.assertIsNotNone(request.bucket) + self.assertIsInstance(request, serde.Model) + + request = model.ListObjectsRequest( + bucket='example-bucket', + delimiter='/', + encoding_type='url', + marker='ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', + max_keys=10, + prefix='aaa', + request_payer='requester', + ) + self.assertEqual('example-bucket', request.bucket) + self.assertEqual('/', request.delimiter) + self.assertEqual('url', request.encoding_type) + self.assertEqual('ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', request.marker) + self.assertEqual(10, request.max_keys) + self.assertEqual('aaa', request.prefix) + self.assertEqual('requester', request.request_payer) + + request = model.ListObjectsRequest( + bucket='example-bucket', + delimiter='/', + encoding_type='url', + marker='ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', + max_keys=10, + prefix='aaa', + request_payer='requester', + invalid_field='invalid_field' + ) + self.assertTrue(hasattr(request, 'bucket')) + self.assertTrue(hasattr(request, 'delimiter')) + self.assertTrue(hasattr(request, 'encoding_type')) + self.assertTrue(hasattr(request, 'marker')) + self.assertTrue(hasattr(request, 'max_keys')) + self.assertTrue(hasattr(request, 'prefix')) + self.assertTrue(hasattr(request, 'request_payer')) + self.assertFalse(hasattr(request, 'invalid_field')) + self.assertEqual('example-bucket', request.bucket) + + + + def test_serialize_request(self): + request = model.ListObjectsRequest( + bucket='example-bucket', + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='ListObjects', 
+ method='GET', + bucket=request.bucket, + )) + + self.assertEqual('ListObjects', op_input.op_name) + self.assertEqual('GET', op_input.method) + self.assertEqual('example-bucket', op_input.bucket) + self.assertEqual(0, len(op_input.parameters.items())) + + request = model.ListObjectsRequest( + bucket='example-bucket', + delimiter='/', + encoding_type='url', + marker='ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', + max_keys=10, + prefix='aaa', + request_payer='requester' + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='ListObjects', + method='GET', + bucket=request.bucket, + )) + + self.assertEqual('ListObjects', op_input.op_name) + self.assertEqual('GET', op_input.method) + self.assertEqual('example-bucket', op_input.bucket) + self.assertEqual(5, len(op_input.parameters.items())) + + def test_constructor_result(self): + result = model.ListObjectsResult() + self.assertIsNone(result.name) + self.assertIsNone(result.prefix) + self.assertIsNone(result.marker) + self.assertIsNone(result.max_keys) + self.assertIsNone(result.delimiter) + self.assertIsNone(result.is_truncated) + self.assertIsNone(result.next_marker) + self.assertIsNone(result.encoding_type) + self.assertIsNone(result.contents) + self.assertIsNone(result.common_prefixes) + self.assertIsInstance(result, serde.Model) + + result = model.ListObjectsResult( + name='example-bucket', + prefix='aaa', + marker='ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', + max_keys=998, + delimiter='/', + is_truncated=True, + next_marker='NextChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', + encoding_type='url', + contents=[model.ObjectProperties( + key='fun/movie/001.avi', + object_type='Normal', + size=344606, + etag='5B3C1A2E053D763E1B002CC607C5A0FE1****', + last_modified=datetime.datetime.fromtimestamp(1702743657), + storage_class='ColdArchive', + owner=model.Owner( + id='0022012****', + display_name='user_example', + ), + restore_info='ongoing-request="false", expiry-date="Sat, 05 Nov 2022 07:38:08 GMT"', + )], + common_prefixes=[model.CommonPrefix( + prefix='fun/movie/', + )], + ) + self.assertEqual('example-bucket', result.name) + self.assertEqual('aaa', result.prefix) + self.assertEqual('ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', result.marker) + self.assertEqual(998, result.max_keys) + self.assertEqual('/', result.delimiter) + self.assertEqual(True, result.is_truncated) + self.assertEqual('NextChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', result.next_marker) + self.assertEqual('url', result.encoding_type) + self.assertEqual('fun/movie/001.avi', result.contents[0].key) + self.assertEqual('2023-12-17T00:20:57.000Z', result.contents[0].last_modified.strftime('%Y-%m-%dT%H:%M:%S.000Z')) + self.assertEqual("5B3C1A2E053D763E1B002CC607C5A0FE1****", result.contents[0].etag) + self.assertEqual('Normal', result.contents[0].object_type) + self.assertEqual(344606, result.contents[0].size) + self.assertEqual('ColdArchive', result.contents[0].storage_class) + self.assertEqual('ongoing-request="false", expiry-date="Sat, 05 Nov 2022 07:38:08 GMT"', result.contents[0].restore_info) + self.assertEqual('0022012****', result.contents[0].owner.id) + self.assertEqual('user_example', result.contents[0].owner.display_name) + self.assertEqual('fun/movie/', result.common_prefixes[0].prefix) + + result = model.ListObjectsResult( + name='example-bucket', + invalid_field='invalid_field' + ) + self.assertTrue(hasattr(result, 'name')) + self.assertEqual('example-bucket', result.name) + self.assertFalse(hasattr(result, 'invalid_field')) + + def test_deserialize_result(self): + xml_data = r'''<?xml version="1.0" encoding="UTF-8"?> + <ListBucketResult> + <Name>examplebucket</Name> + <Prefix>aaa</Prefix> + <Marker>CgJiYw--</Marker> + <MaxKeys>100</MaxKeys> + <Delimiter>/</Delimiter> + <EncodingType>url</EncodingType> + <IsTruncated>false</IsTruncated> + <NextMarker>NextChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA</NextMarker> + <Contents> + <Key>exampleobject11.txt</Key> + <LastModified>2020-06-22T11:42:32.000Z</LastModified> + <ETag>"5B3C1A2E053D763E1B002CC607C5A0FE1****"</ETag> + <Type>Normal</Type> + <Size>344606</Size> + <StorageClass>ColdArchive</StorageClass> + <Owner> + <ID>0022012****</ID> + <DisplayName>user-example</DisplayName> + </Owner> + <RestoreInfo>ongoing-request="true"</RestoreInfo> + </Contents> + <Contents> + <Key>exampleobject2.txt</Key> + <LastModified>2023-12-08T08:12:20.000Z</LastModified> + <ETag>"5B3C1A2E053D763E1B002CC607C5A0FE1****"</ETag> + <Type>Normal2</Type> + <Size>344607</Size> + <StorageClass>DeepColdArchive</StorageClass> + <Owner> + <ID>0022012****22</ID> + <DisplayName>user-example22</DisplayName> + </Owner> + <RestoreInfo>ongoing-request="false", expiry-date="Sat, 05 Nov 2022 07:38:08 GMT"</RestoreInfo> + </Contents> + <CommonPrefixes> + <Prefix>a/b/</Prefix> + </CommonPrefixes> +</ListBucketResult>''' + + result = model.ListObjectsResult() + serde.deserialize_xml(xml_data=xml_data, obj=result) + + self.assertEqual('examplebucket', result.name) + self.assertEqual('aaa', result.prefix) + self.assertEqual('CgJiYw--', result.marker) + self.assertEqual(100, result.max_keys) + self.assertEqual('/', result.delimiter) + self.assertEqual(False, result.is_truncated) + self.assertEqual('NextChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', result.next_marker) + self.assertEqual('url', result.encoding_type) + self.assertEqual('exampleobject11.txt', result.contents[0].key) + self.assertEqual('2020-06-22T11:42:32.000Z', result.contents[0].last_modified.strftime('%Y-%m-%dT%H:%M:%S.000Z')) + self.assertEqual('"5B3C1A2E053D763E1B002CC607C5A0FE1****"', result.contents[0].etag) + self.assertEqual('Normal', result.contents[0].object_type) + self.assertEqual(344606, result.contents[0].size) + self.assertEqual('ColdArchive', result.contents[0].storage_class) + self.assertEqual('ongoing-request="true"', result.contents[0].restore_info) + self.assertEqual('0022012****', result.contents[0].owner.id) + self.assertEqual('user-example', result.contents[0].owner.display_name) + self.assertEqual('exampleobject2.txt', result.contents[1].key) + self.assertEqual('2023-12-08T08:12:20.000Z', result.contents[1].last_modified.strftime('%Y-%m-%dT%H:%M:%S.000Z')) + self.assertEqual('"5B3C1A2E053D763E1B002CC607C5A0FE1****"', result.contents[1].etag) + self.assertEqual('Normal2', result.contents[1].object_type) + self.assertEqual(344607, result.contents[1].size) + self.assertEqual('DeepColdArchive', result.contents[1].storage_class) + self.assertEqual('ongoing-request="false", expiry-date="Sat, 05 Nov 2022 07:38:08 GMT"', result.contents[1].restore_info) + self.assertEqual('0022012****22', result.contents[1].owner.id) + self.assertEqual('user-example22', result.contents[1].owner.display_name) + + +class TestGetBucketInfo(unittest.TestCase): + def test_constructor_request(self): + request = model.GetBucketInfoRequest( + bucket='example-bucket' + ) + self.assertIsNotNone(request.bucket) + self.assertIsInstance(request, serde.Model) + self.assertEqual('example-bucket', request.bucket) + + request = model.GetBucketInfoRequest( + bucket='example-bucket', + invalid_field='invalid_field' + ) + self.assertTrue(hasattr(request, 'bucket')) + self.assertEqual('example-bucket', request.bucket) + self.assertFalse(hasattr(request, 'invalid_field')) + + def test_serialize_request(self): + request = model.GetBucketInfoRequest( + bucket='example-bucket', + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='GetBucketInfo', + method='GET', + bucket=request.bucket, + )) + + self.assertEqual('GetBucketInfo', op_input.op_name) + self.assertEqual('GET', op_input.method) + self.assertEqual('example-bucket', op_input.bucket) + self.assertEqual(0, len(op_input.parameters.items())) + + def test_constructor_result(self): + result = model.GetBucketInfoResult() + self.assertIsNone(result.bucket_info) +
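(Aside: bucket_info and its sub-models deserialize as plain optional attributes, so call sites typically guard the chain; a minimal sketch using the names from this class:)

info = result.bucket_info
if info is not None and info.sse_rule is not None:
    print(info.sse_rule.sse_algorithm, info.sse_rule.kms_master_key_id)  # e.g. 'KMS' and the key id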
self.assertIsInstance(result, serde.Model) + + result = model.GetBucketInfoResult( + bucket_info=model.BucketInfo( + name = 'oss-example', + access_monitor = 'Enabled', + location = 'oss-cn-hangzhou', + creation_date = '2013-07-31T10:56:21.000Z', + extranet_endpoint = 'oss-cn-hangzhou.aliyuncs.com', + intranet_endpoint = 'oss-cn-hangzhou-internal.aliyuncs.com', + acl = 'private', + data_redundancy_type = 'LRS', + owner = model.Owner( + id='0022012****', + display_name='user_example', + ), + storage_class = 'Standard', + resource_group_id = 'rg-aek27tc********', + sse_rule = model.SSERule( + kms_master_key_id='0022012****', + sse_algorithm='user_example', + kms_data_encryption='user_example', + ), + versioning = 'Enabled', + transfer_acceleration = 'Disabled', + cross_region_replication = 'Disabled', + bucket_policy = model.BucketPolicy( + log_bucket='0022012****', + log_prefix='user_example', + ), + comment = 'comment test', + block_public_access = True, + ), + ) + + self.assertEqual('oss-example', result.bucket_info.name) + self.assertEqual('Enabled', result.bucket_info.access_monitor) + self.assertEqual('oss-cn-hangzhou', result.bucket_info.location) + self.assertEqual('2013-07-31T10:56:21.000Z', result.bucket_info.creation_date) + self.assertEqual('oss-cn-hangzhou.aliyuncs.com', result.bucket_info.extranet_endpoint) + self.assertEqual('oss-cn-hangzhou-internal.aliyuncs.com', result.bucket_info.intranet_endpoint) + self.assertEqual('private', result.bucket_info.acl) + self.assertEqual('LRS', result.bucket_info.data_redundancy_type) + self.assertEqual('0022012****', result.bucket_info.owner.id) + self.assertEqual('user_example', result.bucket_info.owner.display_name) + self.assertEqual('Standard', result.bucket_info.storage_class) + self.assertEqual('rg-aek27tc********', result.bucket_info.resource_group_id) + self.assertEqual('0022012****', result.bucket_info.sse_rule.kms_master_key_id) + self.assertEqual('user_example', result.bucket_info.sse_rule.sse_algorithm) + self.assertEqual('user_example', result.bucket_info.sse_rule.kms_data_encryption) + self.assertEqual('Enabled', result.bucket_info.versioning) + self.assertEqual('Disabled', result.bucket_info.transfer_acceleration) + self.assertEqual('Disabled', result.bucket_info.cross_region_replication) + self.assertEqual('0022012****', result.bucket_info.bucket_policy.log_bucket) + self.assertEqual('user_example', result.bucket_info.bucket_policy.log_prefix) + self.assertEqual('comment test', result.bucket_info.comment) + self.assertEqual(True, result.bucket_info.block_public_access) + + result = model.GetBucketInfoResult( + bucket_info=model.BucketInfo( + name='oss-example', + access_monitor='Enabled', + invalid_field='invalid_field' + ) + ) + self.assertTrue(hasattr(result.bucket_info, 'name')) + self.assertTrue(hasattr(result.bucket_info, 'access_monitor')) + self.assertEqual('oss-example', result.bucket_info.name) + self.assertEqual('Enabled', result.bucket_info.access_monitor) + self.assertFalse(hasattr(result.bucket_info, 'invalid_field')) + + def test_deserialize_result(self): + xml_data = r'''<?xml version="1.0" encoding="UTF-8"?> + <BucketInfo> + <Bucket> + <AccessMonitor>Enabled</AccessMonitor> + <CreationDate>2013-07-31T10:56:21.000Z</CreationDate> + <ExtranetEndpoint>oss-cn-hangzhou.aliyuncs.com</ExtranetEndpoint> + <IntranetEndpoint>oss-cn-hangzhou-internal.aliyuncs.com</IntranetEndpoint> + <Location>oss-cn-hangzhou</Location> + <StorageClass>Standard</StorageClass> + <TransferAcceleration>Disabled</TransferAcceleration> + <CrossRegionReplication>Disabled</CrossRegionReplication> + <DataRedundancyType>LRS</DataRedundancyType> + <Name>oss-example</Name> + <ResourceGroupId>rg-aek27tc********</ResourceGroupId> + <Owner> + <DisplayName>username</DisplayName> + <ID>27183473914****</ID> + </Owner> + <AccessControlList> + <Grant>private</Grant> + </AccessControlList> + <ServerSideEncryptionRule> + <SSEAlgorithm>KMS</SSEAlgorithm> + <KMSMasterKeyID>shUhih687675***32edghadg</KMSMasterKeyID> + <KMSDataEncryption>SM4</KMSDataEncryption> + </ServerSideEncryptionRule> + <BucketPolicy> + <LogBucket>examplebucket</LogBucket> + <LogPrefix>log/</LogPrefix> + </BucketPolicy> + <Comment>test</Comment> + <Versioning>Enabled</Versioning> + <BlockPublicAccess>true</BlockPublicAccess> + </Bucket> +</BucketInfo>''' + + result = model.GetBucketInfoResult() + serde.deserialize_xml(xml_data=xml_data,
obj=result) + self.assertEqual('oss-example', result.bucket_info.name) + self.assertEqual('Enabled', result.bucket_info.access_monitor) + self.assertEqual('oss-cn-hangzhou', result.bucket_info.location) + self.assertEqual('2013-07-31T10:56:21.000Z', result.bucket_info.creation_date.strftime('%Y-%m-%dT%H:%M:%S.000Z')) + self.assertEqual('oss-cn-hangzhou.aliyuncs.com', result.bucket_info.extranet_endpoint) + self.assertEqual('oss-cn-hangzhou-internal.aliyuncs.com', result.bucket_info.intranet_endpoint) + self.assertEqual('private', result.bucket_info.acl) + self.assertEqual('LRS', result.bucket_info.data_redundancy_type) + self.assertEqual('27183473914****', result.bucket_info.owner.id) + self.assertEqual('username', result.bucket_info.owner.display_name) + self.assertEqual('Standard', result.bucket_info.storage_class) + self.assertEqual('rg-aek27tc********', result.bucket_info.resource_group_id) + self.assertEqual('shUhih687675***32edghadg', result.bucket_info.sse_rule.kms_master_key_id) + self.assertEqual('KMS', result.bucket_info.sse_rule.sse_algorithm) + self.assertEqual('SM4', result.bucket_info.sse_rule.kms_data_encryption) + self.assertEqual('Enabled', result.bucket_info.versioning) + self.assertEqual('Disabled', result.bucket_info.transfer_acceleration) + self.assertEqual('Disabled', result.bucket_info.cross_region_replication) + self.assertEqual('KMS', result.bucket_info.sse_rule.sse_algorithm) + self.assertEqual('shUhih687675***32edghadg', result.bucket_info.sse_rule.kms_master_key_id) + self.assertEqual('SM4', result.bucket_info.sse_rule.kms_data_encryption) + self.assertEqual('examplebucket', result.bucket_info.bucket_policy.log_bucket) + self.assertEqual('log/', result.bucket_info.bucket_policy.log_prefix) + self.assertEqual('test', result.bucket_info.comment) + self.assertEqual(True, result.bucket_info.block_public_access) + + +class TestGetBucketLocation(unittest.TestCase): + def test_constructor_request(self): + request = model.GetBucketLocationRequest( + bucket='example-bucket' + ) + self.assertIsNotNone(request.bucket) + self.assertIsInstance(request, serde.Model) + self.assertEqual('example-bucket', request.bucket) + + request = model.GetBucketLocationRequest( + bucket='example-bucket', + invalid_field='invalid_field' + ) + self.assertTrue(hasattr(request, 'bucket')) + self.assertEqual('example-bucket', request.bucket) + self.assertFalse(hasattr(request, 'invalid_field')) + + def test_serialize_request(self): + request = model.GetBucketLocationRequest( + bucket='example-bucket', + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='GetBucketLocation', + method='GET', + bucket=request.bucket, + )) + + self.assertEqual('GetBucketLocation', op_input.op_name) + self.assertEqual('GET', op_input.method) + self.assertEqual('example-bucket', op_input.bucket) + self.assertEqual(0, len(op_input.parameters.items())) + + def test_constructor_result(self): + result = model.GetBucketLocationResult() + self.assertIsNone(result.location) + self.assertIsInstance(result, serde.Model) + + result = model.GetBucketLocationResult( + location='oss-cn-hangzhou', + ) + + self.assertEqual('oss-cn-hangzhou', result.location) + + result = model.GetBucketLocationResult( + location='oss-cn-hangzhou', + invalid_field='invalid_field' + ) + self.assertTrue(hasattr(result, 'location')) + self.assertEqual('oss-cn-hangzhou', result.location) + self.assertFalse(hasattr(result, 'invalid_field')) + + def test_deserialize_result(self): + xml_data = r''' +oss-cn-hangzhou''' + + result = 
model.GetBucketLocationResult() + serde.deserialize_xml(xml_data=xml_data, obj=result) + self.assertEqual('oss-cn-hangzhou', result.location) + + +class TestPutBucketVersioning(unittest.TestCase): + def test_constructor_request(self): + request = model.PutBucketVersioningRequest(bucket='example-bucket') + self.assertIsNotNone(request.bucket) + self.assertIsNone(request.versioning_configuration) + self.assertIsInstance(request, serde.Model) + + request = model.PutBucketVersioningRequest( + bucket='example-bucket', + versioning_configuration=model.VersioningConfiguration( + status='Enabled' + ) + ) + self.assertEqual('example-bucket', request.bucket) + self.assertEqual('Enabled', request.versioning_configuration.status) + + request = model.PutBucketVersioningRequest( + bucket='example-bucket', + versioning_configuration=model.VersioningConfiguration( + status='Enabled' + ), + invalid_field='invalid_field', + ) + self.assertTrue(hasattr(request, 'bucket')) + self.assertEqual('example-bucket', request.bucket) + self.assertTrue(hasattr(request.versioning_configuration, 'status')) + self.assertEqual('Enabled', request.versioning_configuration.status) + self.assertFalse(hasattr(request, 'invalid_field')) + + + def test_serialize_request(self): + request = model.PutBucketVersioningRequest( + bucket='example-bucket', + versioning_configuration=model.VersioningConfiguration( + status='Enabled' + ), + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='PutBucketVersioning', + method='PUT', + bucket=request.bucket, + )) + + xml_data = r'''Enabled''' + + self.assertEqual('PutBucketVersioning', op_input.op_name) + self.assertEqual('PUT', op_input.method) + self.assertEqual('example-bucket', op_input.bucket) + self.assertEqual(xml_data.encode(), op_input.body) + self.assertEqual(0, len(op_input.parameters.items())) + + def test_constructor_result(self): + result = model.PutBucketVersioningResult() + self.assertIsInstance(result, serde.ResultModel) + + def test_deserialize_result(self): + xml_data = None + result = model.PutBucketVersioningResult() + serde.deserialize_output( + result, + OperationOutput( + status='OK', + status_code=200, + headers=CaseInsensitiveDict({ + 'x-oss-request-id': '123' + }), + http_response=MockHttpResponse( + status_code=200, + reason='OK', + headers={'x-oss-request-id': 'id-1234'}, + body=xml_data, + ) + ) + ) + self.assertEqual('OK', result.status) + self.assertEqual(200, result.status_code) + self.assertEqual('123', result.request_id) + + +class TestGetBucketVersioning(unittest.TestCase): + def test_constructor_request(self): + request = model.GetBucketVersioningRequest( + bucket='example-bucket' + ) + self.assertIsNotNone(request.bucket) + self.assertIsInstance(request, serde.Model) + self.assertEqual('example-bucket', request.bucket) + + request = model.GetBucketVersioningRequest( + bucket='example-bucket', + invalid_field='invalid_field' + ) + self.assertTrue(hasattr(request, 'bucket')) + self.assertEqual('example-bucket', request.bucket) + self.assertFalse(hasattr(request, 'invalid_field')) + + def test_serialize_request(self): + request = model.GetBucketVersioningRequest( + bucket='example-bucket', + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='GetBucketVersioning', + method='GET', + bucket=request.bucket, + )) + + self.assertEqual('GetBucketVersioning', op_input.op_name) + self.assertEqual('GET', op_input.method) + self.assertEqual('example-bucket', op_input.bucket) + self.assertEqual(0, 
len(op_input.parameters.items())) + + def test_constructor_result(self): + result = model.GetBucketVersioningResult() + self.assertIsNone(result.version_status) + self.assertIsInstance(result, serde.Model) + + result = model.GetBucketVersioningResult( + version_status='Enabled', + ) + + self.assertEqual('Enabled', result.version_status) + + result = model.GetBucketVersioningResult( + version_status='Enabled', + invalid_field='invalid_field' + ) + self.assertTrue(hasattr(result, 'version_status')) + self.assertEqual('Enabled', result.version_status) + self.assertFalse(hasattr(result, 'invalid_field')) + + def test_deserialize_result(self): + xml_data = r''' + + Enabled +''' + + result = model.GetBucketVersioningResult() + serde.deserialize_xml(xml_data=xml_data, obj=result) + self.assertEqual('Enabled', result.version_status) + + +class TestListObjectVersions(unittest.TestCase): + def test_constructor_request(self): + request = model.ListObjectVersionsRequest( + bucket='example-bucket', + delimiter='/', + encoding_type='url', + key_marker='ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', + version_id_marker='CAEQMxiBgICbof2D0BYiIGRhZjgwMzJiMjA3MjQ0ODE5MWYxZDYwMzJlZjU1****', + max_keys=10, + prefix='aaa', + request_payer='requester', + ) + self.assertEqual('example-bucket', request.bucket) + self.assertEqual('/', request.delimiter) + self.assertEqual('url', request.encoding_type) + self.assertEqual('ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', request.key_marker) + self.assertEqual('CAEQMxiBgICbof2D0BYiIGRhZjgwMzJiMjA3MjQ0ODE5MWYxZDYwMzJlZjU1****', request.version_id_marker) + self.assertEqual(10, request.max_keys) + self.assertEqual('aaa', request.prefix) + self.assertEqual('requester', request.request_payer) + + request = model.ListObjectVersionsRequest( + bucket='example-bucket', + delimiter='/', + encoding_type='url', + key_marker='ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', + version_id_marker='CAEQMxiBgICbof2D0BYiIGRhZjgwMzJiMjA3MjQ0ODE5MWYxZDYwMzJlZjU1****', + max_keys=10, + prefix='aaa', + request_payer='requester', + invalid_field='invalid_field' + ) + self.assertTrue(hasattr(request, 'bucket')) + self.assertTrue(hasattr(request, 'delimiter')) + self.assertTrue(hasattr(request, 'encoding_type')) + self.assertTrue(hasattr(request, 'key_marker')) + self.assertTrue(hasattr(request, 'version_id_marker')) + self.assertTrue(hasattr(request, 'max_keys')) + self.assertTrue(hasattr(request, 'prefix')) + self.assertTrue(hasattr(request, 'request_payer')) + self.assertFalse(hasattr(request, 'invalid_field')) + self.assertEqual('example-bucket', request.bucket) + + def test_serialize_request(self): + request = model.ListObjectVersionsRequest( + bucket='example-bucket', + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='ListObjectVersions', + method='GET', + bucket=request.bucket, + )) + + self.assertEqual('ListObjectVersions', op_input.op_name) + self.assertEqual('GET', op_input.method) + self.assertEqual('example-bucket', op_input.bucket) + self.assertEqual(0, len(op_input.parameters.items())) + + request = model.ListObjectVersionsRequest( + bucket='example-bucket', + delimiter='/', + encoding_type='url', + key_marker='ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', + version_id_marker='CAEQMxiBgICbof2D0BYiIGRhZjgwMzJiMjA3MjQ0ODE5MWYxZDYwMzJlZjU1****', + max_keys=10, + prefix='aaa', + request_payer='requester' + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='ListObjectVersions', + method='GET', + bucket=request.bucket, + )) + + self.assertEqual('ListObjectVersions', op_input.op_name) + 
self.assertEqual('GET', op_input.method) + self.assertEqual('example-bucket', op_input.bucket) + self.assertEqual(6, len(op_input.parameters.items())) + + + def test_constructor_result(self): + result = model.ListObjectVersionsResult() + self.assertIsNone(result.name) + self.assertIsNone(result.key_marker) + self.assertIsNone(result.next_key_marker) + self.assertIsNone(result.version_id_marker) + self.assertIsNone(result.next_version_id_marker) + self.assertIsNone(result.prefix) + self.assertIsNone(result.delimiter) + self.assertIsNone(result.is_truncated) + self.assertIsNone(result.encoding_type) + self.assertIsNone(result.version) + self.assertIsNone(result.delete_marker) + self.assertIsNone(result.common_prefixes) + self.assertIsInstance(result, serde.Model) + + result = model.ListObjectVersionsResult( + name='demo-bucket', + prefix='demo%2F', + key_marker='', + version_id_marker='', + max_keys=20, + delimiter='%2F', + encoding_type='url', + is_truncated=True, + next_key_marker='demo%2FREADME-CN.md', + next_version_id_marker='CAEQEhiBgICDzK6NnBgiIGRlZWJhYmNlMGUxZDQ4YTZhNTU2MzM4Mzk5NDBl****', + version=[model.ObjectVersionProperties( + key='demo%2FREADME-CN.md', + version_id='CAEQEhiBgICDzK6NnBgiIGRlZWJhYmNlMGUxZDQ4YTZhNTU2MzM4Mzk5NDBl****', + is_latest=False, + last_modified=datetime.datetime.fromtimestamp(1702743657), + etag='"E317049B40462DE37C422CE4FC1B****"', + object_type='Normal', + size=2943, + storage_class='Standard', + owner=model.Owner( + id='150692521021****', + display_name='160692521021****', + ), + restore_info='ongoing-request="false", expiry-date="Thu, 24 Sep 2020 12:40:33 GMT"', + ), + model.ObjectVersionProperties( + key='example-object-2.jpg', + version_id='', + is_latest=True, + last_modified=datetime.datetime.fromtimestamp(1702733657), + etag='5B3C1A2E053D763E1B002CC607C5A0FE1****', + size=20, + storage_class='STANDARD', + owner=model.Owner( + id='1250000000', + display_name='1250000000', + ), + restore_info='ongoing-request="true"', + )], + delete_marker=[model.DeleteMarkerProperties( + key='demo%2FREADME-CN.md', + version_id='CAEQFBiCgID3.86GohgiIDc4ZTE0NTNhZTc5MDQxYzBhYTU5MjY1ZDFjNGJm****', + is_latest=True, + last_modified=datetime.datetime.fromtimestamp(1702755657), + owner=model.Owner( + id='150692521021****', + display_name='350692521021****', + ), + ), + model.DeleteMarkerProperties( + key='demo%2FLICENSE', + version_id='CAEQFBiBgMD0.86GohgiIGZmMmFlM2UwNjdlMzRiMGFhYjk4MjM1ZGUyZDY0****', + is_latest=True, + last_modified=datetime.datetime.fromtimestamp(1702743377), + owner=model.Owner( + id='150692521021****', + display_name='250692521021****', + ), + )], + common_prefixes=[model.CommonPrefix( + prefix='demo%2F.git%2F', + ), + model.CommonPrefix( + prefix='demo%2F.idea%2F', + )], + ) + + self.assertEqual('demo-bucket', result.name) + self.assertEqual('demo%2F', result.prefix) + self.assertEqual('', result.key_marker) + self.assertEqual('', result.version_id_marker) + self.assertEqual(20, result.max_keys) + self.assertEqual('%2F', result.delimiter) + self.assertEqual('url', result.encoding_type) + self.assertEqual(True, result.is_truncated) + self.assertEqual('demo%2FREADME-CN.md', result.next_key_marker) + self.assertEqual('CAEQEhiBgICDzK6NnBgiIGRlZWJhYmNlMGUxZDQ4YTZhNTU2MzM4Mzk5NDBl****',result.next_version_id_marker) + self.assertEqual('demo%2FREADME-CN.md', result.version[0].key) + self.assertEqual('CAEQEhiBgICDzK6NnBgiIGRlZWJhYmNlMGUxZDQ4YTZhNTU2MzM4Mzk5NDBl****',result.version[0].version_id) + self.assertEqual(False, result.version[0].is_latest) + 
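+        # Note: the expected strings below assume the test runs in a UTC+8
+        # local timezone, since datetime.datetime.fromtimestamp() converts to
+        # local time; epoch 1702743657 is 2023-12-17T00:20:57 only at +08:00.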
+        self.assertEqual('2023-12-17T00:20:57.000Z', result.version[0].last_modified.strftime('%Y-%m-%dT%H:%M:%S.000Z'))
+        self.assertEqual('"E317049B40462DE37C422CE4FC1B****"', result.version[0].etag)
+        self.assertEqual('Normal', result.version[0].object_type)
+        self.assertEqual(2943, result.version[0].size)
+        self.assertEqual('Standard', result.version[0].storage_class)
+        self.assertEqual('150692521021****', result.version[0].owner.id)
+        self.assertEqual('160692521021****', result.version[0].owner.display_name)
+        self.assertEqual('ongoing-request="false", expiry-date="Thu, 24 Sep 2020 12:40:33 GMT"', result.version[0].restore_info)
+        self.assertEqual('example-object-2.jpg', result.version[1].key)
+        self.assertEqual('', result.version[1].version_id)
+        self.assertEqual(True, result.version[1].is_latest)
+        self.assertEqual('2023-12-16T21:34:17.000Z', result.version[1].last_modified.strftime('%Y-%m-%dT%H:%M:%S.000Z'))
+        self.assertEqual('5B3C1A2E053D763E1B002CC607C5A0FE1****', result.version[1].etag)
+        self.assertEqual(20, result.version[1].size)
+        self.assertEqual('STANDARD', result.version[1].storage_class)
+        self.assertEqual('1250000000', result.version[1].owner.id)
+        self.assertEqual('1250000000', result.version[1].owner.display_name)
+        self.assertEqual('ongoing-request="true"', result.version[1].restore_info)
+        self.assertEqual('demo%2FREADME-CN.md', result.delete_marker[0].key)
+        self.assertEqual('CAEQFBiCgID3.86GohgiIDc4ZTE0NTNhZTc5MDQxYzBhYTU5MjY1ZDFjNGJm****', result.delete_marker[0].version_id)
+        self.assertEqual(True, result.delete_marker[0].is_latest)
+        self.assertEqual('2023-12-17T03:40:57.000Z', result.delete_marker[0].last_modified.strftime('%Y-%m-%dT%H:%M:%S.000Z'))
+        self.assertEqual('150692521021****', result.delete_marker[0].owner.id)
+        self.assertEqual('350692521021****', result.delete_marker[0].owner.display_name)
+        self.assertEqual('demo%2FLICENSE', result.delete_marker[1].key)
+        self.assertEqual('CAEQFBiBgMD0.86GohgiIGZmMmFlM2UwNjdlMzRiMGFhYjk4MjM1ZGUyZDY0****', result.delete_marker[1].version_id)
+        self.assertEqual(True, result.delete_marker[1].is_latest)
+        self.assertEqual('2023-12-17T00:16:17.000Z', result.delete_marker[1].last_modified.strftime('%Y-%m-%dT%H:%M:%S.000Z'))
+        self.assertEqual('150692521021****', result.delete_marker[1].owner.id)
+        self.assertEqual('250692521021****', result.delete_marker[1].owner.display_name)
+        self.assertEqual('demo%2F.git%2F', result.common_prefixes[0].prefix)
+        self.assertEqual('demo%2F.idea%2F', result.common_prefixes[1].prefix)
+
+        result = model.ListObjectVersionsResult(
+            version_id_marker='BgICDzK6NnBgiIGRlZWJhY',
+            invalid_field='invalid_field'
+        )
+        self.assertTrue(hasattr(result, 'version_id_marker'))
+        self.assertEqual('BgICDzK6NnBgiIGRlZWJhY', result.version_id_marker)
+        self.assertFalse(hasattr(result, 'invalid_field'))
+
+    def test_deserialize_result(self):
+        xml_data = r'''<?xml version="1.0" encoding="UTF-8"?>
+<ListVersionsResult>
+  <Name>demo-bucket</Name>
+  <Prefix>demo%2F</Prefix>
+  <KeyMarker></KeyMarker>
+  <VersionIdMarker></VersionIdMarker>
+  <MaxKeys>20</MaxKeys>
+  <Delimiter>%2F</Delimiter>
+  <EncodingType>url</EncodingType>
+  <IsTruncated>true</IsTruncated>
+  <NextKeyMarker>demo%2FREADME-CN.md</NextKeyMarker>
+  <NextVersionIdMarker>CAEQEhiBgICDzK6NnBgiIGRlZWJhYmNlMGUxZDQ4YTZhNTU2MzM4Mzk5NDBl****</NextVersionIdMarker>
+  <Version>
+    <Key>demo%2FREADME-CN.md</Key>
+    <VersionId>CAEQEhiBgICDzK6NnBgiIGRlZWJhYmNlMGUxZDQ4YTZhNTU2MzM4Mzk5NDBl****</VersionId>
+    <IsLatest>false</IsLatest>
+    <LastModified>2022-09-28T09:04:39.000Z</LastModified>
+    <ETag>"E317049B40462DE37C422CE4FC1B****"</ETag>
+    <Type>Normal</Type>
+    <Size>2943</Size>
+    <StorageClass>Standard</StorageClass>
+    <Owner>
+      <ID>150692521021****</ID>
+      <DisplayName>160692521021****</DisplayName>
+    </Owner>
+    <RestoreInfo>ongoing-request="false", expiry-date="Thu, 24 Sep 2020 12:40:33 GMT"</RestoreInfo>
+  </Version>
+  <Version>
+    <Key>example-object-2.jpg</Key>
+    <VersionId></VersionId>
+    <IsLatest>true</IsLatest>
+    <LastModified>2019-08-09T12:03:09.000Z</LastModified>
+    <ETag>5B3C1A2E053D763E1B002CC607C5A0FE1****</ETag>
+    <Size>20</Size>
+    <StorageClass>STANDARD</StorageClass>
+    <Owner>
+      <ID>1250000000</ID>
+      <DisplayName>1250000000</DisplayName>
+    </Owner>
+    <RestoreInfo>ongoing-request="true"</RestoreInfo>
+  </Version>
+  <DeleteMarker>
+    <Key>demo%2FREADME-CN.md</Key>
+    <VersionId>CAEQFBiCgID3.86GohgiIDc4ZTE0NTNhZTc5MDQxYzBhYTU5MjY1ZDFjNGJm****</VersionId>
+    <IsLatest>true</IsLatest>
+    <LastModified>2022-11-04T08:00:06.000Z</LastModified>
+    <Owner>
+      <ID>150692521021****</ID>
+      <DisplayName>350692521021****</DisplayName>
+    </Owner>
+  </DeleteMarker>
+  <DeleteMarker>
+    <Key>demo%2FLICENSE</Key>
+    <VersionId>CAEQFBiBgMD0.86GohgiIGZmMmFlM2UwNjdlMzRiMGFhYjk4MjM1ZGUyZDY0****</VersionId>
+    <IsLatest>true</IsLatest>
+    <LastModified>2022-11-04T08:00:06.000Z</LastModified>
+    <Owner>
+      <ID>150692521021****</ID>
+      <DisplayName>250692521021****</DisplayName>
+    </Owner>
+  </DeleteMarker>
+  <CommonPrefixes>
+    <Prefix>demo%2F.git%2F</Prefix>
+  </CommonPrefixes>
+  <CommonPrefixes>
+    <Prefix>demo%2F.idea%2F</Prefix>
+  </CommonPrefixes>
+</ListVersionsResult>
+'''
+
+        result = model.ListObjectVersionsResult()
+        serde.deserialize_xml(xml_data=xml_data, obj=result)
+        self.assertEqual('demo-bucket', result.name)
+        self.assertEqual('demo%2F', result.prefix)
+        self.assertEqual(None, result.key_marker)
+        self.assertEqual(None, result.version_id_marker)
+        self.assertEqual(20, result.max_keys)
+        self.assertEqual('%2F', result.delimiter)
+        self.assertEqual('url', result.encoding_type)
+        self.assertEqual(True, result.is_truncated)
+        self.assertEqual('demo%2FREADME-CN.md', result.next_key_marker)
+        self.assertEqual('CAEQEhiBgICDzK6NnBgiIGRlZWJhYmNlMGUxZDQ4YTZhNTU2MzM4Mzk5NDBl****', result.next_version_id_marker)
+        self.assertEqual('demo%2FREADME-CN.md', result.version[0].key)
+        self.assertEqual('CAEQEhiBgICDzK6NnBgiIGRlZWJhYmNlMGUxZDQ4YTZhNTU2MzM4Mzk5NDBl****', result.version[0].version_id)
+        self.assertEqual(False, result.version[0].is_latest)
+        self.assertEqual('2022-09-28T09:04:39.000Z', result.version[0].last_modified.strftime('%Y-%m-%dT%H:%M:%S.000Z'))
+        self.assertEqual('"E317049B40462DE37C422CE4FC1B****"', result.version[0].etag)
+        self.assertEqual('Normal', result.version[0].object_type)
+        self.assertEqual(2943, result.version[0].size)
+        self.assertEqual('Standard', result.version[0].storage_class)
+        self.assertEqual('150692521021****', result.version[0].owner.id)
+        self.assertEqual('160692521021****', result.version[0].owner.display_name)
+        self.assertEqual('ongoing-request="false", expiry-date="Thu, 24 Sep 2020 12:40:33 GMT"', result.version[0].restore_info)
+        self.assertEqual('example-object-2.jpg', result.version[1].key)
+        self.assertEqual(None, result.version[1].version_id)
+        self.assertEqual(True, result.version[1].is_latest)
+        self.assertEqual('2019-08-09T12:03:09.000Z', result.version[1].last_modified.strftime('%Y-%m-%dT%H:%M:%S.000Z'))
+        self.assertEqual('5B3C1A2E053D763E1B002CC607C5A0FE1****', result.version[1].etag)
+        self.assertEqual(20, result.version[1].size)
+        self.assertEqual('STANDARD', result.version[1].storage_class)
+        self.assertEqual('1250000000', result.version[1].owner.id)
+        self.assertEqual('1250000000', result.version[1].owner.display_name)
+        self.assertEqual('ongoing-request="true"', result.version[1].restore_info)
+        self.assertEqual('demo%2FREADME-CN.md', result.delete_marker[0].key)
+        self.assertEqual('CAEQFBiCgID3.86GohgiIDc4ZTE0NTNhZTc5MDQxYzBhYTU5MjY1ZDFjNGJm****', result.delete_marker[0].version_id)
+        self.assertEqual(True, result.delete_marker[0].is_latest)
+        self.assertEqual('2022-11-04T08:00:06.000Z', result.delete_marker[0].last_modified.strftime('%Y-%m-%dT%H:%M:%S.000Z'))
+        self.assertEqual('150692521021****', result.delete_marker[0].owner.id)
+        self.assertEqual('350692521021****', result.delete_marker[0].owner.display_name)
+        self.assertEqual('demo%2FLICENSE', result.delete_marker[1].key)
+        self.assertEqual('CAEQFBiBgMD0.86GohgiIGZmMmFlM2UwNjdlMzRiMGFhYjk4MjM1ZGUyZDY0****', result.delete_marker[1].version_id)
+        self.assertEqual(True, result.delete_marker[1].is_latest)
+        self.assertEqual('2022-11-04T08:00:06.000Z', result.delete_marker[1].last_modified.strftime('%Y-%m-%dT%H:%M:%S.000Z'))
+        self.assertEqual('150692521021****', result.delete_marker[1].owner.id)
self.assertEqual('250692521021****', result.delete_marker[1].owner.display_name) + self.assertEqual('demo%2F.git%2F', result.common_prefixes[0].prefix) + self.assertEqual('demo%2F.idea%2F', result.common_prefixes[1].prefix) \ No newline at end of file diff --git a/tests/unit/models/test_object_basic.py b/tests/unit/models/test_object_basic.py new file mode 100644 index 0000000..a701f20 --- /dev/null +++ b/tests/unit/models/test_object_basic.py @@ -0,0 +1,1721 @@ +# pylint: skip-file +import datetime +import unittest +import xml.etree.ElementTree as ET +from alibabacloud_oss_v2 import serde +from alibabacloud_oss_v2.models import object_basic as model +from alibabacloud_oss_v2.types import OperationInput, OperationOutput, CaseInsensitiveDict +from .. import MockHttpResponse + +class TestPutObject(unittest.TestCase): + def test_constructor_request(self): + request = model.PutObjectRequest( + bucket='bucket-test', + key='key-test', + ) + self.assertIsNotNone(request.bucket) + self.assertIsNotNone(request.key) + self.assertIsNone(request.acl) + self.assertIsNone(request.storage_class) + self.assertIsNone(request.metadata) + self.assertIsNone(request.cache_control) + self.assertIsNone(request.content_disposition) + self.assertIsNone(request.content_encoding) + self.assertIsNone(request.content_length) + self.assertIsNone(request.content_md5) + self.assertIsNone(request.content_type) + self.assertIsNone(request.expires) + self.assertIsNone(request.server_side_encryption) + self.assertIsNone(request.server_side_data_encryption) + self.assertIsNone(request.sse_kms_key_id) + self.assertIsNone(request.tagging) + self.assertIsNone(request.callback) + self.assertIsNone(request.callback_var) + self.assertIsNone(request.forbid_overwrite) + self.assertIsNone(request.traffic_limit) + self.assertIsNone(request.request_payer) + self.assertIsNone(request.body) + self.assertIsNone(request.progress_fn) + self.assertFalse(hasattr(request, 'headers')) + self.assertFalse(hasattr(request, 'parameters')) + self.assertFalse(hasattr(request, 'payload')) + self.assertIsInstance(request, serde.RequestModel) + + request = model.PutObjectRequest( + bucket='bucket-test', + key='key-test', + acl='private', + storage_class='ColdArchive', + metadata={ + "client-side-encryption-key": "nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=", + "client-side-encryption-start": "De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=", + "client-side-encryption-cek-alg": "AES/CTR/NoPadding", + "client-side-encryption-wrap-alg": "RSA/NONE/PKCS1Padding", + }, + cache_control='no-cache', + content_disposition='attachment', + content_encoding='utf-8', + content_length=101, + content_md5='B5eJF1ptWaXm4bijSPyxw==', + content_type='application/octet-stream', + expires='2022-10-12T00:00:00.000Z', + server_side_encryption='SM4', + server_side_data_encryption='KMS', + sse_kms_key_id='9468da86-3509-4f8d-a61e-6eab1eac****', + tagging='tagging-test', + callback='{\"callbackUrl\":\"www.abc.com/callback\",\"callbackBody\":\"${etag}\"}', + callback_var='{\"x:var1\":\"value1\",\"x:var2\":\"value2\"}', + forbid_overwrite=True, + traffic_limit=100*1024*8, + request_payer='request_payer-test', + body='body-test', + progress_fn='progress_fn-test', + ) + self.assertEqual('bucket-test', request.bucket) + self.assertEqual('key-test', request.key) + self.assertEqual('private', request.acl) + self.assertEqual('ColdArchive', request.storage_class) + 
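+        # The user metadata dict is kept as-is on the request model;
+        # test_serialize_request below verifies that each entry is emitted
+        # as an x-oss-meta-* header on the wire.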
self.assertEqual('nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=', request.metadata.get("client-side-encryption-key")) + self.assertEqual('De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=', request.metadata.get("client-side-encryption-start")) + self.assertEqual('AES/CTR/NoPadding', request.metadata.get("client-side-encryption-cek-alg")) + self.assertEqual('RSA/NONE/PKCS1Padding', request.metadata.get("client-side-encryption-wrap-alg")) + self.assertEqual('no-cache', request.cache_control) + self.assertEqual('attachment', request.content_disposition) + self.assertEqual('utf-8', request.content_encoding) + self.assertEqual(101, request.content_length) + self.assertEqual('B5eJF1ptWaXm4bijSPyxw==',request.content_md5) + self.assertEqual('application/octet-stream', request.content_type) + self.assertEqual('2022-10-12T00:00:00.000Z', request.expires) + self.assertEqual('SM4', request.server_side_encryption) + self.assertEqual('KMS', request.server_side_data_encryption) + self.assertEqual('9468da86-3509-4f8d-a61e-6eab1eac****', request.sse_kms_key_id) + self.assertEqual('tagging-test', request.tagging) + self.assertEqual('{\"callbackUrl\":\"www.abc.com/callback\",\"callbackBody\":\"${etag}\"}', request.callback) + self.assertEqual('{\"x:var1\":\"value1\",\"x:var2\":\"value2\"}', request.callback_var) + self.assertEqual(True, request.forbid_overwrite) + self.assertEqual(100 * 1024 * 8, request.traffic_limit) + self.assertEqual('request_payer-test', request.request_payer) + self.assertEqual('body-test', request.body) + self.assertEqual('progress_fn-test', request.progress_fn) + + + request = model.PutObjectRequest( + bucket='bucket-test', + key='key-test', + invalid_field='invalid_field', + ) + self.assertTrue(hasattr(request, 'bucket')) + self.assertEqual('bucket-test', request.bucket) + self.assertTrue(hasattr(request, 'key')) + self.assertEqual('key-test', request.key) + self.assertFalse(hasattr(request, 'invalid_field')) + + request = model.PutObjectRequest( + bucket='bucket-test', + key='key-test', + headers={'key1': 'value1'}, + parameters={'parm1': 'value1'}, + payload='hello world', + ) + self.assertEqual('bucket-test', request.bucket) + self.assertEqual('key-test', request.key) + self.assertDictEqual({'key1': 'value1'}, request.headers) + self.assertDictEqual({'parm1': 'value1'}, request.parameters) + self.assertEqual('hello world', request.payload) + + + def test_serialize_request(self): + request = model.PutObjectRequest( + bucket='bucket-test', + key='key-test', + acl='private', + storage_class='ColdArchive', + metadata={ + "client-side-encryption-key": "nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=", + "client-side-encryption-start": "De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=", + "client-side-encryption-cek-alg": "AES/CTR/NoPadding", + "client-side-encryption-wrap-alg": "RSA/NONE/PKCS1Padding", + }, + cache_control='no-cache', + content_disposition='attachment', + content_encoding='utf-8', + content_length=101, + content_md5='B5eJF1ptWaXm4bijSPyxw==', + content_type='application/octet-stream', + expires='2022-10-12T00:00:00.000Z', + server_side_encryption='SM4', + server_side_data_encryption='KMS', + sse_kms_key_id='9468da86-3509-4f8d-a61e-6eab1eac****', + tagging='tagging-test', + callback='{\"callbackUrl\":\"www.abc.com/callback\",\"callbackBody\":\"${etag}\"}', + callback_var='{\"x:var1\":\"value1\",\"x:var2\":\"value2\"}', + forbid_overwrite=True, + traffic_limit=100 * 1024 * 8, + 
request_payer='request_payer-test', + body='body-test', + progress_fn='progress_fn-test', + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='PutObject', + method='PUT', + bucket=request.bucket, + )) + self.assertEqual('PutObject', op_input.op_name) + self.assertEqual('PUT', op_input.method) + self.assertEqual('bucket-test', op_input.bucket) + self.assertEqual('private', op_input.headers.get('x-oss-object-acl')) + self.assertEqual('ColdArchive', op_input.headers.get('x-oss-storage-class')) + self.assertEqual('nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=', op_input.headers.get('x-oss-meta-client-side-encryption-key')) + self.assertEqual('De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=', op_input.headers.get('x-oss-meta-client-side-encryption-start')) + self.assertEqual('AES/CTR/NoPadding', op_input.headers.get('x-oss-meta-client-side-encryption-cek-alg')) + self.assertEqual('RSA/NONE/PKCS1Padding', op_input.headers.get('x-oss-meta-client-side-encryption-wrap-alg')) + self.assertEqual('no-cache', op_input.headers.get('Cache-Control')) + self.assertEqual('attachment', op_input.headers.get('Content-Disposition')) + self.assertEqual('utf-8', op_input.headers.get('Content-Encoding')) + self.assertEqual(101, int(op_input.headers.get('Content-Length'))) + self.assertEqual('B5eJF1ptWaXm4bijSPyxw==', op_input.headers.get('Content-MD5')) + self.assertEqual('application/octet-stream', op_input.headers.get('Content-Type')) + self.assertEqual('2022-10-12T00:00:00.000Z', op_input.headers.get('Expires')) + self.assertEqual('SM4', op_input.headers.get('x-oss-server-side-encryption')) + self.assertEqual('KMS', op_input.headers.get('x-oss-server-side-data-encryption')) + self.assertEqual('9468da86-3509-4f8d-a61e-6eab1eac****', op_input.headers.get('x-oss-server-side-encryption-key-id')) + self.assertEqual('tagging-test', op_input.headers.get('x-oss-tagging')) + self.assertEqual('{\"callbackUrl\":\"www.abc.com/callback\",\"callbackBody\":\"${etag}\"}', op_input.headers.get('x-oss-callback')) + self.assertEqual('{\"x:var1\":\"value1\",\"x:var2\":\"value2\"}', op_input.headers.get('x-oss-callback-var')) + self.assertEqual(True, bool(op_input.headers.get('x-oss-forbid-overwrite'))) + self.assertEqual(100 * 1024 * 8, int(op_input.headers.get('x-oss-traffic-limit'))) + self.assertEqual('request_payer-test', op_input.headers.get('x-oss-request-payer')) + + + def test_constructor_result(self): + result = model.PutObjectResult() + self.assertIsNone(result.content_md5) + self.assertIsNone(result.etag) + self.assertIsNone(result.hash_crc64) + self.assertIsNone(result.version_id) + self.assertIsNone(result.callback_result) + self.assertIsInstance(result, serde.Model) + + result = model.PutObjectResult( + content_md5='1B2M2Y8AsgTpgAmY7PhC****', + etag='"D41D8CD98F00B204E9800998ECF8****"', + hash_crc64='316181249502703****', + version_id='CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', + callback_result={"callbackUrl":"www.abc.com/callback","callbackBody":"${etag}"}, + ) + self.assertEqual('1B2M2Y8AsgTpgAmY7PhC****', result.content_md5) + self.assertEqual('"D41D8CD98F00B204E9800998ECF8****"', result.etag) + self.assertEqual('316181249502703****', result.hash_crc64) + self.assertEqual('CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', result.version_id) + self.assertEqual({"callbackUrl": "www.abc.com/callback", "callbackBody": "${etag}"}, result.callback_result) + + result = model.PutObjectResult( + 
version_id='version_id-test', + invalid_field='invalid_field', + ) + self.assertEqual('version_id-test', result.version_id) + self.assertFalse(hasattr(result, 'invalid_field')) + + def test_deserialize_result(self): + xml_data = None + result = model.PutObjectResult() + serde.deserialize_output( + result, + OperationOutput( + status='OK', + status_code=200, + headers=CaseInsensitiveDict({ + 'x-oss-request-id': '123', + 'content_md5': '1B2M2Y8AsgTpgAmY7PhC****', + 'etag': '"D41D8CD98F00B204E9800998ECF8****"', + 'hash_crc64': '316181249502703****', + 'version_id': 'CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', + }), + http_response=MockHttpResponse( + status_code=200, + reason='OK', + headers={ + 'x-oss-request-id': 'id-1234', + }, + body=xml_data, + ) + ) + ) + self.assertEqual('OK', result.status) + self.assertEqual(200, result.status_code) + self.assertEqual('123', result.request_id) + self.assertEqual('1B2M2Y8AsgTpgAmY7PhC****', result.headers.get('content_md5')) + self.assertEqual('"D41D8CD98F00B204E9800998ECF8****"', result.headers.get('etag')) + self.assertEqual('316181249502703****', result.headers.get('hash_crc64')) + self.assertEqual('CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', result.headers.get('version_id')) + + +class TestHeadObject(unittest.TestCase): + def test_constructor_request(self): + request = model.HeadObjectRequest( + bucket='bucket-test', + key='key-test', + ) + self.assertIsNotNone(request.bucket) + self.assertIsNotNone(request.key) + self.assertIsNone(request.version_id) + self.assertIsNone(request.if_match) + self.assertIsNone(request.if_none_match) + self.assertIsNone(request.if_modified_since) + self.assertIsNone(request.if_unmodified_since) + self.assertIsNone(request.request_payer) + self.assertFalse(hasattr(request, 'headers')) + self.assertFalse(hasattr(request, 'parameters')) + self.assertFalse(hasattr(request, 'payload')) + self.assertIsInstance(request, serde.RequestModel) + + request = model.HeadObjectRequest( + bucket='bucket-test', + key='key-test', + version_id='fba9dede5f27731c9771645a3986****', + if_match='D41D8CD98F00B204E9800998ECF8****', + if_none_match='D41D8CD98F00B204E9800998ECF9****', + if_modified_since='Fri, 13 Nov 2023 14:47:53 GMT', + if_unmodified_since='Fri, 13 Nov 2015 14:47:53 GMT', + request_payer='request_payer-test', + ) + self.assertEqual('bucket-test', request.bucket) + self.assertEqual('key-test', request.key) + self.assertEqual('fba9dede5f27731c9771645a3986****', request.version_id) + self.assertEqual('D41D8CD98F00B204E9800998ECF8****', request.if_match) + self.assertEqual('D41D8CD98F00B204E9800998ECF9****', request.if_none_match) + self.assertEqual('Fri, 13 Nov 2023 14:47:53 GMT', request.if_modified_since) + self.assertEqual('Fri, 13 Nov 2015 14:47:53 GMT', request.if_unmodified_since) + self.assertEqual('request_payer-test', request.request_payer) + + request = model.HeadObjectRequest( + bucket='bucket-test', + key='key-test', + invalid_field='invalid_field', + ) + self.assertTrue(hasattr(request, 'bucket')) + self.assertEqual('bucket-test', request.bucket) + self.assertTrue(hasattr(request, 'key')) + self.assertEqual('key-test', request.key) + self.assertFalse(hasattr(request, 'invalid_field')) + + request = model.HeadObjectRequest( + bucket='bucket-test', + key='key-test', + headers={'key1': 'value1'}, + parameters={'parm1': 'value1'}, + payload='hello world', + ) + self.assertEqual('bucket-test', request.bucket) + self.assertEqual('key-test', request.key) + 
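+        # Explicitly supplied headers/parameters/payload appear to pass
+        # through to the request model untouched, allowing low-level
+        # per-operation overrides on top of the typed fields.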
self.assertDictEqual({'key1': 'value1'}, request.headers) + self.assertDictEqual({'parm1': 'value1'}, request.parameters) + self.assertEqual('hello world', request.payload) + + def test_serialize_request(self): + request = model.HeadObjectRequest( + bucket='bucket-test', + key='key-test', + version_id='fba9dede5f27731c9771645a3986****', + if_match='D41D8CD98F00B204E9800998ECF8****', + if_none_match='D41D8CD98F00B204E9800998ECF9****', + if_modified_since='Fri, 13 Nov 2023 14:47:53 GMT', + if_unmodified_since='Fri, 13 Nov 2015 14:47:53 GMT', + request_payer='request_payer-test', + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='HeadObject', + method='HEAD', + bucket=request.bucket, + )) + self.assertEqual('HeadObject', op_input.op_name) + self.assertEqual('HEAD', op_input.method) + self.assertEqual('bucket-test', op_input.bucket) + self.assertEqual('D41D8CD98F00B204E9800998ECF8****', op_input.headers.get('If-Match')) + self.assertEqual('D41D8CD98F00B204E9800998ECF9****', op_input.headers.get('If-None-Match')) + self.assertEqual('Fri, 13 Nov 2023 14:47:53 GMT', op_input.headers.get('If-Modified-Since')) + self.assertEqual('Fri, 13 Nov 2015 14:47:53 GMT', op_input.headers.get('If-Unmodified-Since')) + self.assertEqual('request_payer-test', op_input.headers.get('x-oss-request-payer')) + + + def test_constructor_result(self): + result = model.HeadObjectResult() + self.assertIsNone(result.content_length) + self.assertIsNone(result.content_type) + self.assertIsNone(result.etag) + self.assertIsNone(result.last_modified) + self.assertIsNone(result.content_md5) + self.assertIsNone(result.metadata) + self.assertIsNone(result.cache_control) + self.assertIsNone(result.content_disposition) + self.assertIsNone(result.content_encoding) + self.assertIsNone(result.expires) + self.assertIsNone(result.hash_crc64) + self.assertIsNone(result.storage_class) + self.assertIsNone(result.object_type) + self.assertIsNone(result.version_id) + self.assertIsNone(result.tagging_count) + self.assertIsNone(result.server_side_encryption) + self.assertIsNone(result.server_side_data_encryption) + self.assertIsNone(result.sse_kms_key_id) + self.assertIsNone(result.next_append_position) + self.assertIsNone(result.expiration) + self.assertIsNone(result.restore) + self.assertIsNone(result.process_status) + self.assertIsNone(result.request_charged) + self.assertIsNone(result.allow_origin) + self.assertIsNone(result.allow_methods) + self.assertIsNone(result.allow_age) + self.assertIsNone(result.allow_headers) + self.assertIsNone(result.expose_headers) + self.assertIsInstance(result, serde.Model) + + result = model.HeadObjectResult( + content_length=1024, + content_type='text/xml', + etag='"A082B659EF78733A5A042FA253B1****"', + last_modified=datetime.datetime.fromtimestamp(1702743657), + content_md5='B5eJF1ptWaXm4bijSPyxw==', + metadata={ + "client-side-encryption-key": "nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=", + "client-side-encryption-start": "De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=", + "client-side-encryption-cek-alg": "AES/CTR/NoPadding", + "client-side-encryption-wrap-alg": "RSA/NONE/PKCS1Padding", + }, + cache_control='no-cache', + content_disposition='attachment; filename=testing.txt', + content_encoding='utf-8', + expires='2023-10-12T00:00:00.000Z', + hash_crc64='GHH^%$#^&INOU(', + storage_class='Archive', + object_type='public-read-write', + version_id='version_id-test', + tagging_count=111, + server_side_encryption='SM4', + 
server_side_data_encryption='AES256', + sse_kms_key_id='9468da86-3509-4f8d-a61e-6eab1eac****', + next_append_position='222', + expiration='2022-10-12T00:00:00.000Z', + restore='ongoing-request=\"false\", expiry-date=\"Sun, 16 Apr 2017 08:12:33 GMT\"', + process_status='process_status-test', + request_charged='request_charged-test', + allow_origin='*', + allow_methods='PUT,GET', + allow_age='%#@$#@&^%&(*HIHJ', + allow_headers='{a:a1, b:b2}', + expose_headers='{a:a1, b:b2}', + ) + self.assertEqual(1024, result.content_length) + self.assertEqual('text/xml', result.content_type) + self.assertEqual('"A082B659EF78733A5A042FA253B1****"', result.etag) + self.assertEqual(datetime.datetime.fromtimestamp(1702743657), result.last_modified) + self.assertEqual('2023-12-17T00:20:57.000Z', result.last_modified.strftime('%Y-%m-%dT%H:%M:%S.000Z')) + self.assertEqual('B5eJF1ptWaXm4bijSPyxw==',result.content_md5) + self.assertEqual('no-cache', result.cache_control) + self.assertEqual('attachment; filename=testing.txt',result.content_disposition) + self.assertEqual('utf-8', result.content_encoding) + self.assertEqual('2023-10-12T00:00:00.000Z', result.expires) + self.assertEqual('GHH^%$#^&INOU(', result.hash_crc64) + self.assertEqual('Archive', result.storage_class) + self.assertEqual('public-read-write', result.object_type) + self.assertEqual('version_id-test', result.version_id) + self.assertEqual(111, result.tagging_count) + self.assertEqual('SM4', result.server_side_encryption) + self.assertEqual('AES256', result.server_side_data_encryption) + self.assertEqual('9468da86-3509-4f8d-a61e-6eab1eac****', result.sse_kms_key_id) + self.assertEqual('222', result.next_append_position) + self.assertEqual('2022-10-12T00:00:00.000Z', result.expiration) + self.assertEqual('ongoing-request=\"false\", expiry-date=\"Sun, 16 Apr 2017 08:12:33 GMT\"',result.restore) + self.assertEqual('process_status-test', result.process_status) + self.assertEqual('request_charged-test', result.request_charged) + self.assertEqual('*', result.allow_origin) + self.assertEqual('PUT,GET', result.allow_methods) + self.assertEqual('%#@$#@&^%&(*HIHJ', result.allow_age) + self.assertEqual('{a:a1, b:b2}', result.allow_headers) + self.assertEqual('{a:a1, b:b2}', result.expose_headers) + + result = model.HeadObjectResult( + expose_headers='expose_headers-test', + invalid_field='invalid_field', + ) + self.assertEqual('expose_headers-test', result.expose_headers) + self.assertFalse(hasattr(result, 'invalid_field')) + + def test_deserialize_result(self): + xml_data = None + result = model.HeadObjectResult() + serde.deserialize_output( + result, + OperationOutput( + status='OK', + status_code=200, + headers=CaseInsensitiveDict({ + 'x-oss-request-id': '123', + 'content_md5': '1B2M2Y8AsgTpgAmY7PhC****', + 'etag': '"D41D8CD98F00B204E9800998ECF8****"', + 'hash_crc64': '316181249502703****', + 'version_id': 'CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', + }), + http_response=MockHttpResponse( + status_code=200, + reason='OK', + headers={'x-oss-request-id': 'id-1234'}, + body=xml_data, + ) + ) + ) + self.assertEqual('OK', result.status) + self.assertEqual(200, result.status_code) + self.assertEqual('123', result.request_id) + self.assertEqual('1B2M2Y8AsgTpgAmY7PhC****', result.headers.get('content_md5')) + self.assertEqual('"D41D8CD98F00B204E9800998ECF8****"', result.headers.get('etag')) + self.assertEqual('316181249502703****', result.headers.get('hash_crc64')) + 
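+        # With body=None there is no XML payload to parse, so
+        # deserialize_output only maps the status line and response headers
+        # onto the result; result.headers echoes them back verbatim.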
self.assertEqual('CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', result.headers.get('version_id')) + + +class TestGetObject(unittest.TestCase): + def test_constructor_request(self): + request = model.GetObjectRequest( + bucket='bucket-test', + key='key-test', + ) + self.assertIsNotNone(request.bucket) + self.assertIsNotNone(request.key) + self.assertIsNone(request.if_match) + self.assertIsNone(request.if_none_match) + self.assertIsNone(request.if_modified_since) + self.assertIsNone(request.if_unmodified_since) + self.assertIsNone(request.range_header) + self.assertIsNone(request.range_behavior) + self.assertIsNone(request.response_cache_control) + self.assertIsNone(request.response_content_disposition) + self.assertIsNone(request.response_content_encoding) + self.assertIsNone(request.response_content_language) + self.assertIsNone(request.response_content_type) + self.assertIsNone(request.response_expires) + self.assertIsNone(request.version_id) + self.assertIsNone(request.traffic_limit) + self.assertIsNone(request.process) + self.assertIsNone(request.request_payer) + self.assertIsNone(request.progress_fn) + self.assertFalse(hasattr(request, 'headers')) + self.assertFalse(hasattr(request, 'parameters')) + self.assertFalse(hasattr(request, 'payload')) + self.assertIsInstance(request, serde.RequestModel) + + request = model.GetObjectRequest( + bucket='bucket-test', + key='key-test', + if_match='D41D8CD98F00B204E9800998ECF8****', + if_none_match='D41D8CD98F00B204E9800998ECF9****', + if_modified_since='Fri, 13 Nov 2023 14:47:53 GMT', + if_unmodified_since='Fri, 13 Nov 2015 14:47:53 GMT', + range_header='bytes 0~9/44', + range_behavior='standard', + response_cache_control='no-cache', + response_content_disposition='attachment; filename=testing.txt', + response_content_encoding='utf-8', + response_content_language='中文', + response_content_type='text', + response_expires='Fri, 24 Feb 2012 17:00:00 GMT', + version_id='CAEQNhiBgM0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY*****', + traffic_limit=1022, + process='process-test', + request_payer='request_payer-test', + progress_fn='progress_fn-test', + ) + self.assertEqual('bucket-test', request.bucket) + self.assertEqual('key-test', request.key) + self.assertEqual('D41D8CD98F00B204E9800998ECF8****', request.if_match) + self.assertEqual('D41D8CD98F00B204E9800998ECF9****', request.if_none_match) + self.assertEqual('Fri, 13 Nov 2023 14:47:53 GMT', request.if_modified_since) + self.assertEqual('Fri, 13 Nov 2015 14:47:53 GMT', request.if_unmodified_since) + self.assertEqual('bytes 0~9/44', request.range_header) + self.assertEqual('standard', request.range_behavior) + self.assertEqual('no-cache', request.response_cache_control) + self.assertEqual('attachment; filename=testing.txt',request.response_content_disposition) + self.assertEqual('utf-8', request.response_content_encoding) + self.assertEqual('中文', request.response_content_language) + self.assertEqual('text', request.response_content_type) + self.assertEqual('Fri, 24 Feb 2012 17:00:00 GMT', request.response_expires) + self.assertEqual('CAEQNhiBgM0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY*****', request.version_id) + self.assertEqual(1022, request.traffic_limit) + self.assertEqual('process-test', request.process) + self.assertEqual('request_payer-test', request.request_payer) + self.assertEqual('progress_fn-test', request.progress_fn) + + request = model.GetObjectRequest( + bucket='bucket-test', + key='key-test', + invalid_field='invalid_field', + ) + 
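+        # Unknown keyword arguments are silently dropped by the model
+        # constructor instead of raising, which the hasattr checks below
+        # pin down.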
self.assertTrue(hasattr(request, 'bucket')) + self.assertEqual('bucket-test', request.bucket) + self.assertTrue(hasattr(request, 'key')) + self.assertEqual('key-test', request.key) + self.assertFalse(hasattr(request, 'invalid_field')) + + request = model.GetObjectRequest( + bucket='bucket-test', + key='key-test', + headers={'key1': 'value1'}, + parameters={'parm1': 'value1'}, + payload='hello world', + ) + self.assertEqual('bucket-test', request.bucket) + self.assertEqual('key-test', request.key) + self.assertDictEqual({'key1': 'value1'}, request.headers) + self.assertDictEqual({'parm1': 'value1'}, request.parameters) + self.assertEqual('hello world', request.payload) + + def test_serialize_request(self): + request = model.GetObjectRequest( + bucket='bucket', + key='key-test', + if_match='D41D8CD98F00B204E9800998ECF8****', + if_none_match='D41D8CD98F00B204E9800998ECF9****', + if_modified_since='Fri, 13 Nov 2023 14:47:53 GMT', + if_unmodified_since='Fri, 13 Nov 2015 14:47:53 GMT', + range_header='bytes 0~9/44', + range_behavior='standard', + response_cache_control='no-cache', + response_content_disposition='attachment; filename=testing.txt', + response_content_encoding='utf-8', + response_content_language='中文', + response_content_type='text', + response_expires='Fri, 24 Feb 2012 17:00:00 GMT', + version_id='CAEQNhiBgM0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY*****', + traffic_limit=1022, + process='process-test', + request_payer='request_payer-test', + progress_fn='progress_fn-test', + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='GetObject', + method='GET', + bucket=request.bucket, + )) + self.assertEqual('GetObject', op_input.op_name) + self.assertEqual('GET', op_input.method) + self.assertEqual('bucket', op_input.bucket) + self.assertEqual('D41D8CD98F00B204E9800998ECF8****', op_input.headers.get('If-Match')) + self.assertEqual('D41D8CD98F00B204E9800998ECF9****', op_input.headers.get('If-None-Match')) + self.assertEqual('Fri, 13 Nov 2023 14:47:53 GMT', op_input.headers.get('If-Modified-Since')) + self.assertEqual('Fri, 13 Nov 2015 14:47:53 GMT', op_input.headers.get('If-Unmodified-Since')) + self.assertEqual('bytes 0~9/44', op_input.headers.get('Range')) + self.assertEqual('standard', op_input.headers.get('x-oss-range-behavior')) + self.assertEqual(1022, int(op_input.headers.get('x-oss-traffic-limit'))) + self.assertEqual('request_payer-test', op_input.headers.get('x-oss-request-payer')) + + + def test_constructor_result(self): + result = model.GetObjectResult() + self.assertIsNone(result.content_length) + self.assertIsNone(result.content_range) + self.assertIsNone(result.content_type) + self.assertIsNone(result.etag) + self.assertIsNone(result.last_modified) + self.assertIsNone(result.content_md5) + self.assertIsNone(result.metadata) + self.assertIsNone(result.cache_control) + self.assertIsNone(result.content_disposition) + self.assertIsNone(result.content_encoding) + self.assertIsNone(result.expires) + self.assertIsNone(result.hash_crc64) + self.assertIsNone(result.storage_class) + self.assertIsNone(result.object_type) + self.assertIsNone(result.version_id) + self.assertIsNone(result.tagging_count) + self.assertIsNone(result.server_side_encryption) + self.assertIsNone(result.server_side_data_encryption) + self.assertIsNone(result.sse_kms_key_id) + self.assertIsNone(result.next_append_position) + self.assertIsNone(result.expiration) + self.assertIsNone(result.restore) + self.assertIsNone(result.process_status) + self.assertIsNone(result.delete_marker) + 
+        self.assertIsNone(result.body)
+        self.assertIsInstance(result, serde.Model)
+
+        result = model.GetObjectResult(
+            content_length=1024*10,
+            content_range='bytes 0~9/44',
+            content_type='application/octet-stream',
+            etag='etag-test',
+            last_modified=datetime.datetime.fromtimestamp(1702743657),
+            content_md5='B5eJF1ptWaXm4bijSPyxw==',
+            metadata={
+                "client-side-encryption-key": "nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=",
+                "client-side-encryption-start": "De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=",
+                "client-side-encryption-cek-alg": "AES/CTR/NoPadding",
+                "client-side-encryption-wrap-alg": "RSA/NONE/PKCS1Padding",
+            },
+            cache_control='no-cache',
+            content_disposition='attachment',
+            content_encoding='utf-8',
+            expires='2022-10-12T00:00:00.000Z',
+            hash_crc64='316181249502703****',
+            storage_class='Archive',
+            object_type='public-read-write',
+            version_id='CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****',
+            tagging_count=2048,
+            server_side_encryption='AES256',
+            server_side_data_encryption='SM4',
+            sse_kms_key_id='9468da86-3509-4f8d-a61e-6eab1eac****',
+            next_append_position='234',
+            expiration='2022-10-12T00:00:00.000Z',
+            restore='ongoing-request="false", expiry-date="Sun, 16 Apr 2017 08:12:33 GMT"',
+            process_status='process_status-test',
+            delete_marker=True,
+        )
+        self.assertEqual(1024 * 10, int(result.content_length))
+        self.assertEqual('bytes 0~9/44', result.content_range)
+        self.assertEqual('application/octet-stream', result.content_type)
+        self.assertEqual('etag-test', result.etag)
+        self.assertEqual(datetime.datetime.fromtimestamp(1702743657), result.last_modified)
+        self.assertEqual('B5eJF1ptWaXm4bijSPyxw==', result.content_md5)
+        self.assertEqual('no-cache', result.cache_control)
+        self.assertEqual('attachment', result.content_disposition)
+        self.assertEqual('utf-8', result.content_encoding)
+        self.assertEqual('2022-10-12T00:00:00.000Z', result.expires)
+        self.assertEqual('316181249502703****', result.hash_crc64)
+        self.assertEqual('Archive', result.storage_class)
+        self.assertEqual('public-read-write', result.object_type)
+        self.assertEqual('CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', result.version_id)
+        self.assertEqual(2048, int(result.tagging_count))
+        self.assertEqual('AES256', result.server_side_encryption)
+        self.assertEqual('SM4', result.server_side_data_encryption)
+        self.assertEqual('9468da86-3509-4f8d-a61e-6eab1eac****', result.sse_kms_key_id)
+        self.assertEqual('234', result.next_append_position)
+        self.assertEqual('2022-10-12T00:00:00.000Z', result.expiration)
+        self.assertEqual('ongoing-request="false", expiry-date="Sun, 16 Apr 2017 08:12:33 GMT"', result.restore)
+        self.assertEqual('process_status-test', result.process_status)
+        self.assertEqual(True, bool(result.delete_marker))
+
+        result = model.GetObjectResult(
+            delete_marker=True,
+            invalid_field='invalid_field',
+        )
+        self.assertEqual(True, result.delete_marker)
+        self.assertFalse(hasattr(result, 'invalid_field'))
+
+    def test_deserialize_result(self):
+        xml_data = None
+        result = model.GetObjectResult()
+        serde.deserialize_output(
+            result,
+            OperationOutput(
+                status='OK',
+                status_code=200,
+                headers=CaseInsensitiveDict({
+                    'x-oss-request-id': '123',
+                    'content_md5': '1B2M2Y8AsgTpgAmY7PhC****',
+                    'etag': '"D41D8CD98F00B204E9800998ECF8****"',
+                    'hash_crc64': '316181249502703****',
+                    'version_id': 'CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****',
+                }),
http_response=MockHttpResponse( + status_code=200, + reason='OK', + headers={'x-oss-request-id': 'id-1234'}, + body=xml_data, + ) + ) + ) + self.assertEqual('OK', result.status) + self.assertEqual(200, result.status_code) + self.assertEqual('123', result.request_id) + self.assertEqual('1B2M2Y8AsgTpgAmY7PhC****', result.headers.get('content_md5')) + self.assertEqual('"D41D8CD98F00B204E9800998ECF8****"', result.headers.get('etag')) + self.assertEqual('316181249502703****', result.headers.get('hash_crc64')) + self.assertEqual('CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', result.headers.get('version_id')) + + +class TestAppendObject(unittest.TestCase): + def test_constructor_request(self): + request = model.AppendObjectRequest( + bucket='bucket_name', + key='example-object-2.jpg', + position=0, + ) + self.assertIsNotNone(request.bucket) + self.assertIsNotNone(request.key) + self.assertIsNotNone(request.position) + self.assertIsNone(request.acl) + self.assertIsNone(request.storage_class) + self.assertIsNone(request.metadata) + self.assertIsNone(request.cache_control) + self.assertIsNone(request.content_disposition) + self.assertIsNone(request.content_encoding) + self.assertIsNone(request.content_length) + self.assertIsNone(request.content_md5) + self.assertIsNone(request.content_type) + self.assertIsNone(request.expires) + self.assertIsNone(request.server_side_encryption) + self.assertIsNone(request.server_side_data_encryption) + self.assertIsNone(request.sse_kms_key_id) + self.assertIsNone(request.tagging) + self.assertIsNone(request.forbid_overwrite) + self.assertIsNone(request.traffic_limit) + self.assertIsNone(request.request_payer) + self.assertIsNone(request.body) + self.assertIsNone(request.progress_fn) + self.assertFalse(hasattr(request, 'headers')) + self.assertFalse(hasattr(request, 'parameters')) + self.assertFalse(hasattr(request, 'payload')) + self.assertIsInstance(request, serde.RequestModel) + + request = model.AppendObjectRequest( + bucket='bucket_name', + key='example-object-2.jpg', + position=0, + acl='private', + storage_class='ColdArchive', + metadata={ + "client-side-encryption-key": "nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=", + "client-side-encryption-start": "De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=", + "client-side-encryption-cek-alg": "AES/CTR/NoPadding", + "client-side-encryption-wrap-alg": "RSA/NONE/PKCS1Padding", + }, + cache_control='no-cache', + content_disposition='attachment', + content_encoding='utf-8', + content_length=101, + content_md5='B5eJF1ptWaXm4bijSPyx', + content_type='application/octet-stream', + expires='2022-10-12T00:00:00.000Z', + server_side_encryption='SM4', + server_side_data_encryption='KMS', + sse_kms_key_id='9468da86-3509-4f8d-a61e-6eab1eac****', + tagging='tagging-test', + forbid_overwrite=True, + traffic_limit=100*1024*8, + request_payer='requester', + body='xml_data', + progress_fn='progress_fn-test', + ) + self.assertEqual('bucket_name', request.bucket) + self.assertEqual('example-object-2.jpg', request.key) + self.assertEqual(0, request.position) + self.assertEqual('private', request.acl) + self.assertEqual('ColdArchive', request.storage_class) + self.assertEqual('nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=', request.metadata.get("client-side-encryption-key")) + self.assertEqual('De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=', request.metadata.get("client-side-encryption-start")) + 
self.assertEqual('AES/CTR/NoPadding', request.metadata.get("client-side-encryption-cek-alg")) + self.assertEqual('RSA/NONE/PKCS1Padding', request.metadata.get("client-side-encryption-wrap-alg")) + self.assertEqual('no-cache', request.cache_control) + self.assertEqual('attachment', request.content_disposition) + self.assertEqual('utf-8', request.content_encoding) + self.assertEqual(101, request.content_length) + self.assertEqual('B5eJF1ptWaXm4bijSPyx', request.content_md5) + self.assertEqual('application/octet-stream', request.content_type) + self.assertEqual('2022-10-12T00:00:00.000Z', request.expires) + self.assertEqual('SM4', request.server_side_encryption) + self.assertEqual('KMS', request.server_side_data_encryption) + self.assertEqual('9468da86-3509-4f8d-a61e-6eab1eac****', request.sse_kms_key_id) + self.assertEqual('tagging-test', request.tagging) + self.assertEqual(True, request.forbid_overwrite) + self.assertEqual(100 * 1024 * 8, request.traffic_limit) + self.assertEqual('requester', request.request_payer) + self.assertEqual('xml_data', request.body) + self.assertEqual('progress_fn-test', request.progress_fn) + + + request = model.AppendObjectRequest( + bucket='bucket_name', + key='example-object-2.jpg', + position=0, + invalid_field='invalid_field', + ) + self.assertTrue(hasattr(request, 'bucket')) + self.assertEqual('bucket_name', request.bucket) + self.assertTrue(hasattr(request, 'key')) + self.assertEqual('example-object-2.jpg', request.key) + self.assertTrue(hasattr(request, 'position')) + self.assertEqual(0, request.position) + self.assertFalse(hasattr(request, 'invalid_field')) + + request = model.AppendObjectRequest( + bucket='bucket_name', + key='example-object-2.jpg', + position=0, + headers={'key1': 'value1'}, + parameters={'parm1': 'value1'}, + payload='hello world', + ) + self.assertEqual('bucket_name', request.bucket) + self.assertEqual('example-object-2.jpg', request.key) + self.assertEqual(0, request.position) + self.assertDictEqual({'key1': 'value1'}, request.headers) + self.assertDictEqual({'parm1': 'value1'}, request.parameters) + self.assertEqual('hello world', request.payload) + + def test_serialize_request(self): + request = model.AppendObjectRequest( + bucket='bucket_name', + key='example-object-2.jpg', + position=0, + acl='private', + storage_class='ColdArchive', + metadata={ + "client-side-encryption-key": "nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=", + "client-side-encryption-start": "De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=", + "client-side-encryption-cek-alg": "AES/CTR/NoPadding", + "client-side-encryption-wrap-alg": "RSA/NONE/PKCS1Padding", + }, + cache_control='no-cache', + content_disposition='attachment', + content_encoding='utf-8', + content_length=101, + content_md5='B5eJF1ptWaXm4bijSPyx', + content_type='application/octet-stream', + expires='2022-10-12T00:00:00.000Z', + server_side_encryption='SM4', + server_side_data_encryption='KMS', + sse_kms_key_id='9468da86-3509-4f8d-a61e-6eab1eac****', + tagging='tagging-test', + forbid_overwrite=True, + traffic_limit=100*1024*8, + request_payer='requester', + body='xml_data', + progress_fn='progress_fn-test', + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='AppendObject', + method='POST', + bucket=request.bucket, + )) + self.assertEqual('AppendObject', op_input.op_name) + self.assertEqual('POST', op_input.method) + self.assertEqual('bucket_name', op_input.bucket) + self.assertEqual('private', 
op_input.headers.get('x-oss-object-acl')) + self.assertEqual('ColdArchive', op_input.headers.get('x-oss-storage-class')) + self.assertEqual('nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=', op_input.headers.get('x-oss-meta-client-side-encryption-key')) + self.assertEqual('De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=', op_input.headers.get('x-oss-meta-client-side-encryption-start')) + self.assertEqual('AES/CTR/NoPadding', op_input.headers.get('x-oss-meta-client-side-encryption-cek-alg')) + self.assertEqual('RSA/NONE/PKCS1Padding', op_input.headers.get('x-oss-meta-client-side-encryption-wrap-alg')) + self.assertEqual('no-cache', op_input.headers.get('Cache-Control')) + self.assertEqual('attachment', op_input.headers.get('Content-Disposition')) + self.assertEqual('utf-8', op_input.headers.get('Content-Encoding')) + self.assertEqual('101', op_input.headers.get('Content-Length')) + self.assertEqual('B5eJF1ptWaXm4bijSPyx', op_input.headers.get('Content-MD5')) + self.assertEqual('application/octet-stream', op_input.headers.get('Content-Type')) + self.assertEqual('2022-10-12T00:00:00.000Z', op_input.headers.get('Expires')) + self.assertEqual('SM4', op_input.headers.get('x-oss-server-side-encryption')) + self.assertEqual('KMS', op_input.headers.get('x-oss-server-side-data-encryption')) + self.assertEqual('9468da86-3509-4f8d-a61e-6eab1eac****', op_input.headers.get('x-oss-server-side-encryption-key-id')) + self.assertEqual('tagging-test', op_input.headers.get('x-oss-tagging')) + self.assertEqual(True, bool(op_input.headers.get('x-oss-forbid-overwrite'))) + self.assertEqual(100*1024*8, int(op_input.headers.get('x-oss-traffic-limit'))) + self.assertEqual('requester', op_input.headers.get('x-oss-request-payer')) + + def test_constructor_result(self): + result = model.AppendObjectResult() + self.assertIsInstance(result, serde.ResultModel) + + def test_deserialize_result(self): + xml_data = None + result = model.AppendObjectResult() + serde.deserialize_output( + result, + OperationOutput( + status='OK', + status_code=200, + headers=CaseInsensitiveDict({ + 'x-oss-request-id': '123', + 'x-oss-hash-crc64ecma': '316181249502703****', + 'x-oss-version-id': 'CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', + 'x-oss-next-append-position': 47, + }), + http_response=MockHttpResponse( + status_code=200, + reason='OK', + headers={'x-oss-request-id': 'id-1234'}, + body=xml_data, + ) + ) + ) + self.assertEqual('OK', result.status) + self.assertEqual(200, result.status_code) + self.assertEqual('123', result.request_id) + self.assertEqual('316181249502703****', result.headers.get('x-oss-hash-crc64ecma')) + self.assertEqual('CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', result.headers.get('x-oss-version-id')) + self.assertEqual(47, result.headers.get('x-oss-next-append-position')) + + +class TestCopyObject(unittest.TestCase): + def test_constructor_request(self): + request = model.CopyObjectRequest( + bucket='bucket_name', + key='example-object-2.jpg', + source_key='source-invalid-key', + ) + self.assertIsNotNone(request.bucket) + self.assertIsNotNone(request.key) + self.assertIsNotNone(request.source_key) + self.assertIsNone(request.source_bucket) + self.assertIsNone(request.source_version_id) + self.assertIsNone(request.if_match) + self.assertIsNone(request.if_none_match) + self.assertIsNone(request.if_modified_since) + self.assertIsNone(request.if_unmodified_since) + self.assertIsNone(request.acl) + 
self.assertIsNone(request.storage_class) + self.assertIsNone(request.metadata) + self.assertIsNone(request.cache_control) + self.assertIsNone(request.content_disposition) + self.assertIsNone(request.content_encoding) + self.assertIsNone(request.content_length) + self.assertIsNone(request.content_md5) + self.assertIsNone(request.content_type) + self.assertIsNone(request.expires) + self.assertIsNone(request.metadata_directive) + self.assertIsNone(request.server_side_encryption) + self.assertIsNone(request.server_side_data_encryption) + self.assertIsNone(request.sse_kms_key_id) + self.assertIsNone(request.tagging) + self.assertIsNone(request.tagging_directive) + self.assertIsNone(request.forbid_overwrite) + self.assertIsNone(request.traffic_limit) + self.assertIsNone(request.request_payer) + self.assertIsNone(request.progress_fn) + self.assertFalse(hasattr(request, 'headers')) + self.assertFalse(hasattr(request, 'parameters')) + self.assertFalse(hasattr(request, 'payload')) + self.assertIsInstance(request, serde.RequestModel) + + request = model.CopyObjectRequest( + bucket='bucket_name', + key='example-object-2.jpg', + source_key='source-invalid-key', + source_bucket='source_bucket-test', + source_version_id='source_version_id-test', + if_match='D41D8CD98F00B204E9800998ECF8****', + if_none_match='D41D8CD98F00B204E9800998ECF9****', + if_modified_since='Fri, 13 Nov 2023 14:47:53 GMT', + if_unmodified_since='Fri, 13 Nov 2015 14:47:53 GMT', + acl='private', + storage_class='ColdArchive', + metadata={ + "client-side-encryption-key": "nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=", + "client-side-encryption-start": "De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=", + "client-side-encryption-cek-alg": "AES/CTR/NoPadding", + "client-side-encryption-wrap-alg": "RSA/NONE/PKCS1Padding", + }, + cache_control='no-cache', + content_disposition='attachment', + content_encoding='utf-8', + content_length=101, + content_md5='B5eJF1ptWaXm4bijSPyx', + content_type='application/octet-stream', + expires='2022-10-12T00:00:00.000Z', + metadata_directive='COPY', + server_side_encryption='SM4', + server_side_data_encryption='KMS', + sse_kms_key_id='9468da86-3509-4f8d-a61e-6eab1eac****', + tagging='tagging-test', + tagging_directive='tagging_directive-test', + forbid_overwrite=True, + traffic_limit=100*1024*8, + request_payer='requester', + progress_fn='progress_fn-test', + ) + self.assertEqual('bucket_name', request.bucket) + self.assertEqual('example-object-2.jpg', request.key) + self.assertEqual('source-invalid-key', request.source_key) + self.assertEqual('source_bucket-test', request.source_bucket) + self.assertEqual('source_version_id-test', request.source_version_id) + self.assertEqual('D41D8CD98F00B204E9800998ECF8****', request.if_match) + self.assertEqual('D41D8CD98F00B204E9800998ECF9****', request.if_none_match) + self.assertEqual('Fri, 13 Nov 2023 14:47:53 GMT', request.if_modified_since) + self.assertEqual('Fri, 13 Nov 2015 14:47:53 GMT', request.if_unmodified_since) + self.assertEqual('private', request.acl) + self.assertEqual('ColdArchive', request.storage_class) + self.assertEqual('nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=', request.metadata.get("client-side-encryption-key")) + self.assertEqual('De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=', request.metadata.get("client-side-encryption-start")) + self.assertEqual('AES/CTR/NoPadding', request.metadata.get("client-side-encryption-cek-alg")) + 
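+ # Note: the if_match/if_none_match/if_modified_since/if_unmodified_since fields describe conditions on the *source* object; serialization maps them to x-oss-copy-source-if-* headers, as test_serialize_request below verifies.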
self.assertEqual('RSA/NONE/PKCS1Padding', request.metadata.get("client-side-encryption-wrap-alg")) + self.assertEqual('no-cache', request.cache_control) + self.assertEqual('attachment', request.content_disposition) + self.assertEqual('utf-8', request.content_encoding) + self.assertEqual(101, request.content_length) + self.assertEqual('B5eJF1ptWaXm4bijSPyx', request.content_md5) + self.assertEqual('application/octet-stream', request.content_type) + self.assertEqual('2022-10-12T00:00:00.000Z', request.expires) + self.assertEqual('COPY', request.metadata_directive) + self.assertEqual('SM4', request.server_side_encryption) + self.assertEqual('KMS', request.server_side_data_encryption) + self.assertEqual('9468da86-3509-4f8d-a61e-6eab1eac****', request.sse_kms_key_id) + self.assertEqual('tagging-test', request.tagging) + self.assertEqual('tagging_directive-test', request.tagging_directive) + self.assertEqual(True, request.forbid_overwrite) + self.assertEqual(100 * 1024 * 8, request.traffic_limit) + self.assertEqual('requester', request.request_payer) + self.assertEqual('progress_fn-test', request.progress_fn) + + + request = model.CopyObjectRequest( + bucket='bucket_name', + key='example-object-2.jpg', + source_key='source-invalid-key', + invalid_field='invalid_field', + ) + self.assertTrue(hasattr(request, 'bucket')) + self.assertEqual('bucket_name', request.bucket) + self.assertTrue(hasattr(request, 'key')) + self.assertEqual('example-object-2.jpg', request.key) + self.assertTrue(hasattr(request, 'source_key')) + self.assertEqual('source-invalid-key', request.source_key) + self.assertFalse(hasattr(request, 'invalid_field')) + + request = model.CopyObjectRequest( + bucket='bucket_name', + key='example-object-2.jpg', + source_key='source-invalid-key', + headers={'key1': 'value1'}, + parameters={'parm1': 'value1'}, + payload='hello world', + ) + self.assertEqual('bucket_name', request.bucket) + self.assertEqual('example-object-2.jpg', request.key) + self.assertEqual('source-invalid-key', request.source_key) + self.assertDictEqual({'key1': 'value1'}, request.headers) + self.assertDictEqual({'parm1': 'value1'}, request.parameters) + self.assertEqual('hello world', request.payload) + + def test_serialize_request(self): + request = model.CopyObjectRequest( + bucket='bucket_name', + key='example-object-2.jpg', + source_key='source-invalid-key', + source_bucket='source_bucket-test', + source_version_id='source_version_id-test', + if_match='D41D8CD98F00B204E9800998ECF8****', + if_none_match='D41D8CD98F00B204E9800998ECF9****', + if_modified_since='Fri, 13 Nov 2023 14:47:53 GMT', + if_unmodified_since='Fri, 13 Nov 2015 14:47:53 GMT', + acl='private', + storage_class='ColdArchive', + metadata={ + "client-side-encryption-key": "nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=", + "client-side-encryption-start": "De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=", + "client-side-encryption-cek-alg": "AES/CTR/NoPadding", + "client-side-encryption-wrap-alg": "RSA/NONE/PKCS1Padding", + }, + cache_control='no-cache', + content_disposition='attachment', + content_encoding='utf-8', + content_length=101, + content_md5='B5eJF1ptWaXm4bijSPyx', + content_type='application/octet-stream', + expires='2022-10-12T00:00:00.000Z', + metadata_directive='metadata_directive-test', + server_side_encryption='SM4', + server_side_data_encryption='KMS', + sse_kms_key_id='9468da86-3509-4f8d-a61e-6eab1eac****', + tagging='tagging-test', + tagging_directive='tagging_directive-test', + 
forbid_overwrite=True, + traffic_limit=100*1024*8, + request_payer='requester', + progress_fn='progress_fn-test', + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='CopyObject', + method='POST', + bucket=request.bucket, + )) + self.assertEqual('CopyObject', op_input.op_name) + self.assertEqual('POST', op_input.method) + self.assertEqual('bucket_name', op_input.bucket) + self.assertEqual('D41D8CD98F00B204E9800998ECF8****', op_input.headers.get('x-oss-copy-source-if-match')) + self.assertEqual('D41D8CD98F00B204E9800998ECF9****', op_input.headers.get('x-oss-copy-source-if-none-match')) + self.assertEqual('Fri, 13 Nov 2023 14:47:53 GMT', op_input.headers.get('x-oss-copy-source-if-modified-since')) + self.assertEqual('Fri, 13 Nov 2015 14:47:53 GMT', op_input.headers.get('x-oss-copy-source-if-unmodified-since')) + self.assertEqual('private', op_input.headers.get('x-oss-object-acl')) + self.assertEqual('ColdArchive', op_input.headers.get('x-oss-storage-class')) + self.assertEqual('nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=', op_input.headers.get('x-oss-meta-client-side-encryption-key')) + self.assertEqual('De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=', op_input.headers.get('x-oss-meta-client-side-encryption-start')) + self.assertEqual('AES/CTR/NoPadding', op_input.headers.get('x-oss-meta-client-side-encryption-cek-alg')) + self.assertEqual('RSA/NONE/PKCS1Padding', op_input.headers.get('x-oss-meta-client-side-encryption-wrap-alg')) + self.assertEqual('no-cache', op_input.headers.get('Cache-Control')) + self.assertEqual('attachment', op_input.headers.get('Content-Disposition')) + self.assertEqual('utf-8', op_input.headers.get('Content-Encoding')) + self.assertEqual(101, int(op_input.headers.get('Content-Length'))) + self.assertEqual('B5eJF1ptWaXm4bijSPyx', op_input.headers.get('Content-MD5')) + self.assertEqual('application/octet-stream', op_input.headers.get('Content-Type')) + self.assertEqual('2022-10-12T00:00:00.000Z', op_input.headers.get('Expires')) + self.assertEqual('metadata_directive-test', op_input.headers.get('x-oss-metadata-directive')) + self.assertEqual('SM4', op_input.headers.get('x-oss-server-side-encryption')) + self.assertEqual('KMS', op_input.headers.get('x-oss-server-side-data-encryption')) + self.assertEqual('9468da86-3509-4f8d-a61e-6eab1eac****', op_input.headers.get('x-oss-server-side-encryption-key-id')) + self.assertEqual('tagging-test', op_input.headers.get('x-oss-tagging')) + self.assertEqual('tagging_directive-test', op_input.headers.get('x-oss-tagging-directive')) + self.assertEqual(True, bool(op_input.headers.get('x-oss-forbid-overwrite'))) + self.assertEqual(100*1024*8, int(op_input.headers.get('x-oss-traffic-limit'))) + self.assertEqual('requester', op_input.headers.get('x-oss-request-payer')) + + def test_constructor_result(self): + result = model.CopyObjectResult() + self.assertIsNone(result.version_id) + self.assertIsNone(result.hash_crc64) + self.assertIsNone(result.source_version_id) + self.assertIsNone(result.server_side_encryption) + self.assertIsNone(result.server_side_data_encryption) + self.assertIsNone(result.sse_kms_key_id) + self.assertIsNone(result.last_modified) + self.assertIsNone(result.etag) + self.assertIsInstance(result, serde.Model) + + result = model.CopyObjectResult( + version_id='CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', + hash_crc64='316181249502703****', + source_version_id='source_version_id-test', + server_side_encryption='SM4', + 
server_side_data_encryption='KMS', + sse_kms_key_id='9468da86-3509-4f8d-a61e-6eab1eac****', + last_modified=datetime.datetime.fromtimestamp(1702743657), + etag='"D41D8CD98F00B204E9800998ECF8****"', + ) + self.assertEqual('CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', result.version_id) + self.assertEqual('316181249502703****', result.hash_crc64) + self.assertEqual('source_version_id-test', result.source_version_id) + self.assertEqual('SM4', result.server_side_encryption) + self.assertEqual('KMS', result.server_side_data_encryption) + self.assertEqual('9468da86-3509-4f8d-a61e-6eab1eac****', result.sse_kms_key_id) + self.assertEqual(datetime.datetime.fromtimestamp(1702743657), result.last_modified) + self.assertEqual('2023-12-17T00:20:57.000Z', result.last_modified.strftime('%Y-%m-%dT%H:%M:%S.000Z')) + self.assertEqual('"D41D8CD98F00B204E9800998ECF8****"', result.etag) + + result = model.CopyObjectResult( + etag='"D41D8CD98F00B204E9800998ECF8****"', + invalid_field='invalid_field', + ) + self.assertEqual('"D41D8CD98F00B204E9800998ECF8****"', result.etag) + self.assertFalse(hasattr(result, 'invalid_field')) + + def test_deserialize_result(self): + xml_data = r''' + <CopyObjectResult> + <ETag>"C4CA4238A0B923820DCC509A6F75****"</ETag> + <LastModified>2019-04-09T03:45:32.000Z</LastModified> + </CopyObjectResult>''' + + result = model.CopyObjectResult() + serde.deserialize_xml(xml_data=xml_data, obj=result) + self.assertEqual('"C4CA4238A0B923820DCC509A6F75****"', result.etag) + self.assertEqual("2019-04-09T03:45:32.000Z", result.last_modified.strftime('%Y-%m-%dT%H:%M:%S.000Z')) + + +class TestDeleteObject(unittest.TestCase): + def test_constructor_request(self): + request = model.DeleteObjectRequest( + bucket='bucket_name', + key='example-object-2.jpg', + ) + self.assertIsNotNone(request.bucket) + self.assertIsNotNone(request.key) + self.assertIsNone(request.version_id) + self.assertIsNone(request.request_payer) + self.assertFalse(hasattr(request, 'headers')) + self.assertFalse(hasattr(request, 'parameters')) + self.assertFalse(hasattr(request, 'payload')) + self.assertIsInstance(request, serde.RequestModel) + + request = model.DeleteObjectRequest( + bucket='bucket_name', + key='example-object-2.jpg', + version_id='CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', + request_payer='requester', + ) + self.assertEqual('bucket_name', request.bucket) + self.assertEqual('example-object-2.jpg', request.key) + self.assertEqual('CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', request.version_id) + self.assertEqual('requester', request.request_payer) + + request = model.DeleteObjectRequest( + bucket='bucket_name', + key='example-object-2.jpg', + invalid_field='invalid_field', + ) + self.assertTrue(hasattr(request, 'bucket')) + self.assertEqual('bucket_name', request.bucket) + self.assertTrue(hasattr(request, 'key')) + self.assertEqual('example-object-2.jpg', request.key) + self.assertFalse(hasattr(request, 'invalid_field')) + + request = model.DeleteObjectRequest( + bucket='bucket_name', + key='example-object-2.jpg', + headers={'key1': 'value1'}, + parameters={'parm1': 'value1'}, + payload='hello world', + ) + self.assertEqual('bucket_name', request.bucket) + self.assertEqual('example-object-2.jpg', request.key) + self.assertDictEqual({'key1': 'value1'}, request.headers) + self.assertDictEqual({'parm1': 'value1'}, request.parameters) + self.assertEqual('hello world', request.payload) + + def test_serialize_request(self): + request = model.DeleteObjectRequest( + bucket='bucket_name', + key='example-object-2.jpg', + 
version_id='CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', + request_payer='requester', + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='DeleteObject', + method='DELETE', + bucket=request.bucket, + )) + self.assertEqual('DeleteObject', op_input.op_name) + self.assertEqual('DELETE', op_input.method) + self.assertEqual('bucket_name', op_input.bucket) + self.assertEqual('requester', op_input.headers.get('x-oss-request-payer')) + + def test_constructor_result(self): + result = model.DeleteObjectResult() + self.assertIsInstance(result, serde.ResultModel) + + def test_deserialize_result(self): + xml_data = None + result = model.DeleteObjectResult() + serde.deserialize_output( + result, + OperationOutput( + status='OK', + status_code=200, + headers=CaseInsensitiveDict({ + 'x-oss-request-id': '123', + 'x-oss-hash-crc64ecma': '316181249502703****', + 'x-oss-version-id': 'CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', + }), + http_response=MockHttpResponse( + status_code=200, + reason='OK', + headers={'x-oss-request-id': 'id-1234'}, + body=xml_data, + ) + ) + ) + self.assertEqual('OK', result.status) + self.assertEqual(200, result.status_code) + self.assertEqual('123', result.request_id) + self.assertEqual('316181249502703****', result.headers.get('x-oss-hash-crc64ecma')) + self.assertEqual('CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', result.headers.get('x-oss-version-id')) + + +class TestDeleteMultipleObjects(unittest.TestCase): + def test_constructor_request(self): + request = model.DeleteMultipleObjectsRequest( + bucket='bucket_name', + objects=[model.DeleteObject( + ),model.DeleteObject( + )], + ) + self.assertIsNotNone(request.bucket) + self.assertIsNotNone(request.objects) + self.assertIsNone(request.encoding_type) + self.assertIsNone(request.content_length) + self.assertIsNone(request.quiet) + self.assertIsNone(request.request_payer) + self.assertIsNone(request.objects[0].key) + self.assertIsNone(request.objects[0].version_id) + self.assertIsNone(request.objects[1].key) + self.assertIsNone(request.objects[1].version_id) + self.assertFalse(hasattr(request, 'headers')) + self.assertFalse(hasattr(request, 'parameters')) + self.assertFalse(hasattr(request, 'payload')) + self.assertIsInstance(request, serde.RequestModel) + + request = model.DeleteMultipleObjectsRequest( + bucket='bucket_name', + objects=[model.DeleteObject( + key='key1', + version_id='ZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', + ),model.DeleteObject( + key='key2', + version_id='CAEQNhiBgMDJgZCA0BYiIDZjhjYmY0****', + ), + ], + encoding_type='url', + content_length=101, + quiet=True, + request_payer='requester', + ) + self.assertEqual('bucket_name', request.bucket) + self.assertEqual('key1', request.objects[0].key) + self.assertEqual('ZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', request.objects[0].version_id) + self.assertEqual('key2', request.objects[1].key) + self.assertEqual('CAEQNhiBgMDJgZCA0BYiIDZjhjYmY0****', request.objects[1].version_id) + self.assertEqual('url', request.encoding_type) + self.assertEqual(101, request.content_length) + self.assertEqual(True, request.quiet) + self.assertEqual('requester', request.request_payer) + + request = model.DeleteMultipleObjectsRequest( + bucket='bucket_name', + objects=[model.DeleteObject( + key='key1', + version_id='ZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', + ), + model.DeleteObject( + key='key2', + version_id='CAEQNhiBgMDJgZCA0BYiIDZjhjYmY0****', + ), + ], + invalid_field='invalid_field', + ) + 
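+ # As with the other request models, unrecognized keyword arguments are dropped rather than raised, so invalid_field should never become an attribute. (Per the OSS DeleteMultipleObjects API the object list travels in a <Delete> XML body; the serialize test below only inspects headers.)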
self.assertTrue(hasattr(request, 'bucket')) + self.assertEqual('bucket_name', request.bucket) + self.assertTrue(hasattr(request, 'objects')) + self.assertEqual('key1', request.objects[0].key) + self.assertEqual('ZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', request.objects[0].version_id) + self.assertEqual('key2', request.objects[1].key) + self.assertEqual('CAEQNhiBgMDJgZCA0BYiIDZjhjYmY0****', request.objects[1].version_id) + self.assertFalse(hasattr(request, 'invalid_field')) + + request = model.DeleteMultipleObjectsRequest( + bucket='bucket_name', + objects=[model.DeleteObject( + key='key1', + version_id='ZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', + ), + model.DeleteObject( + key='key2', + version_id='CAEQNhiBgMDJgZCA0BYiIDZjhjYmY0****', + ), + ], + headers={'key1': 'value1'}, + parameters={'parm1': 'value1'}, + payload='hello world', + ) + self.assertEqual('bucket_name', request.bucket) + + self.assertDictEqual({'key1': 'value1'}, request.headers) + self.assertDictEqual({'parm1': 'value1'}, request.parameters) + self.assertEqual('hello world', request.payload) + + def test_serialize_request(self): + request = model.DeleteMultipleObjectsRequest( + bucket='bucket_name', + objects=[model.DeleteObject( + key='key1', + version_id='ZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', + ), + model.DeleteObject( + key='key2', + version_id='CAEQNhiBgMDJgZCA0BYiIDZjhjYmY0****', + ), + ], + encoding_type='url', + content_length=101, + quiet=True, + request_payer='requester', + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='DeleteMultipleObjects', + method='DELETE', + bucket=request.bucket, + )) + self.assertEqual('DeleteMultipleObjects', op_input.op_name) + self.assertEqual('DELETE', op_input.method) + self.assertEqual('bucket_name', op_input.bucket) + self.assertEqual(101, int(op_input.headers.get('Content-Length'))) + self.assertEqual('requester', op_input.headers.get('x-oss-request-payer')) + + def test_constructor_result(self): + result = model.DeleteMultipleObjectsResult() + self.assertIsNone(result.deleted_objects) + self.assertIsNone(result.encoding_type) + self.assertIsInstance(result, serde.Model) + + result = model.DeleteMultipleObjectsResult( + deleted_objects=[model.DeletedInfo( + key='key1', + version_id='ZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', + delete_marker=True, + delete_marker_version_id='E5NmU5NmFhZjhjYmY0****', + ),model.DeletedInfo( + key='key2', + version_id='jOTRmNTE5NmU5NmFhZjhjYmY0****', + delete_marker=False, + delete_marker_version_id='mU5NmFhZjhjYmY0****', + )], + encoding_type='url', + ) + self.assertEqual('key1', result.deleted_objects[0].key) + self.assertEqual('ZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', result.deleted_objects[0].version_id) + self.assertEqual(True, result.deleted_objects[0].delete_marker) + self.assertEqual('E5NmU5NmFhZjhjYmY0****', result.deleted_objects[0].delete_marker_version_id) + self.assertEqual('key2', result.deleted_objects[1].key) + self.assertEqual('jOTRmNTE5NmU5NmFhZjhjYmY0****', result.deleted_objects[1].version_id) + self.assertEqual(False, result.deleted_objects[1].delete_marker) + self.assertEqual('mU5NmFhZjhjYmY0****', result.deleted_objects[1].delete_marker_version_id) + self.assertEqual('url', result.encoding_type) + + result = model.DeleteMultipleObjectsResult( + encoding_type='url', + invalid_field='invalid_field', + ) + self.assertEqual('url', result.encoding_type) + self.assertFalse(hasattr(result, 'invalid_field')) + + def test_deserialize_result(self): + xml_data = r''' + <DeleteResult> + <EncodingType>url</EncodingType> + <Deleted> + <Key>multipart.data</Key>
<VersionId>CAEQNRiBgICEoPiC0BYiIGMxZWJmYmMzYjE0OTQ0ZmZhYjgzNzkzYjc2NjZk****</VersionId> + <DeleteMarker>true</DeleteMarker> + <DeleteMarkerVersionId>CAEQMhiBgIDXiaaB0BYiIGQzYmRkZGUxMTM1ZDRjOTZhNjk4YjRjMTAyZjhl****</DeleteMarkerVersionId> + </Deleted> + <Deleted> + <Key>test.jpg</Key> + <VersionId>0BYiIGMxZWJmYmMzYjE0OTQ0ZmZhYjgzNzkzYjc2NjZk****</VersionId> + <DeleteMarker>true</DeleteMarker> + <DeleteMarkerVersionId>CAEQMhiBgIDB3aWB0BYiIGUzYTA3YzliMzVmNzRkZGM5NjllYTVlMjYyYWEy****</DeleteMarkerVersionId> + </Deleted> + </DeleteResult>''' + + result = model.DeleteMultipleObjectsResult() + serde.deserialize_xml(xml_data=xml_data, obj=result) + self.assertEqual("url", result.encoding_type) + self.assertEqual("multipart.data", result.deleted_objects[0].key) + self.assertEqual("CAEQNRiBgICEoPiC0BYiIGMxZWJmYmMzYjE0OTQ0ZmZhYjgzNzkzYjc2NjZk****", result.deleted_objects[0].version_id) + self.assertEqual("true", result.deleted_objects[0].delete_marker) + self.assertEqual("CAEQMhiBgIDXiaaB0BYiIGQzYmRkZGUxMTM1ZDRjOTZhNjk4YjRjMTAyZjhl****", result.deleted_objects[0].delete_marker_version_id) + self.assertEqual("test.jpg", result.deleted_objects[1].key) + self.assertEqual("0BYiIGMxZWJmYmMzYjE0OTQ0ZmZhYjgzNzkzYjc2NjZk****", result.deleted_objects[1].version_id) + self.assertEqual("true", result.deleted_objects[1].delete_marker) + self.assertEqual("CAEQMhiBgIDB3aWB0BYiIGUzYTA3YzliMzVmNzRkZGM5NjllYTVlMjYyYWEy****", result.deleted_objects[1].delete_marker_version_id) + + +class TestGetObjectMeta(unittest.TestCase): + def test_constructor_request(self): + request = model.GetObjectMetaRequest( + bucket='bucket_name', + key='example-object-2.jpg', + ) + self.assertIsNotNone(request.bucket) + self.assertIsNotNone(request.key) + self.assertIsNone(request.version_id) + self.assertIsNone(request.request_payer) + self.assertFalse(hasattr(request, 'headers')) + self.assertFalse(hasattr(request, 'parameters')) + self.assertFalse(hasattr(request, 'payload')) + self.assertIsInstance(request, serde.RequestModel) + + request = model.GetObjectMetaRequest( + bucket='bucket_name', + key='example-object-2.jpg', + version_id='CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', + request_payer='requester', + ) + self.assertEqual('bucket_name', request.bucket) + self.assertEqual('example-object-2.jpg', request.key) + self.assertEqual('CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', request.version_id) + self.assertEqual('requester', request.request_payer) + + request = model.GetObjectMetaRequest( + bucket='bucket_name', + key='example-object-2.jpg', + invalid_field='invalid_field', + ) + self.assertTrue(hasattr(request, 'bucket')) + self.assertEqual('bucket_name', request.bucket) + self.assertTrue(hasattr(request, 'key')) + self.assertEqual('example-object-2.jpg', request.key) + self.assertFalse(hasattr(request, 'invalid_field')) + + request = model.GetObjectMetaRequest( + bucket='bucket_name', + key='example-object-2.jpg', + headers={'key1': 'value1'}, + parameters={'parm1': 'value1'}, + payload='hello world', + ) + self.assertEqual('bucket_name', request.bucket) + self.assertEqual('example-object-2.jpg', request.key) + self.assertDictEqual({'key1': 'value1'}, request.headers) + self.assertDictEqual({'parm1': 'value1'}, request.parameters) + self.assertEqual('hello world', request.payload) + + def test_serialize_request(self): + request = model.GetObjectMetaRequest( + bucket='bucket_name', + key='example-object-2.jpg', + version_id='CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', + request_payer='requester', + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='GetObjectMeta', + method='GET', + bucket=request.bucket, + )) + self.assertEqual('GetObjectMeta', op_input.op_name) + self.assertEqual('GET', 
op_input.method) + self.assertEqual('bucket_name', op_input.bucket) + self.assertEqual('requester', op_input.headers.get('x-oss-request-payer')) + + def test_constructor_result(self): + result = model.GetObjectMetaResult() + self.assertIsInstance(result, serde.ResultModel) + + def test_deserialize_result(self): + xml_data = None + result = model.GetObjectMetaResult() + serde.deserialize_output( + result, + OperationOutput( + status='OK', + status_code=200, + headers=CaseInsensitiveDict({ + 'x-oss-request-id': '123', + 'ETag': '"1CF5A685959CA2ED8DE6E5F8ACC2****"', + 'x-oss-last-access-time': 'Thu, 14 Oct 2021 11:49:05 GMT', + 'Last-Modified': 'Tue, 09 Apr 2019 06:24:00 GMT', + + }), + http_response=MockHttpResponse( + status_code=200, + reason='OK', + headers={'x-oss-request-id': 'id-1234'}, + body=xml_data, + ) + ) + ) + self.assertEqual('OK', result.status) + self.assertEqual(200, result.status_code) + self.assertEqual('123', result.request_id) + self.assertEqual('"1CF5A685959CA2ED8DE6E5F8ACC2****"', result.headers.get('ETag')) + self.assertEqual('Thu, 14 Oct 2021 11:49:05 GMT', result.headers.get('x-oss-last-access-time')) + self.assertEqual('Tue, 09 Apr 2019 06:24:00 GMT', result.headers.get('Last-Modified')) + + +class TestRestoreObject(unittest.TestCase): + def test_constructor_request(self): + request = model.RestoreObjectRequest( + bucket='bucket_name', + key='example-object-2.jpg', + ) + self.assertIsNotNone(request.bucket) + self.assertIsNotNone(request.key) + self.assertIsNone(request.version_id) + self.assertIsNone(request.restore_request) + self.assertIsNone(request.request_payer) + self.assertFalse(hasattr(request, 'headers')) + self.assertFalse(hasattr(request, 'parameters')) + self.assertFalse(hasattr(request, 'payload')) + self.assertIsInstance(request, serde.RequestModel) + + request = model.RestoreObjectRequest( + bucket='bucket_name', + key='example-object-2.jpg', + version_id='CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', + restore_request=model.RestoreRequest( + days=7, + tier='Expedited', + ), + request_payer='requester', + ) + self.assertEqual('bucket_name', request.bucket) + self.assertEqual('example-object-2.jpg', request.key) + self.assertEqual('CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', request.version_id) + self.assertEqual(7, request.restore_request.days) + self.assertEqual('Expedited', request.restore_request.tier) + self.assertEqual('requester', request.request_payer) + + request = model.RestoreObjectRequest( + bucket='bucket_name', + key='example-object-2.jpg', + invalid_field='invalid_field', + ) + self.assertTrue(hasattr(request, 'bucket')) + self.assertEqual('bucket_name', request.bucket) + self.assertTrue(hasattr(request, 'key')) + self.assertEqual('example-object-2.jpg', request.key) + self.assertFalse(hasattr(request, 'invalid_field')) + + request = model.RestoreObjectRequest( + bucket='bucket_name', + key='example-object-2.jpg', + headers={'key1': 'value1'}, + parameters={'parm1': 'value1'}, + payload='hello world', + ) + self.assertEqual('bucket_name', request.bucket) + self.assertEqual('example-object-2.jpg', request.key) + self.assertDictEqual({'key1': 'value1'}, request.headers) + self.assertDictEqual({'parm1': 'value1'}, request.parameters) + self.assertEqual('hello world', request.payload) + + def test_serialize_request(self): + request = model.RestoreObjectRequest( + bucket='bucket_name', + key='example-object-2.jpg', + version_id='CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', + 
restore_request=model.RestoreRequest( + days=7, + tier='Expedited', + ), + request_payer='requester', + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='RestoreObject', + method='POST', + bucket=request.bucket, + )) + self.assertEqual('RestoreObject', op_input.op_name) + self.assertEqual('POST', op_input.method) + self.assertEqual('bucket_name', op_input.bucket) + self.assertEqual('requester', op_input.headers.get('x-oss-request-payer')) + + root = ET.fromstring(op_input.body) + self.assertEqual('RestoreRequest', root.tag) + self.assertEqual(7, int(root.findtext('Days'))) + self.assertEqual('Expedited', root.findtext('JobParameters/Tier')) + + def test_constructor_result(self): + result = model.RestoreObjectResult() + self.assertIsInstance(result, serde.ResultModel) + + def test_deserialize_result(self): + xml_data = None + result = model.RestoreObjectResult() + serde.deserialize_output( + result, + OperationOutput( + status='OK', + status_code=200, + headers=CaseInsensitiveDict({ + 'x-oss-request-id': '123', + 'x-oss-hash-crc64ecma': '316181249502703****', + 'x-oss-version-id': 'CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', + 'x-oss-object-restore-priority': 'Standard', + }), + http_response=MockHttpResponse( + status_code=200, + reason='OK', + headers={'x-oss-request-id': 'id-1234'}, + body=xml_data, + ) + ) + ) + self.assertEqual('OK', result.status) + self.assertEqual(200, result.status_code) + self.assertEqual('123', result.request_id) + self.assertEqual('316181249502703****', result.headers.get('x-oss-hash-crc64ecma')) + self.assertEqual('CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', result.headers.get('x-oss-version-id')) + self.assertEqual('Standard', result.headers.get('x-oss-object-restore-priority')) \ No newline at end of file diff --git a/tests/unit/models/test_region.py b/tests/unit/models/test_region.py new file mode 100644 index 0000000..b8c1b28 --- /dev/null +++ b/tests/unit/models/test_region.py @@ -0,0 +1,121 @@ +# pylint: skip-file +import unittest +from alibabacloud_oss_v2 import serde +from alibabacloud_oss_v2.models import region as model +from alibabacloud_oss_v2.types import OperationInput +from .. 
import MockHttpResponse + +class TestDescribeRegions(unittest.TestCase): + def test_constructor_request(self): + request = model.DescribeRegionsRequest() + self.assertIsInstance(request, serde.Model) + + request = model.DescribeRegionsRequest( + regions='oss-cn-hangzhou', + ) + self.assertEqual('oss-cn-hangzhou', request.regions) + + request = model.DescribeRegionsRequest( + regions='oss-cn-hangzhou', + invalid_field='invalid_field' + ) + self.assertTrue(hasattr(request, 'regions')) + self.assertEqual('oss-cn-hangzhou', request.regions) + self.assertFalse(hasattr(request, 'invalid_field')) + + def test_serialize_request(self): + # case 1 + request = model.DescribeRegionsRequest( + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='DescribeRegions', + method='GET', + parameters={ + 'regions': request.regions, + }, + )) + + self.assertEqual('DescribeRegions', op_input.op_name) + self.assertEqual('GET', op_input.method) + self.assertEqual('', op_input.parameters.get('regions')) + self.assertEqual(1, len(op_input.parameters.items())) + + # case 2 + request = model.DescribeRegionsRequest( + regions='oss-cn-hangzhou', + ) + + op_input = serde.serialize_input(request, OperationInput( + op_name='DescribeRegions', + method='GET', + parameters={ + 'regions': request.regions, + }, + )) + + self.assertEqual('DescribeRegions', op_input.op_name) + self.assertEqual('GET', op_input.method) + self.assertEqual('oss-cn-hangzhou', op_input.parameters.get('regions')) + self.assertEqual(1, len(op_input.parameters.items())) + + def test_constructor_result(self): + result = model.DescribeRegionsResult() + self.assertIsNone(result.region_info) + self.assertIsInstance(result, serde.Model) + + result = model.DescribeRegionsResult( + region_info=[model.RegionInfo( + region='oss-cn-hangzhou', + internet_endpoint='oss-cn-hangzhou.aliyuncs.com', + internal_endpoint='oss-cn-hangzhou-internal.aliyuncs.com', + accelerate_endpoint='oss-accelerate.aliyuncs.com', + )], + ) + self.assertEqual('oss-cn-hangzhou', result.region_info[0].region) + self.assertEqual('oss-cn-hangzhou-internal.aliyuncs.com', result.region_info[0].internal_endpoint) + self.assertEqual('oss-cn-hangzhou.aliyuncs.com', result.region_info[0].internet_endpoint) + self.assertEqual('oss-accelerate.aliyuncs.com', result.region_info[0].accelerate_endpoint) + + result = model.DescribeRegionsResult( + region_info=[model.RegionInfo( + region='oss-cn-hangzhou', + internet_endpoint='oss-cn-hangzhou.aliyuncs.com', + internal_endpoint='oss-cn-hangzhou-internal.aliyuncs.com', + accelerate_endpoint='oss-accelerate.aliyuncs.com', + )], + invalid_field='invalid_field' + ) + self.assertEqual('oss-cn-hangzhou', result.region_info[0].region) + self.assertEqual('oss-cn-hangzhou-internal.aliyuncs.com', result.region_info[0].internal_endpoint) + self.assertEqual('oss-cn-hangzhou.aliyuncs.com', result.region_info[0].internet_endpoint) + self.assertEqual('oss-accelerate.aliyuncs.com', result.region_info[0].accelerate_endpoint) + self.assertFalse(hasattr(result, 'invalid_field')) + + def test_deserialize_result(self): + xml_data = r''' + <RegionInfoList> + <RegionInfo> + <Region>oss-cn-hangzhou</Region> + <InternetEndpoint>oss-cn-hangzhou.aliyuncs.com</InternetEndpoint> + <InternalEndpoint>oss-cn-hangzhou-internal.aliyuncs.com</InternalEndpoint> + <AccelerateEndpoint>oss-accelerate.aliyuncs.com</AccelerateEndpoint> + </RegionInfo> + <RegionInfo> + <Region>oss-cn-shanghai</Region> + <InternetEndpoint>oss-cn-shanghai.aliyuncs.com</InternetEndpoint> + <InternalEndpoint>oss-cn-shanghai-internal.aliyuncs.com</InternalEndpoint> + <AccelerateEndpoint>oss-accelerate.aliyuncs.com</AccelerateEndpoint> + </RegionInfo> + </RegionInfoList>''' + + result = model.DescribeRegionsResult() + serde.deserialize_xml(xml_data=xml_data, obj=result) + self.assertEqual('oss-cn-hangzhou', result.region_info[0].region) + 
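+ # The deserialized RegionInfo entries keep document order: index 0 is oss-cn-hangzhou, index 1 is oss-cn-shanghai.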
self.assertEqual('oss-cn-hangzhou-internal.aliyuncs.com', result.region_info[0].internal_endpoint) + self.assertEqual('oss-cn-hangzhou.aliyuncs.com', result.region_info[0].internet_endpoint) + self.assertEqual('oss-accelerate.aliyuncs.com', result.region_info[0].accelerate_endpoint) + self.assertEqual('oss-cn-shanghai', result.region_info[1].region) + self.assertEqual('oss-cn-shanghai-internal.aliyuncs.com', result.region_info[1].internal_endpoint) + self.assertEqual('oss-cn-shanghai.aliyuncs.com', result.region_info[1].internet_endpoint) + self.assertEqual('oss-accelerate.aliyuncs.com', result.region_info[1].accelerate_endpoint) \ No newline at end of file diff --git a/tests/unit/models/test_service.py b/tests/unit/models/test_service.py new file mode 100644 index 0000000..a5dc125 --- /dev/null +++ b/tests/unit/models/test_service.py @@ -0,0 +1,7 @@ +# pylint: skip-file +import unittest +import xml.etree.ElementTree as ET +from alibabacloud_oss_v2 import serde +from alibabacloud_oss_v2.models import service as model +from alibabacloud_oss_v2.types import OperationInput, OperationOutput, CaseInsensitiveDict +from .. import MockHttpResponse diff --git a/tests/unit/operations/__init__.py b/tests/unit/operations/__init__.py new file mode 100644 index 0000000..b93f6e8 --- /dev/null +++ b/tests/unit/operations/__init__.py @@ -0,0 +1,77 @@ +# pylint: skip-file +import unittest +from alibabacloud_oss_v2.types import HttpRequest, HttpResponse, HttpClient +from .. import MockHttpResponse, mock_client + +class TestOperations(unittest.TestCase): + def setUp(self): + self.set_requestFunc(None) + self.set_responseFunc(None) + + def tearDown(self): + pass + + @classmethod + def setUpClass(cls): + cls.request_dump: HttpRequest = None + cls.client = mock_client(cls.requestFunc, cls.responseFunc) + cls.invoke_request = None + cls.invoke_response = None + + @classmethod + def tearDownClass(cls): + pass + + @classmethod + def requestFunc(cls, request: HttpRequest): + cls.request_dump = request + if cls.invoke_request is not None: + cls.invoke_request(request) + + @classmethod + def responseFunc(cls) -> MockHttpResponse: + if cls.invoke_response is not None: + return cls.invoke_response() + + return MockHttpResponse( + status_code=200, + reason='OK', + headers={'x-oss-request-id': 'id-1234'}, + body='' + ) + + @classmethod + def set_requestFunc(cls, fn): + cls.invoke_request = fn + + @classmethod + def set_responseFunc(cls, fn): + cls.invoke_response = fn + + @classmethod + def response_403_InvalidAccessKeyId(cls) -> MockHttpResponse: + err_xml = r''' + <Error> + <Code>InvalidAccessKeyId</Code> + <Message>The OSS Access Key Id you provided does not exist in our records.</Message>
+ <RequestId>id-1234</RequestId> + <HostId>oss-cn-hangzhou.aliyuncs.com</HostId> + <OSSAccessKeyId>ak</OSSAccessKeyId> + <EC>0002-00000902</EC> + <RecommendDoc>https://api.aliyun.com/troubleshoot?q=0002-00000902</RecommendDoc> + </Error> + ''' + return MockHttpResponse( + status_code=403, + reason='Forbidden', + headers={ + 'Server': 'AliyunOSS', + 'Date': 'Tue, 23 Jul 2024 13:01:06 GMT', + 'Content-Type': 'application/xml', + 'x-oss-ec': '0002-00000902', + 'x-oss-request-id': 'id-1234', + }, + body=err_xml.encode() + ) + + diff --git a/tests/unit/operations/test_bucket_basic.py b/tests/unit/operations/test_bucket_basic.py new file mode 100644 index 0000000..0ea5f6f --- /dev/null +++ b/tests/unit/operations/test_bucket_basic.py @@ -0,0 +1,423 @@ +# pylint: skip-file +from typing import cast +import xml.etree.ElementTree as ET +from alibabacloud_oss_v2 import exceptions +from alibabacloud_oss_v2.models import bucket_basic as model +from alibabacloud_oss_v2.operations import bucket_basic as operations +from . import TestOperations + +class TestBucketBasic(TestOperations): + + def test_put_bucket(self): + request = model.PutBucketRequest( + bucket='bucket', + acl='private', + resource_group_id='rg-id', + create_bucket_configuration=model.CreateBucketConfiguration( + storage_class='Standard' + ), + ) + + result = operations.put_bucket(self.client, request) + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/', self.request_dump.url) + self.assertEqual('PUT', self.request_dump.method) + self.assertIn('private', self.request_dump.headers.get('x-oss-acl')) + self.assertIn('rg-id', self.request_dump.headers.get('x-oss-resource-group-id')) + + root = ET.fromstring(self.request_dump.body) + self.assertEqual('CreateBucketConfiguration', root.tag) + self.assertEqual('Standard', root.findtext('StorageClass')) + self.assertEqual(None, root.findtext('DataRedundancyType')) + + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + self.assertEqual('id-1234', result.request_id) + + def test_put_bucket_fail(self): + self.set_responseFunc(self.response_403_InvalidAccessKeyId) + request = model.PutBucketRequest( + bucket='bucket', + acl='private', + resource_group_id='rg-id', + ) + + try: + result = operations.put_bucket(self.client, request) + self.fail('should not here') + except exceptions.OperationError as ope: + self.assertIsInstance(ope.unwrap(), exceptions.ServiceError) + serr = cast(exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual('id-1234', serr.request_id) + self.assertEqual('InvalidAccessKeyId', serr.code) + + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/', self.request_dump.url) + self.assertEqual('PUT', self.request_dump.method) + self.assertIn('private', self.request_dump.headers.get('x-oss-acl')) + self.assertIn('rg-id', self.request_dump.headers.get('x-oss-resource-group-id')) + + def test_put_bucket_acl(self): + request = model.PutBucketAclRequest( + bucket='bucket', + acl='private', + ) + + result = operations.put_bucket_acl(self.client, request) + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?acl=', self.request_dump.url) + self.assertEqual('PUT', self.request_dump.method) + self.assertIn('private', self.request_dump.headers.get('x-oss-acl')) + + + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + + def test_put_bucket_acl_fail(self): + self.set_responseFunc(self.response_403_InvalidAccessKeyId) + request = model.PutBucketAclRequest( + bucket='bucket', + acl='private', + ) + + try: + result = operations.put_bucket_acl(self.client, request) + 
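+ # The mocked 403 InvalidAccessKeyId response makes the call above raise OperationError, so fail() is never reached.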
self.fail('should not here') + except exceptions.OperationError as ope: + self.assertIsInstance(ope.unwrap(), exceptions.ServiceError) + serr = cast(exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual('id-1234', serr.request_id) + self.assertEqual('InvalidAccessKeyId', serr.code) + + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?acl=', self.request_dump.url) + self.assertEqual('PUT', self.request_dump.method) + self.assertIn('private', self.request_dump.headers.get('x-oss-acl')) + + def test_get_bucket_acl(self): + request = model.GetBucketAclRequest( + bucket='bucket', + ) + + result = operations.get_bucket_acl(self.client, request) + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?acl=', self.request_dump.url) + self.assertEqual('GET', self.request_dump.method) + + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + + def test_get_bucket_acl_fail(self): + self.set_responseFunc(self.response_403_InvalidAccessKeyId) + request = model.GetBucketAclRequest( + bucket='bucket', + ) + + try: + result = operations.get_bucket_acl(self.client, request) + self.fail('should not here') + except exceptions.OperationError as ope: + self.assertIsInstance(ope.unwrap(), exceptions.ServiceError) + serr = cast(exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual('id-1234', serr.request_id) + self.assertEqual('InvalidAccessKeyId', serr.code) + + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?acl=', self.request_dump.url) + self.assertEqual('GET', self.request_dump.method) + + def test_list_objects_v2(self): + request = model.ListObjectsV2Request( + bucket='example-bucket', + delimiter='/', + start_after='b', + encoding_type='url', + continuation_token='ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', + max_keys=10, + prefix='aaa', + fetch_owner=True, + request_payer='requester', + ) + + result = operations.list_objects_v2(self.client, request) + self.assertEqual('https://example-bucket.oss-cn-hangzhou.aliyuncs.com/?encoding-type=url&list-type=2&delimiter=%2F&start-after=b&continuation-token=ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA&max-keys=10&prefix=aaa&fetch-owner=true', self.request_dump.url) + self.assertEqual('GET', self.request_dump.method) + + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + + def test_list_objects_v2_fail(self): + self.set_responseFunc(self.response_403_InvalidAccessKeyId) + request = model.ListObjectsV2Request( + bucket='example-bucket', + delimiter='/', + start_after='b', + encoding_type='url', + continuation_token='ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', + max_keys=10, + prefix='aaa', + fetch_owner=True, + request_payer='requester', + ) + + try: + result = operations.list_objects_v2(self.client, request) + self.fail('should not here') + except exceptions.OperationError as ope: + self.assertIsInstance(ope.unwrap(), exceptions.ServiceError) + serr = cast(exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual('id-1234', serr.request_id) + self.assertEqual('InvalidAccessKeyId', serr.code) + + self.assertEqual('https://example-bucket.oss-cn-hangzhou.aliyuncs.com/?encoding-type=url&list-type=2&delimiter=%2F&start-after=b&continuation-token=ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA&max-keys=10&prefix=aaa&fetch-owner=true', self.request_dump.url) + self.assertEqual('GET', self.request_dump.method) + + def test_get_bucket_stat(self): + request = model.GetBucketStatRequest( + 
bucket='bucket', + ) + + result = operations.get_bucket_stat(self.client, request) + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?stat=', self.request_dump.url) + self.assertEqual('GET', self.request_dump.method) + + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + + def test_get_bucket_stat_fail(self): + self.set_responseFunc(self.response_403_InvalidAccessKeyId) + request = model.GetBucketStatRequest( + bucket='bucket', + ) + + try: + result = operations.get_bucket_stat(self.client, request) + self.fail('should not here') + except exceptions.OperationError as ope: + self.assertIsInstance(ope.unwrap(), exceptions.ServiceError) + serr = cast(exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual('id-1234', serr.request_id) + self.assertEqual('InvalidAccessKeyId', serr.code) + + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?stat=', self.request_dump.url) + self.assertEqual('GET', self.request_dump.method) + + def test_list_objects(self): + request = model.ListObjectsRequest( + bucket='example-bucket', + delimiter='/', + encoding_type='url', + marker='ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', + max_keys=10, + prefix='aaa', + request_payer='requester', + ) + + result = operations.list_objects(self.client, request) + self.assertEqual('https://example-bucket.oss-cn-hangzhou.aliyuncs.com/?encoding-type=url&delimiter=%2F&marker=ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA&max-keys=10&prefix=aaa', self.request_dump.url) + self.assertEqual('GET', self.request_dump.method) + self.assertIn('requester', self.request_dump.headers.get('x-oss-request-payer')) + + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + + def test_list_objects_fail(self): + self.set_responseFunc(self.response_403_InvalidAccessKeyId) + request = model.ListObjectsRequest( + bucket='example-bucket', + delimiter='/', + encoding_type='url', + marker='ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', + max_keys=10, + prefix='aaa', + request_payer='requester', + ) + + try: + result = operations.list_objects(self.client, request) + self.fail('should not here') + except exceptions.OperationError as ope: + self.assertIsInstance(ope.unwrap(), exceptions.ServiceError) + serr = cast(exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual('id-1234', serr.request_id) + self.assertEqual('InvalidAccessKeyId', serr.code) + + self.assertEqual('https://example-bucket.oss-cn-hangzhou.aliyuncs.com/?encoding-type=url&delimiter=%2F&marker=ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA&max-keys=10&prefix=aaa', self.request_dump.url) + self.assertEqual('GET', self.request_dump.method) + self.assertIn('requester', self.request_dump.headers.get('x-oss-request-payer')) + + def test_get_bucket_info(self): + request = model.GetBucketInfoRequest( + bucket='bucket', + ) + + result = operations.get_bucket_info(self.client, request) + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?bucketInfo=', self.request_dump.url) + self.assertEqual('GET', self.request_dump.method) + + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + + def test_get_bucket_info_fail(self): + self.set_responseFunc(self.response_403_InvalidAccessKeyId) + request = model.GetBucketInfoRequest( + bucket='bucket', + ) + + try: + result = operations.get_bucket_info(self.client, request) + self.fail('should not here') + except exceptions.OperationError as ope: + self.assertIsInstance(ope.unwrap(), 
exceptions.ServiceError) + serr = cast(exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual('id-1234', serr.request_id) + self.assertEqual('InvalidAccessKeyId', serr.code) + + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?bucketInfo=', self.request_dump.url) + self.assertEqual('GET', self.request_dump.method) + + def test_get_bucket_location(self): + request = model.GetBucketLocationRequest( + bucket='bucket', + ) + + result = operations.get_bucket_location(self.client, request) + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?location=', self.request_dump.url) + self.assertEqual('GET', self.request_dump.method) + + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + + def test_get_bucket_location_fail(self): + self.set_responseFunc(self.response_403_InvalidAccessKeyId) + request = model.GetBucketLocationRequest( + bucket='bucket', + ) + + try: + result = operations.get_bucket_location(self.client, request) + self.fail('should not here') + except exceptions.OperationError as ope: + self.assertIsInstance(ope.unwrap(), exceptions.ServiceError) + serr = cast(exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual('id-1234', serr.request_id) + self.assertEqual('InvalidAccessKeyId', serr.code) + + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?location=', self.request_dump.url) + self.assertEqual('GET', self.request_dump.method) + + def test_put_bucket_versioning(self): + request = model.PutBucketVersioningRequest( + bucket='bucket', + versioning_configuration=model.VersioningConfiguration( + status='Enabled' + ) + ) + + xml_data = r'''<VersioningConfiguration><Status>Enabled</Status></VersioningConfiguration>''' + + result = operations.put_bucket_versioning(self.client, request) + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?versioning=', self.request_dump.url) + self.assertEqual('PUT', self.request_dump.method) + self.assertEqual(xml_data.encode(), self.request_dump.body) + + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + + def test_put_bucket_versioning_fail(self): + self.set_responseFunc(self.response_403_InvalidAccessKeyId) + request = model.PutBucketVersioningRequest( + bucket='bucket', + ) + + try: + result = operations.put_bucket_versioning(self.client, request) + self.fail('should not here') + except exceptions.OperationError as ope: + self.assertIsInstance(ope.unwrap(), exceptions.ServiceError) + serr = cast(exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual('id-1234', serr.request_id) + self.assertEqual('InvalidAccessKeyId', serr.code) + + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?versioning=', self.request_dump.url) + self.assertEqual('PUT', self.request_dump.method) + + def test_get_bucket_versioning(self): + request = model.GetBucketVersioningRequest( + bucket='bucket', + ) + + result = operations.get_bucket_versioning(self.client, request) + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?versioning=', self.request_dump.url) + self.assertEqual('GET', self.request_dump.method) + + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + + def test_get_bucket_versioning_fail(self): + self.set_responseFunc(self.response_403_InvalidAccessKeyId) + request = model.GetBucketVersioningRequest( + bucket='bucket', + ) + + try: + result = operations.get_bucket_versioning(self.client, request) + self.fail('should not here') + except 
exceptions.OperationError as ope: + self.assertIsInstance(ope.unwrap(), exceptions.ServiceError) + serr = cast(exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual('id-1234', serr.request_id) + self.assertEqual('InvalidAccessKeyId', serr.code) + + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?versioning=', self.request_dump.url) + self.assertEqual('GET', self.request_dump.method) + + + def test_list_object_versions(self): + request = model.ListObjectVersionsRequest( + bucket='example-bucket', + encoding_type='url', + delimiter='/', + key_marker='ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', + max_keys=10, + prefix='aaa', + version_id_marker='CAEQMxiBgICbof2D0BYiIGRhZjgwMzJiMjA3MjQ0ODE5MWYxZDYwMzJlZjU1', + request_payer='requester', + ) + + result = operations.list_object_versions(self.client, request) + self.assertEqual('https://example-bucket.oss-cn-hangzhou.aliyuncs.com/?versions=&delimiter=%2F&key-marker=ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA&version-id-marker=CAEQMxiBgICbof2D0BYiIGRhZjgwMzJiMjA3MjQ0ODE5MWYxZDYwMzJlZjU1&max-keys=10&prefix=aaa&encoding-type=url', self.request_dump.url) + self.assertEqual('GET', self.request_dump.method) + + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + + def test_list_object_versions_fail(self): + self.set_responseFunc(self.response_403_InvalidAccessKeyId) + request = model.ListObjectVersionsRequest( + bucket='example-bucket', + encoding_type='url', + delimiter='/', + key_marker='ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA', + max_keys=10, + prefix='aaa', + version_id_marker='CAEQMxiBgICbof2D0BYiIGRhZjgwMzJiMjA3MjQ0ODE5MWYxZDYwMzJlZjU1', + request_payer='requester', + ) + + try: + result = operations.list_object_versions(self.client, request) + self.fail('should not here') + except exceptions.OperationError as ope: + self.assertIsInstance(ope.unwrap(), exceptions.ServiceError) + serr = cast(exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual('id-1234', serr.request_id) + self.assertEqual('InvalidAccessKeyId', serr.code) + + self.assertEqual('https://example-bucket.oss-cn-hangzhou.aliyuncs.com/?versions=&delimiter=%2F&key-marker=ChR1c2VyL2VyaWMvZGVtbzMuanNvbhAA&version-id-marker=CAEQMxiBgICbof2D0BYiIGRhZjgwMzJiMjA3MjQ0ODE5MWYxZDYwMzJlZjU1&max-keys=10&prefix=aaa&encoding-type=url', self.request_dump.url) + self.assertEqual('GET', self.request_dump.method) diff --git a/tests/unit/operations/test_object_basic.py b/tests/unit/operations/test_object_basic.py new file mode 100644 index 0000000..b7f0e1c --- /dev/null +++ b/tests/unit/operations/test_object_basic.py @@ -0,0 +1,523 @@ +# pylint: skip-file +from typing import cast +import xml.etree.ElementTree as ET +from alibabacloud_oss_v2 import exceptions +from alibabacloud_oss_v2.models import object_basic as model +from alibabacloud_oss_v2.operations import object_basic as operations +from . 
import TestOperations + +class TestObjectBasic(TestOperations): + + def test_put_object(self): + request = model.PutObjectRequest( + bucket='bucket', + key='key-test', + acl='private', + storage_class='ColdArchive', + metadata={ + "client-side-encryption-key": "nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=", + "client-side-encryption-start": "De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=", + "client-side-encryption-cek-alg": "AES/CTR/NoPadding", + "client-side-encryption-wrap-alg": "RSA/NONE/PKCS1Padding", + }, + cache_control='no-cache', + content_disposition='attachment', + content_encoding='utf-8', + content_length=101, + content_md5='B5eJF1ptWaXm4bijSPyxw==', + content_type='application/octet-stream', + expires='2022-10-12T00:00:00.000Z', + server_side_encryption='SM4', + server_side_data_encryption='KMS', + sse_kms_key_id='9468da86-3509-4f8d-a61e-6eab1eac****', + tagging='tagging-test', + callback='{\"callbackUrl\":\"www.abc.com/callback\",\"callbackBody\":\"${etag}\"}', + callback_var='{\"x:var1\":\"value1\",\"x:var2\":\"value2\"}', + forbid_overwrite=True, + traffic_limit=100 * 1024 * 8, + request_payer='request_payer-test', + body='body-test', + progress_fn='progress_fn-test', + ) + + result = operations.put_object(self.client, request) + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/key-test', self.request_dump.url) + self.assertEqual('PUT', self.request_dump.method) + self.assertEqual('private', self.request_dump.headers.get('x-oss-object-acl')) + self.assertEqual('ColdArchive', self.request_dump.headers.get('x-oss-storage-class')) + self.assertEqual('nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=', self.request_dump.headers.get('x-oss-meta-client-side-encryption-key')) + self.assertEqual('De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=', self.request_dump.headers.get('x-oss-meta-client-side-encryption-start')) + self.assertEqual('AES/CTR/NoPadding', self.request_dump.headers.get('x-oss-meta-client-side-encryption-cek-alg')) + self.assertEqual('RSA/NONE/PKCS1Padding', self.request_dump.headers.get('x-oss-meta-client-side-encryption-wrap-alg')) + self.assertEqual('no-cache', self.request_dump.headers.get('Cache-Control')) + self.assertEqual('attachment', self.request_dump.headers.get('Content-Disposition')) + self.assertEqual('utf-8', self.request_dump.headers.get('Content-Encoding')) + self.assertEqual(101, int(self.request_dump.headers.get('Content-Length'))) + self.assertEqual('B5eJF1ptWaXm4bijSPyxw==', self.request_dump.headers.get('Content-MD5')) + self.assertEqual('application/octet-stream', self.request_dump.headers.get('Content-Type')) + self.assertEqual('2022-10-12T00:00:00.000Z', self.request_dump.headers.get('Expires')) + self.assertEqual('SM4', self.request_dump.headers.get('x-oss-server-side-encryption')) + self.assertEqual('KMS', self.request_dump.headers.get('x-oss-server-side-data-encryption')) + self.assertEqual('9468da86-3509-4f8d-a61e-6eab1eac****', self.request_dump.headers.get('x-oss-server-side-encryption-key-id')) + self.assertEqual('tagging-test', self.request_dump.headers.get('x-oss-tagging')) + self.assertEqual('{\"callbackUrl\":\"www.abc.com/callback\",\"callbackBody\":\"${etag}\"}', self.request_dump.headers.get('x-oss-callback')) + self.assertEqual('{\"x:var1\":\"value1\",\"x:var2\":\"value2\"}', self.request_dump.headers.get('x-oss-callback-var')) + self.assertEqual(True, bool(self.request_dump.headers.get('x-oss-forbid-overwrite'))) + 
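+ # x-oss-traffic-limit is specified in bit/s, so 100 * 1024 * 8 below corresponds to a 100 KiB/s rate cap.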
self.assertEqual(100 * 1024 * 8, int(self.request_dump.headers.get('x-oss-traffic-limit'))) + self.assertEqual('request_payer-test', self.request_dump.headers.get('x-oss-request-payer')) + + + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + self.assertEqual('id-1234', result.request_id) + + def test_put_object_fail(self): + self.set_responseFunc(self.response_403_InvalidAccessKeyId) + request = model.PutObjectRequest( + bucket='bucket', + key='key-test', + acl='private', + storage_class='ColdArchive', + ) + + try: + result = operations.put_object(self.client, request) + self.fail('should not be here') + except exceptions.OperationError as ope: + self.assertIsInstance(ope.unwrap(), exceptions.ServiceError) + serr = cast(exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual('id-1234', serr.request_id) + self.assertEqual('InvalidAccessKeyId', serr.code) + + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/key-test', self.request_dump.url) + self.assertEqual('PUT', self.request_dump.method) + self.assertEqual('private', self.request_dump.headers.get('x-oss-object-acl')) + + + def test_head_object(self): + request = model.HeadObjectRequest( + bucket='bucket', + key='key-test', + version_id='fba9dede5f27731c9771645a3986', + if_match='D41D8CD98F00B204E9800998ECF8****', + if_none_match='D41D8CD98F00B204E9800998ECF9****', + if_modified_since='Fri, 13 Nov 2023 14:47:53 GMT', + if_unmodified_since='Fri, 13 Nov 2015 14:47:53 GMT', + request_payer='request_payer-test', + ) + + result = operations.head_object(self.client, request) + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/key-test?versionId=fba9dede5f27731c9771645a3986', self.request_dump.url) + self.assertEqual('HEAD', self.request_dump.method) + self.assertEqual('D41D8CD98F00B204E9800998ECF8****', self.request_dump.headers.get('If-Match')) + self.assertEqual('D41D8CD98F00B204E9800998ECF9****', self.request_dump.headers.get('If-None-Match')) + self.assertEqual('Fri, 13 Nov 2023 14:47:53 GMT', self.request_dump.headers.get('If-Modified-Since')) + self.assertEqual('Fri, 13 Nov 2015 14:47:53 GMT', self.request_dump.headers.get('If-Unmodified-Since')) + self.assertEqual('request_payer-test', self.request_dump.headers.get('x-oss-request-payer')) + + + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + + def test_head_object_fail(self): + self.set_responseFunc(self.response_403_InvalidAccessKeyId) + request = model.HeadObjectRequest( + bucket='bucket', + key='key-test', + ) + + try: + result = operations.head_object(self.client, request) + self.fail('should not be here') + except exceptions.OperationError as ope: + self.assertIsInstance(ope.unwrap(), exceptions.ServiceError) + serr = cast(exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual('id-1234', serr.request_id) + self.assertEqual('InvalidAccessKeyId', serr.code) + + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/key-test', self.request_dump.url) + self.assertEqual('HEAD', self.request_dump.method) + + + def test_get_object(self): + request = model.GetObjectRequest( + bucket='bucket', + key='key-test', + if_match='D41D8CD98F00B204E9800998ECF8****', + if_none_match='D41D8CD98F00B204E9800998ECF9****', + if_modified_since='Fri, 13 Nov 2023 14:47:53 GMT', + if_unmodified_since='Fri, 13 Nov 2015 14:47:53 GMT', + range_header='bytes 0~9/44', + range_behavior='standard', + response_cache_control='no-cache', 
+ response_content_disposition='attachment; filename=testing.txt', + response_content_encoding='utf-8', + response_content_language='中文', + response_content_type='text', + response_expires='Fri, 24 Feb 2012 17:00:00 GMT', + version_id='CAEQNhiBgM0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY*****', + traffic_limit=1022, + process='process-test', + request_payer='request_payer-test', + progress_fn='progress_fn-test', + ) + + result = operations.get_object(self.client, request) + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/key-test?response-cache-control=no-cache&response-content-disposition=attachment%3B%20filename%3Dtesting.txt&response-content-encoding=utf-8&response-content-language=%E4%B8%AD%E6%96%87&response-content-type=text&response-expires=Fri%2C%2024%20Feb%202012%2017%3A00%3A00%20GMT&versionId=CAEQNhiBgM0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY%2A%2A%2A%2A%2A&x-oss-process=process-test', self.request_dump.url) + self.assertEqual('GET', self.request_dump.method) + + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + self.assertEqual('D41D8CD98F00B204E9800998ECF8****', self.request_dump.headers.get('If-Match')) + self.assertEqual('D41D8CD98F00B204E9800998ECF9****', self.request_dump.headers.get('If-None-Match')) + self.assertEqual('Fri, 13 Nov 2023 14:47:53 GMT', self.request_dump.headers.get('If-Modified-Since')) + self.assertEqual('Fri, 13 Nov 2015 14:47:53 GMT', self.request_dump.headers.get('If-Unmodified-Since')) + self.assertEqual('bytes 0~9/44', self.request_dump.headers.get('Range')) + self.assertEqual('standard', self.request_dump.headers.get('x-oss-range-behavior')) + self.assertEqual(1022, int(self.request_dump.headers.get('x-oss-traffic-limit'))) + self.assertEqual('request_payer-test', self.request_dump.headers.get('x-oss-request-payer')) + + def test_get_object_fail(self): + self.set_responseFunc(self.response_403_InvalidAccessKeyId) + request = model.GetObjectRequest( + bucket='bucket', + key='key-test', + ) + + try: + result = operations.get_object(self.client, request) + self.fail('should not be here') + except exceptions.OperationError as ope: + self.assertIsInstance(ope.unwrap(), exceptions.ServiceError) + serr = cast(exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual('id-1234', serr.request_id) + self.assertEqual('InvalidAccessKeyId', serr.code) + + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/key-test', self.request_dump.url) + self.assertEqual('GET', self.request_dump.method) + + def test_append_object(self): + request = model.AppendObjectRequest( + bucket='bucket', + key='example-object-2.jpg', + position=10, + acl='private', + storage_class='ColdArchive', + metadata={ + "client-side-encryption-key": "nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=", + "client-side-encryption-start": "De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=", + "client-side-encryption-cek-alg": "AES/CTR/NoPadding", + "client-side-encryption-wrap-alg": "RSA/NONE/PKCS1Padding", + }, + cache_control='no-cache', + content_disposition='attachment', + content_encoding='utf-8', + content_length=101, + content_md5='B5eJF1ptWaXm4bijSPyx', + content_type='application/octet-stream', + expires='2022-10-12T00:00:00.000Z', + server_side_encryption='SM4', + server_side_data_encryption='KMS', + sse_kms_key_id='9468da86-3509-4f8d-a61e-6eab1eac****', + tagging='tagging-test', + forbid_overwrite=True, + traffic_limit=100*1024*8, + 
request_payer='requester', + body='xml_data', + progress_fn='progress_fn-test', + ) + + result = operations.append_object(self.client, request) + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?append=&position=10', self.request_dump.url) + self.assertEqual('POST', self.request_dump.method) + self.assertEqual('private', self.request_dump.headers.get('x-oss-object-acl')) + self.assertEqual('ColdArchive', self.request_dump.headers.get('x-oss-storage-class')) + self.assertEqual('nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=', self.request_dump.headers.get('x-oss-meta-client-side-encryption-key')) + self.assertEqual('De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=', self.request_dump.headers.get('x-oss-meta-client-side-encryption-start')) + self.assertEqual('AES/CTR/NoPadding', self.request_dump.headers.get('x-oss-meta-client-side-encryption-cek-alg')) + self.assertEqual('RSA/NONE/PKCS1Padding', self.request_dump.headers.get('x-oss-meta-client-side-encryption-wrap-alg')) + self.assertEqual('no-cache', self.request_dump.headers.get('Cache-Control')) + self.assertEqual('attachment', self.request_dump.headers.get('Content-Disposition')) + self.assertEqual('utf-8', self.request_dump.headers.get('Content-Encoding')) + self.assertEqual('101', self.request_dump.headers.get('Content-Length')) + self.assertEqual('B5eJF1ptWaXm4bijSPyx', self.request_dump.headers.get('Content-MD5')) + self.assertEqual('application/octet-stream', self.request_dump.headers.get('Content-Type')) + self.assertEqual('2022-10-12T00:00:00.000Z', self.request_dump.headers.get('Expires')) + self.assertEqual('SM4', self.request_dump.headers.get('x-oss-server-side-encryption')) + self.assertEqual('KMS', self.request_dump.headers.get('x-oss-server-side-data-encryption')) + self.assertEqual('9468da86-3509-4f8d-a61e-6eab1eac****', self.request_dump.headers.get('x-oss-server-side-encryption-key-id')) + self.assertEqual('tagging-test', self.request_dump.headers.get('x-oss-tagging')) + self.assertEqual(True, bool(self.request_dump.headers.get('x-oss-forbid-overwrite'))) + self.assertEqual(100*1024*8, int(self.request_dump.headers.get('x-oss-traffic-limit'))) + self.assertEqual('requester', self.request_dump.headers.get('x-oss-request-payer')) + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) + + def test_append_object_fail(self): + self.set_responseFunc(self.response_403_InvalidAccessKeyId) + request = model.AppendObjectRequest( + bucket='bucket', + key='key-test', + position=10, + ) + + try: + result = operations.append_object(self.client, request) + self.fail('should not be here') + except exceptions.OperationError as ope: + self.assertIsInstance(ope.unwrap(), exceptions.ServiceError) + serr = cast(exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual('id-1234', serr.request_id) + self.assertEqual('InvalidAccessKeyId', serr.code) + + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/key-test?append=&position=10', self.request_dump.url) + self.assertEqual('POST', self.request_dump.method) + + def test_copy_object(self): + request = model.CopyObjectRequest( + bucket='bucket', + key='example-object-2.jpg', + source_key='source-invalid-key', + source_bucket='source_bucket-test', + source_version_id='source_version_id-test', + if_match='D41D8CD98F00B204E9800998ECF8****', + if_none_match='D41D8CD98F00B204E9800998ECF9****', + if_modified_since='Fri, 13 Nov 2023 14:47:53 GMT', 
+ if_unmodified_since='Fri, 13 Nov 2015 14:47:53 GMT', + acl='private', + storage_class='ColdArchive', + metadata={ + "client-side-encryption-key": "nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=", + "client-side-encryption-start": "De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=", + "client-side-encryption-cek-alg": "AES/CTR/NoPadding", + "client-side-encryption-wrap-alg": "RSA/NONE/PKCS1Padding", + }, + cache_control='no-cache', + content_disposition='attachment', + content_encoding='utf-8', + content_length=101, + content_md5='B5eJF1ptWaXm4bijSPyx', + content_type='application/octet-stream', + expires='2022-10-12T00:00:00.000Z', + metadata_directive='metadata_directive-test', + server_side_encryption='SM4', + server_side_data_encryption='KMS', + sse_kms_key_id='9468da86-3509-4f8d-a61e-6eab1eac****', + tagging='tagging-test', + tagging_directive='tagging_directive-test', + forbid_overwrite=True, + traffic_limit=100*1024*8, + request_payer='requester', + progress_fn='progress_fn-test', + ) + + result = operations.copy_object(self.client, request) + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg', self.request_dump.url) + self.assertEqual('PUT', self.request_dump.method) + self.assertEqual('D41D8CD98F00B204E9800998ECF8****', self.request_dump.headers.get('x-oss-copy-source-if-match')) + self.assertEqual('D41D8CD98F00B204E9800998ECF9****', self.request_dump.headers.get('x-oss-copy-source-if-none-match')) + self.assertEqual('Fri, 13 Nov 2023 14:47:53 GMT', self.request_dump.headers.get('x-oss-copy-source-if-modified-since')) + self.assertEqual('Fri, 13 Nov 2015 14:47:53 GMT', self.request_dump.headers.get('x-oss-copy-source-if-unmodified-since')) + self.assertEqual('private', self.request_dump.headers.get('x-oss-object-acl')) + self.assertEqual('ColdArchive', self.request_dump.headers.get('x-oss-storage-class')) + self.assertEqual('nyXOp7delQ/MQLjKQMhHLaTHIB6q+C+RA6lGwqqYVa+n3aV5uWhygyv1MWmESurppg=', self.request_dump.headers.get('x-oss-meta-client-side-encryption-key')) + self.assertEqual('De/S3T8wFjx7QPxAAFl7h7TeI2EsZlfCwovrHyoSZGr343NxCUGIp6fQ9sSuOLMoJg7hNw=', self.request_dump.headers.get('x-oss-meta-client-side-encryption-start')) + self.assertEqual('AES/CTR/NoPadding', self.request_dump.headers.get('x-oss-meta-client-side-encryption-cek-alg')) + self.assertEqual('RSA/NONE/PKCS1Padding', self.request_dump.headers.get('x-oss-meta-client-side-encryption-wrap-alg')) + self.assertEqual('no-cache', self.request_dump.headers.get('Cache-Control')) + self.assertEqual('attachment', self.request_dump.headers.get('Content-Disposition')) + self.assertEqual('utf-8', self.request_dump.headers.get('Content-Encoding')) + self.assertEqual(101, int(self.request_dump.headers.get('Content-Length'))) + self.assertEqual('B5eJF1ptWaXm4bijSPyx', self.request_dump.headers.get('Content-MD5')) + self.assertEqual('application/octet-stream', self.request_dump.headers.get('Content-Type')) + self.assertEqual('2022-10-12T00:00:00.000Z', self.request_dump.headers.get('Expires')) + self.assertEqual('metadata_directive-test', self.request_dump.headers.get('x-oss-metadata-directive')) + self.assertEqual('SM4', self.request_dump.headers.get('x-oss-server-side-encryption')) + self.assertEqual('KMS', self.request_dump.headers.get('x-oss-server-side-data-encryption')) + self.assertEqual('9468da86-3509-4f8d-a61e-6eab1eac****', self.request_dump.headers.get('x-oss-server-side-encryption-key-id')) + self.assertEqual('tagging-test', 
self.request_dump.headers.get('x-oss-tagging')) + self.assertEqual('tagging_directive-test', self.request_dump.headers.get('x-oss-tagging-directive')) + self.assertEqual(True, bool(self.request_dump.headers.get('x-oss-forbid-overwrite'))) + self.assertEqual(100*1024*8, int(self.request_dump.headers.get('x-oss-traffic-limit'))) + self.assertEqual('requester', self.request_dump.headers.get('x-oss-request-payer')) + + def test_copy_object_fail(self): + self.set_responseFunc(self.response_403_InvalidAccessKeyId) + request = model.CopyObjectRequest( + bucket='bucket', + key='example-object-2.jpg', + source_key='source-invalid-key', + ) + + try: + result = operations.copy_object(self.client, request) + self.fail('should not be here') + except exceptions.OperationError as ope: + self.assertIsInstance(ope.unwrap(), exceptions.ServiceError) + serr = cast(exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual('id-1234', serr.request_id) + self.assertEqual('InvalidAccessKeyId', serr.code) + + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg', self.request_dump.url) + self.assertEqual('PUT', self.request_dump.method) + + def test_delete_object(self): + request = model.DeleteObjectRequest( + bucket='bucket', + key='example-object-2.jpg', + version_id='CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', + request_payer='requester', + ) + + result = operations.delete_object(self.client, request) + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?versionId=CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0%2A%2A%2A%2A', self.request_dump.url) + self.assertEqual('DELETE', self.request_dump.method) + self.assertEqual('requester', self.request_dump.headers.get('x-oss-request-payer')) + + def test_delete_object_fail(self): + self.set_responseFunc(self.response_403_InvalidAccessKeyId) + request = model.DeleteObjectRequest( + bucket='bucket', + key='example-object-2.jpg', + ) + + try: + result = operations.delete_object(self.client, request) + self.fail('should not be here') + except exceptions.OperationError as ope: + self.assertIsInstance(ope.unwrap(), exceptions.ServiceError) + serr = cast(exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual('id-1234', serr.request_id) + self.assertEqual('InvalidAccessKeyId', serr.code) + + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg', self.request_dump.url) + self.assertEqual('DELETE', self.request_dump.method) + + def test_delete_multiple_objects(self): + request = model.DeleteMultipleObjectsRequest( + bucket='bucket', + objects=[model.DeleteObject( + key='key1', + version_id='ZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', + ), model.DeleteObject( + key='key2', + version_id='CAEQNhiBgMDJgZCA0BYiIDZjhjYmY0****', + )], + encoding_type='url', + content_length=101, + quiet=True, + request_payer='requester', + ) + + result = operations.delete_multiple_objects(self.client, request) + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?delete=&encoding-type=url', self.request_dump.url) + self.assertEqual('POST', self.request_dump.method) + self.assertEqual(101, int(self.request_dump.headers.get('Content-Length'))) + self.assertEqual('requester', self.request_dump.headers.get('x-oss-request-payer')) + + def test_delete_multiple_objects_fail(self): + self.set_responseFunc(self.response_403_InvalidAccessKeyId) + request = model.DeleteMultipleObjectsRequest( + 
bucket='bucket', + objects=[model.DeleteObject( + key='key1', + version_id='ZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', + ), model.DeleteObject( + key='key2', + version_id='CAEQNhiBgMDJgZCA0BYiIDZjhjYmY0****', + )], + ) + + try: + result = operations.delete_multiple_objects(self.client, request) + self.fail('should not be here') + except exceptions.OperationError as ope: + self.assertIsInstance(ope.unwrap(), exceptions.ServiceError) + serr = cast(exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual('id-1234', serr.request_id) + self.assertEqual('InvalidAccessKeyId', serr.code) + + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/?delete=&encoding-type=url', self.request_dump.url) + self.assertEqual('POST', self.request_dump.method) + + def test_get_object_meta(self): + request = model.GetObjectMetaRequest( + bucket='bucket', + key='example-object-2.jpg', + version_id='CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', + request_payer='requester', + ) + + result = operations.get_object_meta(self.client, request) + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?objectMeta=&versionId=CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0%2A%2A%2A%2A', self.request_dump.url) + self.assertEqual('HEAD', self.request_dump.method) + self.assertEqual('requester', self.request_dump.headers.get('x-oss-request-payer')) + + def test_get_object_meta_fail(self): + self.set_responseFunc(self.response_403_InvalidAccessKeyId) + request = model.GetObjectMetaRequest( + bucket='bucket', + key='example-object-2.jpg', + ) + + try: + result = operations.get_object_meta(self.client, request) + self.fail('should not be here') + except exceptions.OperationError as ope: + self.assertIsInstance(ope.unwrap(), exceptions.ServiceError) + serr = cast(exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual('id-1234', serr.request_id) + self.assertEqual('InvalidAccessKeyId', serr.code) + + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?objectMeta=', self.request_dump.url) + self.assertEqual('HEAD', self.request_dump.method) + + def test_restore_object(self): + request = model.RestoreObjectRequest( + bucket='bucket', + key='example-object-2.jpg', + version_id='CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0****', + restore_request=model.RestoreRequest( + days=7, + tier='Expedited', + ), + request_payer='requester', + ) + result = operations.restore_object(self.client, request) + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?restore=&versionId=CAEQNhiBgMDJgZCA0BYiIDc4MGZjZGI2OTBjOTRmNTE5NmU5NmFhZjhjYmY0%2A%2A%2A%2A', self.request_dump.url) + self.assertEqual('POST', self.request_dump.method) + self.assertEqual('requester', self.request_dump.headers.get('x-oss-request-payer')) + + root = ET.fromstring(self.request_dump.body) + self.assertEqual('RestoreRequest', root.tag) + self.assertEqual(7, int(root.findtext('Days'))) + self.assertEqual('Expedited', root.findtext('JobParameters/Tier')) + + def test_restore_object_fail(self): + self.set_responseFunc(self.response_403_InvalidAccessKeyId) + request = model.RestoreObjectRequest( + bucket='bucket', + key='example-object-2.jpg', + ) + + try: + result = operations.restore_object(self.client, request) + self.fail('should not be here') + except exceptions.OperationError as ope: + self.assertIsInstance(ope.unwrap(), exceptions.ServiceError) + serr = 
cast(exceptions.ServiceError, ope.unwrap()) + self.assertEqual(403, serr.status_code) + self.assertEqual('id-1234', serr.request_id) + self.assertEqual('InvalidAccessKeyId', serr.code) + + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/example-object-2.jpg?restore=', self.request_dump.url) + self.assertEqual('POST', self.request_dump.method) + diff --git a/tests/unit/operations/test_region.py b/tests/unit/operations/test_region.py new file mode 100644 index 0000000..f489dec --- /dev/null +++ b/tests/unit/operations/test_region.py @@ -0,0 +1,15 @@ +# pylint: skip-file +from typing import cast +from alibabacloud_oss_v2.models import region as model +from alibabacloud_oss_v2.operations import region as operations +from . import TestOperations + +class TestRegion(TestOperations): + def test_describe_regions(self): + request = model.DescribeRegionsRequest() + result = operations.describe_regions(self.client, request) + self.assertEqual('https://oss-cn-hangzhou.aliyuncs.com/?regions=', self.request_dump.url) + self.assertEqual('GET', self.request_dump.method) + + self.assertEqual(200, result.status_code) + self.assertEqual('OK', result.status) \ No newline at end of file diff --git a/tests/unit/operations/test_service.py b/tests/unit/operations/test_service.py new file mode 100644 index 0000000..eb5a5c1 --- /dev/null +++ b/tests/unit/operations/test_service.py @@ -0,0 +1,8 @@ +# pylint: skip-file +from typing import cast +from alibabacloud_oss_v2.models import service as model +from alibabacloud_oss_v2.operations import service as operations +from . import TestOperations + +class TestService(TestOperations): + """""" \ No newline at end of file diff --git a/tests/unit/retry/__init__.py b/tests/unit/retry/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/retry/test_backoff.py b/tests/unit/retry/test_backoff.py new file mode 100644 index 0000000..f90cb26 --- /dev/null +++ b/tests/unit/retry/test_backoff.py @@ -0,0 +1,40 @@ +# pylint: skip-file + +import unittest +from alibabacloud_oss_v2.retry import backoff + +attempted_ceiling = 64 + +class TestBackoff(unittest.TestCase): + + def test_equal_jitter_backoff(self): + basedelay = 1 + maxdelay = 20 + r = backoff.EqualJitterBackoff(base_delay=basedelay, max_backoff=maxdelay) + self.assertIsNotNone(r) + + for i in range(attempted_ceiling * 2): + delay = r.backoff_delay(i, None) + self.assertGreater(delay, 0.0) + self.assertLess(delay, maxdelay + 1) + + def test_full_jitter_backoff(self): + basedelay = 1 + maxdelay = 20 + r = backoff.FullJitterBackoff(base_delay=basedelay, max_backoff=maxdelay) + self.assertIsNotNone(r) + + for i in range(attempted_ceiling * 2): + delay = r.backoff_delay(i, None) + self.assertGreater(delay, 0.0) + self.assertLess(delay, maxdelay + 1) + + + def test_fixed_delay_backoff(self): + maxdelay = 20 + r = backoff.FixedDelayBackoff(maxdelay) + self.assertIsNotNone(r) + + for i in range(attempted_ceiling * 2): + delay = r.backoff_delay(i, None) + self.assertEqual(delay, maxdelay) diff --git a/tests/unit/retry/test_error_retryable.py b/tests/unit/retry/test_error_retryable.py new file mode 100644 index 0000000..3706dd6 --- /dev/null +++ b/tests/unit/retry/test_error_retryable.py @@ -0,0 +1,65 @@ +# pylint: skip-file + +import unittest +from alibabacloud_oss_v2.retry import error_retryable +from alibabacloud_oss_v2.exceptions import ServiceError, RequestError, ResponseError + +attempted_ceiling = 64 + + +def gen_status_code_error(status_code: int): + return ServiceError( + 
status_code=status_code, + code='', + request_id='', + message='', + ec='', + timestamp='', + request_target='' + ) + + +def gen_service_code_error(code: str): + return ServiceError( + status_code=0, + code=code, + request_id='', + message='', + ec='', + timestamp='', + request_target='' + ) + + +class TestErrorRetryable(unittest.TestCase): + + def test_http_status_code_retryable(self): + r = error_retryable.HTTPStatusCodeRetryable() + self.assertFalse(r.is_error_retryable(Exception())) + self.assertFalse(r.is_error_retryable(None)) + self.assertFalse(r.is_error_retryable(gen_status_code_error(403))) + self.assertFalse(r.is_error_retryable(gen_status_code_error(405))) + self.assertTrue(r.is_error_retryable(gen_status_code_error(401))) + self.assertTrue(r.is_error_retryable(gen_status_code_error(408))) + self.assertTrue(r.is_error_retryable(gen_status_code_error(429))) + self.assertTrue(r.is_error_retryable(gen_status_code_error(500))) + self.assertTrue(r.is_error_retryable(gen_status_code_error(501))) + self.assertTrue(r.is_error_retryable(gen_status_code_error(599))) + + def test_service_error_code_retryable(self): + r = error_retryable.ServiceErrorCodeRetryable() + self.assertFalse(r.is_error_retryable(Exception())) + self.assertFalse(r.is_error_retryable(None)) + self.assertFalse(r.is_error_retryable(gen_service_code_error('123'))) + self.assertTrue(r.is_error_retryable( + gen_service_code_error("RequestTimeTooSkewed"))) + self.assertTrue(r.is_error_retryable( + gen_service_code_error("BadRequest"))) + + def test_client_error_retryable(self): + r = error_retryable.ClientErrorRetryable() + self.assertFalse(r.is_error_retryable(Exception())) + self.assertFalse(r.is_error_retryable(None)) + + self.assertTrue(r.is_error_retryable(RequestError(error=Exception()))) + self.assertTrue(r.is_error_retryable(ResponseError(error=Exception()))) diff --git a/tests/unit/retry/test_retryer_impl.py b/tests/unit/retry/test_retryer_impl.py new file mode 100644 index 0000000..030a9a2 --- /dev/null +++ b/tests/unit/retry/test_retryer_impl.py @@ -0,0 +1,99 @@ +# pylint: skip-file +from typing import List +import unittest +from alibabacloud_oss_v2.retry import retryer_impl, error_retryable +from alibabacloud_oss_v2.exceptions import ServiceError +from alibabacloud_oss_v2.defaults import DEFAULT_BASE_DELAY_S, DEFAULT_MAX_ATTEMPTS, DEFAULT_MAX_BACKOFF_S + +def gen_status_code_error(status_code: int): + return ServiceError( + status_code=status_code, + code='', + request_id='', + message='', + ec='', + timestamp='', + request_target='' + ) + +def gen_service_code_error(code: str): + return ServiceError( + status_code=0, + code=code, + request_id='', + message='', + ec='', + timestamp='', + request_target='' + ) + + +class NopRetryable(error_retryable.ErrorRetryable): + def is_error_retryable(self, error: Exception) -> bool: + return False + + +class TestRetryerImpl(unittest.TestCase): + + def test_nop_retryer(self): + r = retryer_impl.NopRetryer() + self.assertEqual(1, r.max_attempts()) + self.assertFalse(r.is_error_retryable(Exception())) + + def test_standard_retryer_default(self): + r = retryer_impl.StandardRetryer() + self.assertIsNotNone(r) + self.assertEqual(DEFAULT_MAX_ATTEMPTS, r.max_attempts()) + + self.assertFalse(r.is_error_retryable(None)) + self.assertFalse(r.is_error_retryable(Exception())) + self.assertFalse(r.is_error_retryable(gen_status_code_error(403))) + self.assertFalse(r.is_error_retryable(gen_status_code_error(405))) + self.assertTrue(r.is_error_retryable(gen_status_code_error(401))) + 
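+ # A bare StandardRetryer() appears to chain the HTTPStatusCodeRetryable, ServiceErrorCodeRetryable and ClientErrorRetryable checks exercised above, which is why the same status-code and service-code probes apply here.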
self.assertFalse(r.is_error_retryable(gen_service_code_error('123'))) + self.assertTrue(r.is_error_retryable( + gen_service_code_error("RequestTimeTooSkewed"))) + self.assertTrue(r.is_error_retryable( + gen_service_code_error("BadRequest"))) + + error = gen_status_code_error(501) + self.assertTrue(r.is_error_retryable(error)) + for i in range(64 * 2): + delay = r.retry_delay(i, error) + self.assertGreater(delay, 0.0) + self.assertLess(delay, DEFAULT_MAX_BACKOFF_S + 1) + + def test_standard_retryer(self): + max_backoff = 30.5 + r = retryer_impl.StandardRetryer( + max_attempts=4, + max_backoff=max_backoff, + base_delay=1.0, + error_retryables=[NopRetryable()] + ) + self.assertIsNotNone(r) + self.assertEqual(4, r.max_attempts()) + + self.assertFalse(r.is_error_retryable(None)) + self.assertFalse(r.is_error_retryable(Exception())) + self.assertFalse(r.is_error_retryable(gen_status_code_error(403))) + self.assertFalse(r.is_error_retryable(gen_status_code_error(405))) + self.assertFalse(r.is_error_retryable(gen_status_code_error(401))) + self.assertFalse(r.is_error_retryable(gen_service_code_error('123'))) + self.assertFalse(r.is_error_retryable( + gen_service_code_error("RequestTimeTooSkewed"))) + self.assertFalse(r.is_error_retryable( + gen_service_code_error("BadRequest"))) + + error = gen_status_code_error(501) + self.assertFalse(r.is_error_retryable(error)) + values:List[float] = [] + for i in range(64 * 2): + delay = r.retry_delay(i, error) + self.assertGreater(delay, 0.0) + self.assertLess(delay, max_backoff + 1) + if delay > DEFAULT_MAX_BACKOFF_S: + values.append(delay) + + self.assertGreater(len(values), 0) + diff --git a/tests/unit/signer/__init__.py b/tests/unit/signer/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/signer/test_v1.py b/tests/unit/signer/test_v1.py new file mode 100644 index 0000000..030b289 --- /dev/null +++ b/tests/unit/signer/test_v1.py @@ -0,0 +1,229 @@ +# pylint: skip-file +import unittest +import datetime +from urllib.parse import urlencode, quote, urlsplit +from alibabacloud_oss_v2.signer import SignerV1 +from alibabacloud_oss_v2.credentials import StaticCredentialsProvider +from alibabacloud_oss_v2.types import HttpRequest, SigningContext + +class TestSignerV1(unittest.TestCase): + + def test_auth_header_1(self) -> None: + provider = StaticCredentialsProvider("ak", "sk") + cred = provider.get_credentials() + request = HttpRequest( + "PUT", "http://examplebucket.oss-cn-hangzhou.aliyuncs.com") + request.headers.update( + { + 'Content-MD5': 'eB5eJF1ptWaXm4bijSPyxw==', + 'Content-Type': 'text/html', + 'x-oss-meta-author': 'alice', + 'x-oss-meta-magic': 'abracadabra', + 'x-oss-date': 'Wed, 28 Dec 2022 10:27:41 GMT', + } + ) + + context = SigningContext( + bucket='examplebucket', + key='nelson', + request=request, + credentials=cred, + signing_time=datetime.datetime.fromtimestamp(1702743657), + ) + + signer = SignerV1() + signer.sign(context) + self.assertEqual("PUT\neB5eJF1ptWaXm4bijSPyxw==\ntext/html\nWed, 28 Dec 2022 10:27:41 GMT\nx-oss-date:Wed, 28 Dec 2022 10:27:41 GMT\nx-oss-meta-author:alice\nx-oss-meta-magic:abracadabra\n/examplebucket/nelson", context.string_to_sign) + self.assertEqual("OSS ak:kSHKmLxlyEAKtZPkJhG9bZb5k7M=", context.request.headers.get('Authorization')) + + def test_auth_header_2(self) -> None: + provider = StaticCredentialsProvider("ak", "sk") + cred = provider.get_credentials() + request = HttpRequest( + "PUT", "http://examplebucket.oss-cn-hangzhou.aliyuncs.com/?acl") + request.headers.update( + { + 
'Content-MD5': 'eB5eJF1ptWaXm4bijSPyxw==', + 'Content-Type': 'text/html', + 'x-oss-meta-author': 'alice', + 'x-oss-meta-magic': 'abracadabra', + 'x-oss-date': 'Wed, 28 Dec 2022 10:27:41 GMT', + } + ) + + context = SigningContext( + bucket='examplebucket', + key='nelson', + request=request, + credentials=cred, + signing_time=datetime.datetime.fromtimestamp(1702743657), + ) + + signer = SignerV1() + signer.sign(context) + self.assertEqual("PUT\neB5eJF1ptWaXm4bijSPyxw==\ntext/html\nWed, 28 Dec 2022 10:27:41 GMT\nx-oss-date:Wed, 28 Dec 2022 10:27:41 GMT\nx-oss-meta-author:alice\nx-oss-meta-magic:abracadabra\n/examplebucket/nelson?acl", context.string_to_sign) + self.assertEqual("OSS ak:/afkugFbmWDQ967j1vr6zygBLQk=", context.request.headers.get('Authorization')) + + + def test_auth_header_3(self) -> None: + provider = StaticCredentialsProvider("ak", "sk") + cred = provider.get_credentials() + request = HttpRequest( + "GET", "http://examplebucket.oss-cn-hangzhou.aliyuncs.com/?resourceGroup&non-resousce=null") + request.headers.update( + { + 'x-oss-date': 'Wed, 28 Dec 2022 10:27:41 GMT', + } + ) + + context = SigningContext( + bucket='examplebucket', + request=request, + credentials=cred, + signing_time=datetime.datetime.fromtimestamp(1702743657), + ) + + signer = SignerV1() + signer.sign(context) + self.assertEqual("GET\n\n\nWed, 28 Dec 2022 10:27:41 GMT\nx-oss-date:Wed, 28 Dec 2022 10:27:41 GMT\n/examplebucket/?resourceGroup", context.string_to_sign) + self.assertEqual("OSS ak:vkQmfuUDyi1uDi3bKt67oemssIs=", context.request.headers.get('Authorization')) + + def test_auth_header_4(self) -> None: + provider = StaticCredentialsProvider("ak", "sk") + cred = provider.get_credentials() + request = HttpRequest( + "GET", "http://examplebucket.oss-cn-hangzhou.aliyuncs.com/?resourceGroup&acl") + request.headers.update( + { + 'x-oss-date': 'Wed, 28 Dec 2022 10:27:41 GMT', + } + ) + + context = SigningContext( + bucket='examplebucket', + request=request, + credentials=cred, + signing_time=datetime.datetime.fromtimestamp(1702743657), + ) + + signer = SignerV1() + signer.sign(context) + self.assertEqual("GET\n\n\nWed, 28 Dec 2022 10:27:41 GMT\nx-oss-date:Wed, 28 Dec 2022 10:27:41 GMT\n/examplebucket/?acl&resourceGroup", context.string_to_sign) + self.assertEqual("OSS ak:x3E5TgOvl/i7PN618s5mEvpJDYk=", context.request.headers.get('Authorization')) + + + def test_auth_query(self) -> None: + provider = StaticCredentialsProvider("ak", "sk") + cred = provider.get_credentials() + request = HttpRequest( + "GET", "http://bucket.oss-cn-hangzhou.aliyuncs.com/key?versionId=versionId") + + context = SigningContext( + bucket='bucket', + key='key', + request=request, + credentials=cred, + ) + context.expiration_time = datetime.datetime.fromtimestamp(1699807420) + context.auth_method_query = True + + signer = SignerV1() + + signer.sign(context) + + queries = _get_url_query(request.url) + + self.assertEqual('versionId', queries.get('versionId')) + self.assertIsNotNone(queries.get('Expires')) + self.assertEqual('ak', queries.get('OSSAccessKeyId')) + self.assertEqual('dcLTea%2BYh9ApirQ8o8dOPqtvJXQ%3D', queries.get('Signature')) + + def test_auth_query_with_token(self) -> None: + provider = StaticCredentialsProvider("ak", "sk", "token") + cred = provider.get_credentials() + request = HttpRequest( + "GET", "http://bucket.oss-cn-hangzhou.aliyuncs.com/key+123?versionId=versionId") + + context = SigningContext( + 
bucket='bucket', + key='key+123', + request=request, + credentials=cred, + ) + context.expiration_time = datetime.datetime.fromtimestamp(1699808204) + context.auth_method_query = True + + + signer = SignerV1() + + signer.sign(context) + + queries = _get_url_query(request.url) + + self.assertIsNotNone(queries.get('Expires')) + self.assertEqual('ak', queries.get('OSSAccessKeyId')) + self.assertEqual('token', queries.get('security-token')) + self.assertEqual('jzKYRrM5y6Br0dRFPaTGOsbrDhY%3D', queries.get('Signature')) + + def test_auth_query_param_with_token(self) -> None: + provider = StaticCredentialsProvider("ak", "sk", "token") + cred = provider.get_credentials() + request = HttpRequest( + "GET", "http://bucket.oss-cn-hangzhou.aliyuncs.com/key?versionId=versionId") + request.headers.update( + { + 'x-oss-head1': 'value', + 'abc': 'value', + 'ZAbc': 'value', + 'XYZ': 'value', + 'content-type': 'application/octet-stream', + } + ) + + context = SigningContext( + bucket='bucket', + key='key', + request=request, + credentials=cred, + ) + context.expiration_time = datetime.datetime.fromtimestamp(1699808204) + context.auth_method_query = True + + parameters = { + 'param1': 'value1', + '+param1': 'value3', + '|param1': 'value4', + '+param2': '', + '|param2': '', + 'param2': '', + 'response-content-disposition': 'attachment; filename=example.txt' + } + query = urlencode(parameters, quote_via=quote) + + request.url = request.url + "?" + query + + signer = SignerV1() + + signer.sign(context) + + queries = _get_url_query(request.url) + + self.assertIsNotNone(queries.get('Expires')) + self.assertEqual('ak', queries.get('OSSAccessKeyId')) + self.assertEqual('attachment%3B%20filename%3Dexample.txt', queries.get('response-content-disposition')) + self.assertEqual('token', queries.get('security-token')) + self.assertEqual('3GJoEOv5LX2ASp0HJk%2FhAk%2BqLJc%3D', queries.get('Signature')) + + +def _get_url_query(url: str): + encoded_pairs = {} + parts = urlsplit(url) + if parts.query: + for pair in parts.query.split('&'): + key, _, value = pair.partition('=') + encoded_pairs[key] = value + return encoded_pairs diff --git a/tests/unit/signer/test_v4.py b/tests/unit/signer/test_v4.py new file mode 100644 index 0000000..eca32d4 --- /dev/null +++ b/tests/unit/signer/test_v4.py @@ -0,0 +1,432 @@ +# pylint: skip-file +import unittest +import datetime +from urllib.parse import urlencode, quote, urlsplit +from alibabacloud_oss_v2.signer import SignerV4 +from alibabacloud_oss_v2.credentials import StaticCredentialsProvider +from alibabacloud_oss_v2.types import HttpRequest, SigningContext + +class TestSignerV4(unittest.TestCase): + def test_auth_header(self) -> None: + provider = StaticCredentialsProvider("ak", "sk") + cred = provider.get_credentials() + request = HttpRequest( + "PUT", "http://bucket.oss-cn-hangzhou.aliyuncs.com") + request.headers.update( + { + 'x-oss-head1': 'value', + 'abc': 'value', + 'ZAbc': 'value', + 'XYZ': 'value', + 'content-type': 'text/plain', + 'x-oss-content-sha256': 'UNSIGNED-PAYLOAD', + } + ) + + context = SigningContext( + bucket='bucket', + key='1234+-/123/1.txt', + request=request, + credentials=cred, + product='oss', + region='cn-hangzhou', + signing_time=datetime.datetime.fromtimestamp(1702743657), + ) + + parameters = { + 'param1': 'value1', + '+param1': 'value3', + '|param1': 'value4', + '+param2': '', + '|param2': '', + 'param2': '', + } + query = urlencode(parameters, quote_via=quote) + + request.url = request.url + "?" 
+ query + + signer = SignerV4() + + signer.sign(context) + + auth_pat = 'OSS4-HMAC-SHA256 Credential=ak/20231216/cn-hangzhou/oss/aliyun_v4_request,Signature=e21d18daa82167720f9b1047ae7e7f1ce7cb77a31e8203a7d5f4624fa0284afe' + self.assertEqual(auth_pat, context.request.headers.get('Authorization')) + + + def test_auth_header_with_token(self) -> None: + provider = StaticCredentialsProvider( + 'ak', 'sk', security_token='token') + cred = provider.get_credentials() + request = HttpRequest( + 'PUT', 'http://bucket.oss-cn-hangzhou.aliyuncs.com') + request.headers.update( + { + 'x-oss-head1': 'value', + 'abc': 'value', + 'ZAbc': 'value', + 'XYZ': 'value', + 'content-type': 'text/plain', + 'x-oss-content-sha256': 'UNSIGNED-PAYLOAD', + } + ) + + context = SigningContext( + bucket='bucket', + key='1234+-/123/1.txt', + request=request, + credentials=cred, + product='oss', + region='cn-hangzhou', + signing_time=datetime.datetime.fromtimestamp(1702784856), + ) + + parameters = { + 'param1': 'value1', + '+param1': 'value3', + '|param1': 'value4', + '+param2': '', + '|param2': '', + 'param2': '', + } + query = urlencode(parameters, quote_via=quote) + + request.url = request.url + "?" + query + + signer = SignerV4() + + signer.sign(context) + + auth_pat = 'OSS4-HMAC-SHA256 Credential=ak/20231217/cn-hangzhou/oss/aliyun_v4_request,Signature=b94a3f999cf85bcdc00d332fbd3734ba03e48382c36fa4d5af5df817395bd9ea' + self.assertEqual( + auth_pat, context.request.headers.get('Authorization')) + + def test_auth_header_with_additional_headers(self) -> None: + provider = StaticCredentialsProvider("ak", "sk") + cred = provider.get_credentials() + request = HttpRequest( + "PUT", "http://bucket.oss-cn-hangzhou.aliyuncs.com") + request.headers.update( + { + 'x-oss-head1': 'value', + 'abc': 'value', + 'ZAbc': 'value', + 'XYZ': 'value', + 'content-type': 'text/plain', + 'x-oss-content-sha256': 'UNSIGNED-PAYLOAD', + } + ) + + context = SigningContext( + bucket='bucket', + key='1234+-/123/1.txt', + request=request, + credentials=cred, + product='oss', + region='cn-hangzhou', + signing_time=datetime.datetime.fromtimestamp(1702747512), + additional_headers={'ZAbc', 'abc'} + ) + + parameters = { + 'param1': 'value1', + '+param1': 'value3', + '|param1': 'value4', + '+param2': '', + '|param2': '', + 'param2': '', + } + query = urlencode(parameters, quote_via=quote) + + request.url = request.url + "?" + query + + signer = SignerV4() + + signer.sign(context) + + auth_pat = 'OSS4-HMAC-SHA256 Credential=ak/20231216/cn-hangzhou/oss/aliyun_v4_request,AdditionalHeaders=abc;zabc,Signature=4a4183c187c07c8947db7620deb0a6b38d9fbdd34187b6dbaccb316fa251212f' + self.assertEqual( + auth_pat, context.request.headers.get('Authorization')) + + # with default signed header + request = HttpRequest( + "PUT", "http://bucket.oss-cn-hangzhou.aliyuncs.com") + request.headers.update( + { + 'x-oss-head1': 'value', + 'abc': 'value', + 'ZAbc': 'value', + 'XYZ': 'value', + 'content-type': 'text/plain', + 'x-oss-content-sha256': 'UNSIGNED-PAYLOAD', + } + ) + + context = SigningContext( + bucket='bucket', + key='1234+-/123/1.txt', + request=request, + credentials=cred, + product='oss', + region='cn-hangzhou', + signing_time=datetime.datetime.fromtimestamp(1702747512), + additional_headers={'x-oss-no-exist', 'ZAbc', 'x-oss-head1', 'abc'} + ) + + parameters = { + 'param1': 'value1', + '+param1': 'value3', + '|param1': 'value4', + '+param2': '', + '|param2': '', + 'param2': '', + } + query = urlencode(parameters, quote_via=quote) + + request.url = request.url + "?" 
+ query + + signer = SignerV4() + + signer.sign(context) + + auth_pat = 'OSS4-HMAC-SHA256 Credential=ak/20231216/cn-hangzhou/oss/aliyun_v4_request,AdditionalHeaders=abc;zabc,Signature=4a4183c187c07c8947db7620deb0a6b38d9fbdd34187b6dbaccb316fa251212f' + self.assertEqual( + auth_pat, context.request.headers.get('Authorization')) + + def test_auth_query(self) -> None: + provider = StaticCredentialsProvider("ak", "sk") + cred = provider.get_credentials() + request = HttpRequest( + "PUT", "http://bucket.oss-cn-hangzhou.aliyuncs.com") + request.headers.update( + { + 'x-oss-head1': 'value', + 'abc': 'value', + 'ZAbc': 'value', + 'XYZ': 'value', + 'content-type': 'application/octet-stream', + } + ) + + context = SigningContext( + bucket='bucket', + key='1234+-/123/1.txt', + request=request, + credentials=cred, + product='oss', + region='cn-hangzhou', + signing_time=datetime.datetime.fromtimestamp(1702781677), + ) + context.expiration_time = datetime.datetime.fromtimestamp(1702782276) + context.auth_method_query = True + + parameters = { + 'param1': 'value1', + '+param1': 'value3', + '|param1': 'value4', + '+param2': '', + '|param2': '', + 'param2': '', + } + query = urlencode(parameters, quote_via=quote) + + request.url = request.url + "?" + query + + signer = SignerV4() + + signer.sign(context) + + queries = _get_url_query(request.url) + + self.assertEqual('OSS4-HMAC-SHA256', + queries.get('x-oss-signature-version')) + self.assertEqual('599', queries.get('x-oss-expires')) + self.assertEqual( + 'ak%2F20231217%2Fcn-hangzhou%2Foss%2Faliyun_v4_request', + queries.get('x-oss-credential')) + self.assertEqual( + 'a39966c61718be0d5b14e668088b3fa07601033f6518ac7b523100014269c0fe', + queries.get('x-oss-signature')) + self.assertEqual('', queries.get('x-oss-additional-headers', '')) + + def test_auth_query_with_token(self) -> None: + provider = StaticCredentialsProvider("ak", "sk", "token") + cred = provider.get_credentials() + request = HttpRequest( + "PUT", "http://bucket.oss-cn-hangzhou.aliyuncs.com") + request.headers.update( + { + 'x-oss-head1': 'value', + 'abc': 'value', + 'ZAbc': 'value', + 'XYZ': 'value', + 'content-type': 'application/octet-stream', + } + ) + + context = SigningContext( + bucket='bucket', + key='1234+-/123/1.txt', + request=request, + credentials=cred, + product='oss', + region='cn-hangzhou', + signing_time=datetime.datetime.fromtimestamp(1702785388), + ) + context.expiration_time = datetime.datetime.fromtimestamp(1702785987) + context.auth_method_query = True + + parameters = { + 'param1': 'value1', + '+param1': 'value3', + '|param1': 'value4', + '+param2': '', + '|param2': '', + 'param2': '', + } + query = urlencode(parameters, quote_via=quote) + + request.url = request.url + "?" 
+ query + + signer = SignerV4() + + signer.sign(context) + + queries = _get_url_query(request.url) + + self.assertEqual('OSS4-HMAC-SHA256', + queries.get('x-oss-signature-version')) + self.assertEqual('20231217T035628Z', queries.get('x-oss-date')) + self.assertEqual('599', queries.get('x-oss-expires')) + self.assertEqual( + 'ak%2F20231217%2Fcn-hangzhou%2Foss%2Faliyun_v4_request', + queries.get('x-oss-credential')) + self.assertEqual( + '3817ac9d206cd6dfc90f1c09c00be45005602e55898f26f5ddb06d7892e1f8b5', + queries.get('x-oss-signature')) + self.assertEqual('', queries.get('x-oss-additional-headers', '')) + + def test_auth_query_with_additional_headers(self) -> None: + provider = StaticCredentialsProvider("ak", "sk") + cred = provider.get_credentials() + request = HttpRequest( + "PUT", "http://bucket.oss-cn-hangzhou.aliyuncs.com") + request.headers.update( + { + 'x-oss-head1': 'value', + 'abc': 'value', + 'ZAbc': 'value', + 'XYZ': 'value', + 'content-type': 'application/octet-stream', + } + ) + + context = SigningContext( + bucket='bucket', + key='1234+-/123/1.txt', + request=request, + credentials=cred, + product='oss', + region='cn-hangzhou', + signing_time=datetime.datetime.fromtimestamp(1702783809), + additional_headers={'ZAbc', 'abc'} + ) + context.expiration_time = datetime.datetime.fromtimestamp(1702784408) + context.auth_method_query = True + + parameters = { + 'param1': 'value1', + '+param1': 'value3', + '|param1': 'value4', + '+param2': '', + '|param2': '', + 'param2': '', + } + query = urlencode(parameters, quote_via=quote) + + request.url = request.url + "?" + query + + signer = SignerV4() + + signer.sign(context) + + queries = _get_url_query(request.url) + + self.assertEqual('OSS4-HMAC-SHA256', + queries.get('x-oss-signature-version')) + self.assertEqual('20231217T033009Z', queries.get('x-oss-date')) + self.assertEqual('599', queries.get('x-oss-expires')) + self.assertEqual( + 'ak%2F20231217%2Fcn-hangzhou%2Foss%2Faliyun_v4_request', + queries.get('x-oss-credential')) + self.assertEqual( + '6bd984bfe531afb6db1f7550983a741b103a8c58e5e14f83ea474c2322dfa2b7', + queries.get('x-oss-signature')) + self.assertEqual('abc%3Bzabc', queries.get( + 'x-oss-additional-headers', '')) + + # with default signed header + request = HttpRequest( + "PUT", "http://bucket.oss-cn-hangzhou.aliyuncs.com") + request.headers.update( + { + 'x-oss-head1': 'value', + 'abc': 'value', + 'ZAbc': 'value', + 'XYZ': 'value', + 'content-type': 'application/octet-stream', + } + ) + + context = SigningContext( + bucket='bucket', + key='1234+-/123/1.txt', + request=request, + credentials=cred, + product='oss', + region='cn-hangzhou', + signing_time=datetime.datetime.fromtimestamp(1702783809), + additional_headers={'x-oss-no-exist', 'ZAbc', 'x-oss-head1', 'abc'} + ) + context.expiration_time = datetime.datetime.fromtimestamp(1702784408) + context.auth_method_query = True + + parameters = { + 'param1': 'value1', + '+param1': 'value3', + '|param1': 'value4', + '+param2': '', + '|param2': '', + 'param2': '', + } + query = urlencode(parameters, quote_via=quote) + + request.url = request.url + "?" 
+ query + + signer = SignerV4() + + signer.sign(context) + + queries = _get_url_query(request.url) + + self.assertEqual('OSS4-HMAC-SHA256', + queries.get('x-oss-signature-version')) + self.assertEqual('20231217T033009Z', queries.get('x-oss-date')) + self.assertEqual('599', queries.get('x-oss-expires')) + self.assertEqual( + 'ak%2F20231217%2Fcn-hangzhou%2Foss%2Faliyun_v4_request', + queries.get('x-oss-credential')) + self.assertEqual( + '6bd984bfe531afb6db1f7550983a741b103a8c58e5e14f83ea474c2322dfa2b7', + queries.get('x-oss-signature')) + self.assertEqual('abc%3Bzabc', queries.get( + 'x-oss-additional-headers', '')) + + +def _get_url_query(url: str): + encoded_pairs = {} + parts = urlsplit(url) + if parts.query: + for pair in parts.query.split('&'): + key, _, value = pair.partition('=') + encoded_pairs[key] = value + return encoded_pairs diff --git a/tests/unit/test_checkpoint.py b/tests/unit/test_checkpoint.py new file mode 100644 index 0000000..f57d6ed --- /dev/null +++ b/tests/unit/test_checkpoint.py @@ -0,0 +1,409 @@ +# pylint: skip-file +import os +import json +import unittest +from alibabacloud_oss_v2.types import CaseInsensitiveDict +from alibabacloud_oss_v2 import checkpoint +from alibabacloud_oss_v2 import models +from alibabacloud_oss_v2 import defaults +from alibabacloud_oss_v2 import utils + +def _remove_slice(filename): + try: + os.remove(filename) + except (OSError, ValueError): + pass + +def _write_file(filename, data): + with open(filename, 'wb') as f: + f.write(data) + +class TestCheckpoint(unittest.TestCase): + def test_download_checkpoint(self): + request = models.GetObjectRequest( + bucket='bucket', + key='key' + ) + + dest_filepath = "download-file-no-surfix" + cpdir = "." + + headers = CaseInsensitiveDict({ + "Etag": "\"D41D8CD98F00B204E9800998ECF8****\"", + "Content-Length": "344606", + "Last-Modified": "Fri, 24 Feb 2012 06:07:48 GMT", + }) + part_size = defaults.DEFAULT_DOWNLOAD_PART_SIZE + cp = checkpoint.DownloadCheckpoint(request, dest_filepath, cpdir, headers, part_size) + self.assertIsNotNone(cp) + self.assertEqual("\"D41D8CD98F00B204E9800998ECF8****\"", cp.cp_info["Data"]["ObjectMeta"]["ETag"]) + self.assertEqual("Fri, 24 Feb 2012 06:07:48 GMT", cp.cp_info["Data"]["ObjectMeta"]["LastModified"]) + self.assertEqual(344606, cp.cp_info["Data"]["ObjectMeta"]["Size"]) + + self.assertEqual("oss://bucket/key", cp.cp_info["Data"]["ObjectInfo"]["Name"]) + self.assertEqual("", cp.cp_info["Data"]["ObjectInfo"]["VersionId"]) + self.assertEqual("", cp.cp_info["Data"]["ObjectInfo"]["Range"]) + + self.assertEqual(defaults.CHECKPOINT_MAGIC, cp.cp_info["Magic"]) + self.assertEqual(None, cp.cp_info.get("MD5", None)) + + self.assertEqual(dest_filepath, cp.cp_info["Data"]["FilePath"]) + self.assertEqual(defaults.DEFAULT_DOWNLOAD_PART_SIZE, cp.cp_info["Data"]["PartSize"]) + + #has version id + request = models.GetObjectRequest( + bucket='bucket', + key='key', + version_id='id' + ) + cp_vid = checkpoint.DownloadCheckpoint(request, dest_filepath, cpdir, headers, part_size) + self.assertEqual("oss://bucket/key", cp_vid.cp_info["Data"]["ObjectInfo"]["Name"]) + self.assertEqual("id", cp_vid.cp_info["Data"]["ObjectInfo"]["VersionId"]) + self.assertEqual("", cp_vid.cp_info["Data"]["ObjectInfo"]["Range"]) + + #has range + request = models.GetObjectRequest( + bucket='bucket', + key='key', + version_id='id', + range_header='bytes=1-10' + ) + cp_range = checkpoint.DownloadCheckpoint(request, dest_filepath, cpdir, headers, part_size) + self.assertEqual("oss://bucket/key", 
cp_range.cp_info["Data"]["ObjectInfo"]["Name"]) + self.assertEqual("id", cp_range.cp_info["Data"]["ObjectInfo"]["VersionId"]) + self.assertEqual("bytes=1-10", cp_range.cp_info["Data"]["ObjectInfo"]["Range"]) + + #with other destFilePath + dest_filepath1 = dest_filepath + "-123" + cp_range_dest = checkpoint.DownloadCheckpoint(request, dest_filepath1, cpdir, headers, part_size) + self.assertEqual(dest_filepath1, cp_range_dest.cp_info["Data"]["FilePath"]) + self.assertNotEqual(cp_range.cp_filepath, cp_range_dest.cp_filepath) + + #check dump + cp.dump() + self.assertTrue(os.path.isfile(cp.cp_filepath)) + content = b'' + with open(cp.cp_filepath, 'rb') as f: + content = f.read() + self.assertTrue(len(content) > 0) + info = json.loads(content) + + self.assertEqual("\"D41D8CD98F00B204E9800998ECF8****\"", info["Data"]["ObjectMeta"]["ETag"]) + self.assertEqual("Fri, 24 Feb 2012 06:07:48 GMT", info["Data"]["ObjectMeta"]["LastModified"]) + self.assertEqual(344606, info["Data"]["ObjectMeta"]["Size"]) + + self.assertEqual("oss://bucket/key", info["Data"]["ObjectInfo"]["Name"]) + self.assertEqual("", info["Data"]["ObjectInfo"]["VersionId"]) + self.assertEqual("", info["Data"]["ObjectInfo"]["Range"]) + + self.assertEqual(defaults.CHECKPOINT_MAGIC, info["Magic"]) + self.assertEqual(32, len(info["MD5"])) + + self.assertEqual(dest_filepath, info["Data"]["FilePath"]) + self.assertEqual(part_size, info["Data"]["PartSize"]) + + #check load + cp.load() + self.assertEqual(True, cp.loaded) + + #check valid + self.assertEqual(True, cp._is_valid()) + + #check complete + self.assertEqual(True, os.path.isfile(cp.cp_filepath)) + cp.remove() + self.assertEqual(False, os.path.exists(cp.cp_filepath)) + + #load not match + cp = checkpoint.DownloadCheckpoint(request, dest_filepath, cpdir, headers, part_size) + self.assertEqual(False, cp.loaded) + not_match = '{"Magic":"92611BED-89E2-46B6-89E5-72F273D4B0A3","MD5":"2f132b5bf65640868a47cb52c57492c8","Data":{"ObjectInfo":{"Name":"oss://bucket/key","VersionId":"","Range":""},"ObjectMeta":{"Size":344606,"LastModified":"Fri, 24 Feb 2012 06:07:48 GMT","ETag":"D41D8CD98F00B204E9800998ECF8****"},"FilePath":"gthnjXGQ-no-surfix","PartSize":5242880,"DownloadInfo":{"Offset":5242880,"CRC64":0}}}' + with open(cp.cp_filepath, 'wb') as f: + f.write(not_match.encode()) + self.assertEqual(True, os.path.isfile(cp.cp_filepath)) + cp.load() + self.assertEqual(False, cp.loaded) + self.assertEqual(False, os.path.exists(cp.cp_filepath)) + + + def test_download_checkpoint_invalid_cppath(self): + request = models.GetObjectRequest( + bucket='bucket', + key='key' + ) + + dest_filepath = "checkpoint_invalid_cppath-no-surfix" + cpdir = "./invalid-dir/" + + headers = CaseInsensitiveDict({ + "Etag": "\"D41D8CD98F00B204E9800998ECF8****\"", + "Content-Length": "344606", + "Last-Modified": "Fri, 24 Feb 2012 06:07:48 GMT", + }) + part_size = defaults.DEFAULT_DOWNLOAD_PART_SIZE + cp = checkpoint.DownloadCheckpoint(request, dest_filepath, cpdir, headers, part_size) + + self.assertIsNotNone(cp) + self.assertEqual(dest_filepath, cp.cp_info["Data"]["FilePath"]) + self.assertEqual("./invalid-dir", cp.cp_dirpath) + self.assertIn("invalid-dir", cp.cp_filepath) + + #dump fail + done = cp.dump() + self.assertFalse(done) + + #load fail + try: + cp.load() + self.fail("should not be here") + except Exception as err: + self.assertIn("Invaid checkpoint dir", str(err)) + + def test_download_checkpoint_valid(self): + request = models.GetObjectRequest( + bucket='bucket', + key='key' + ) + + dest_filepath = "gthnjXGQ-no-surfix" + 
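+ # The MD5 value inside each checkpoint fixture below appears to cover the serialized Data section (the dump test above asserts a 32-char digest), so these JSON strings must stay byte-for-byte intact; the tampered variants deliberately trigger the mismatch branches.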
cpdir = "." + + headers = CaseInsensitiveDict({ + "Etag": "\"D41D8CD98F00B204E9800998ECF8****\"", + "Content-Length": "344606", + "Last-Modified": "Fri, 24 Feb 2012 06:07:48 GMT", + }) + + part_size = 5 * 1024 *1024 + cp = checkpoint.DownloadCheckpoint(request, dest_filepath, cpdir, headers, part_size) + + _remove_slice(dest_filepath) + self.assertEqual(0, cp.doffset) + #cpdata = '{"Magic":"92611BED-89E2-46B6-89E5-72F273D4B0A3","MD5":"cc615a3b3fca2766786669a68895f3ed","Data":{"ObjectInfo":{"Name":"oss://bucket/key","VersionId":"","Range":""},"ObjectMeta":{"Size":344606,"LastModified":"Fri, 24 Feb 2012 06:07:48 GMT","ETag":"\\"D41D8CD98F00B204E9800998ECF8****\\""},"FilePath":"gthnjXGQ-no-surfix","PartSize":5242880,"DownloadInfo":{"Offset":5242880,"CRC64":0}}}' + cpdata = '{"Magic": "92611BED-89E2-46B6-89E5-72F273D4B0A3", "MD5": "cc615a3b3fca2766786669a68895f3ed", "Data": {"ObjectInfo": {"Name": "oss://bucket/key", "VersionId": "", "Range": ""}, "ObjectMeta": {"Size": 344606, "LastModified": "Fri, 24 Feb 2012 06:07:48 GMT", "ETag": "\\"D41D8CD98F00B204E9800998ECF8****\\""}, "FilePath": "gthnjXGQ-no-surfix", "PartSize": 5242880, "DownloadInfo": {"Offset": 5242880, "CRC64": 0}}}' + _write_file(cp.cp_filepath, cpdata.encode()) + self.assertTrue(cp._is_valid()) + self.assertEqual(5242880, cp.doffset) + + #md5 fail + cpdata = '{"Magic":"92611BED-89E2-46B6-89E5-72F273D4B0A3","MD5":"4f132b5bf65640868a47cb52c57492c8","Data":{"ObjectInfo":{"Name":"oss://bucket/key","VersionId":"","Range":""},"ObjectMeta":{"Size":344606,"LastModified":"Fri, 24 Feb 2012 06:07:48 GMT","ETag":"\\"D41D8CD98F00B204E9800998ECF8****\\""},"FilePath":"gthnjXGQ-no-surfix","PartSize":5242880,"DownloadInfo":{"Offset":5242880,"CRC64":0}}}' + _write_file(cp.cp_filepath, cpdata.encode()) + self.assertFalse(cp._is_valid()) + + #Magic fail + cpdata = '{"Magic":"82611BED-89E2-46B6-89E5-72F273D4B0A3","MD5":"cc615a3b3fca2766786669a68895f3ed","Data":{"ObjectInfo":{"Name":"oss://bucket/key","VersionId":"","Range":""},"ObjectMeta":{"Size":344606,"LastModified":"Fri, 24 Feb 2012 06:07:48 GMT","ETag":"\\"D41D8CD98F00B204E9800998ECF8****\\""},"FilePath":"gthnjXGQ-no-surfix","PartSize":5242880,"DownloadInfo":{"Offset":5242880,"CRC64":0}}}' + _write_file(cp.cp_filepath, cpdata.encode()) + self.assertFalse(cp._is_valid()) + + # invalid cp format + cpdata = '"Magic":"92611BED-89E2-46B6-89E5-72F273D4B0A3","MD5":"cc615a3b3fca2766786669a68895f3ed","Data":{"ObjectInfo":{"Name":"oss://bucket/key","VersionId":"","Range":""},"ObjectMeta":{"Size":344606,"LastModified":"Fri, 24 Feb 2012 06:07:48 GMT","ETag":"\\"D41D8CD98F00B204E9800998ECF8****\\""},"FilePath":"gthnjXGQ-no-surfix","PartSize":5242880,"DownloadInfo":{"Offset":5242880,"CRC64":0}}}' + _write_file(cp.cp_filepath, cpdata.encode()) + self.assertFalse(cp._is_valid()) + + # ObjectInfo not equal + cpdata = '{"Magic": "92611BED-89E2-46B6-89E5-72F273D4B0A3", "MD5": "67c7658602742baa0a5b6788bfbb9b8f", "Data": {"ObjectInfo": {"Name": "oss://bucket/key", "VersionId": "123", "Range": ""}, "ObjectMeta": {"Size": 344606, "LastModified": "Fri, 24 Feb 2012 06:07:48 GMT", "ETag": "\\"D41D8CD98F00B204E9800998ECF8****\\""}, "FilePath": "gthnjXGQ-no-surfix", "PartSize": 5242880, "DownloadInfo": {"Offset": 5242880, "CRC64": 0}}}' + _write_file(cp.cp_filepath, cpdata.encode()) + self.assertFalse(cp._is_valid()) + + # ObjectMeta not equal + cpdata = '{"Magic": "92611BED-89E2-46B6-89E5-72F273D4B0A3", "MD5": "574e30360fd1575dcbba5831ae0a3e30", "Data": {"ObjectInfo": {"Name": "oss://bucket/key", "VersionId": "", 
"Range": ""}, "ObjectMeta": {"Size": 3446061, "LastModified": "Fri, 24 Feb 2012 06:07:48 GMT", "ETag": "\\"D41D8CD98F00B204E9800998ECF8****\\""}, "FilePath": "gthnjXGQ-no-surfix", "PartSize": 5242880, "DownloadInfo": {"Offset": 5242880, "CRC64": 0}}}' + _write_file(cp.cp_filepath, cpdata.encode()) + self.assertFalse(cp._is_valid()) + + # FilePath not equal + cpdata = '{"Magic": "92611BED-89E2-46B6-89E5-72F273D4B0A3", "MD5": "8b328023db46b845d6cfc60ce4b5b7cd", "Data": {"ObjectInfo": {"Name": "oss://bucket/key", "VersionId": "", "Range": ""}, "ObjectMeta": {"Size": 344606, "LastModified": "Fri, 24 Feb 2012 06:07:48 GMT", "ETag": "\\"D41D8CD98F00B204E9800998ECF8****\\""}, "FilePath": "gthnjXGQ-no-surfix-1", "PartSize": 5242880, "DownloadInfo": {"Offset": 5242880, "CRC64": 0}}}' + _write_file(cp.cp_filepath, cpdata.encode()) + self.assertFalse(cp._is_valid()) + + # PartSize not equal + cpdata = '{"Magic": "92611BED-89E2-46B6-89E5-72F273D4B0A3", "MD5": "3a4412930e3598bdc6ee92c12376e597", "Data": {"ObjectInfo": {"Name": "oss://bucket/key", "VersionId": "", "Range": ""}, "ObjectMeta": {"Size": 344606, "LastModified": "Fri, 24 Feb 2012 06:07:48 GMT", "ETag": "\\"D41D8CD98F00B204E9800998ECF8****\\""}, "FilePath": "gthnjXGQ-no-surfix", "PartSize": 2621440, "DownloadInfo": {"Offset": 5242880, "CRC64": 0}}}' + _write_file(cp.cp_filepath, cpdata.encode()) + self.assertFalse(cp._is_valid()) + + # Offset invalid + cpdata = '{"Magic": "92611BED-89E2-46B6-89E5-72F273D4B0A3", "MD5": "3834d2b67de65d7ccc5775f419e1ec62", "Data": {"ObjectInfo": {"Name": "oss://bucket/key", "VersionId": "", "Range": ""}, "ObjectMeta": {"Size": 344606, "LastModified": "Fri, 24 Feb 2012 06:07:48 GMT", "ETag": "\\"D41D8CD98F00B204E9800998ECF8****\\""}, "FilePath": "gthnjXGQ-no-surfix", "PartSize": 5242880, "DownloadInfo": {"Offset": -1, "CRC64": 0}}}' + _write_file(cp.cp_filepath, cpdata.encode()) + self.assertFalse(cp._is_valid()) + + # Offset % + cpdata = '{"Magic": "92611BED-89E2-46B6-89E5-72F273D4B0A3", "MD5": "d4b74aa6f9e6311925956684619e824c", "Data": {"ObjectInfo": {"Name": "oss://bucket/key", "VersionId": "", "Range": ""}, "ObjectMeta": {"Size": 344606, "LastModified": "Fri, 24 Feb 2012 06:07:48 GMT", "ETag": "\\"D41D8CD98F00B204E9800998ECF8****\\""}, "FilePath": "gthnjXGQ-no-surfix", "PartSize": 5242880, "DownloadInfo": {"Offset": 1, "CRC64": 0}}}' + _write_file(cp.cp_filepath, cpdata.encode()) + self.assertFalse(cp._is_valid()) + + # check sum equal + cp.cp_info["Data"]["PartSize"] = 6 + cp.verify_data = True + data = "hello world!" + cpdata = '{"Magic": "92611BED-89E2-46B6-89E5-72F273D4B0A3", "MD5": "f58be84fc61d45ef092d056b200e85b5", "Data": {"ObjectInfo": {"Name": "oss://bucket/key", "VersionId": "", "Range": ""}, "ObjectMeta": {"Size": 344606, "LastModified": "Fri, 24 Feb 2012 06:07:48 GMT", "ETag": "\\"D41D8CD98F00B204E9800998ECF8****\\""}, "FilePath": "gthnjXGQ-no-surfix", "PartSize": 6, "DownloadInfo": {"Offset": 12, "CRC64": 9548687815775124833}}}' + _write_file(dest_filepath, data.encode()) + _write_file(cp.cp_filepath, cpdata.encode()) + self.assertTrue(cp._is_valid()) + self.assertEqual(12, cp.doffset) + self.assertEqual(9548687815775124833, cp.dcrc64) + + # check sum not equal + cp.cp_info["Data"]["PartSize"] = 6 + cp.verify_data = True + data = "hello world!" 
+ cpdata = '{"Magic": "92611BED-89E2-46B6-89E5-72F273D4B0A3", "MD5": "96b19210cc0f66aa0e4afec872bffc8b", "Data": {"ObjectInfo": {"Name": "oss://bucket/key", "VersionId": "", "Range": ""}, "ObjectMeta": {"Size": 344606, "LastModified": "Fri, 24 Feb 2012 06:07:48 GMT", "ETag": "\\"D41D8CD98F00B204E9800998ECF8****\\""}, "FilePath": "gthnjXGQ-no-surfix", "PartSize": 6, "DownloadInfo": {"Offset": 12, "CRC64": 9548687815775124834}}}' + _write_file(dest_filepath, data.encode()) + _write_file(cp.cp_filepath, cpdata.encode()) + self.assertFalse(cp._is_valid()) + + _remove_slice(dest_filepath) + _remove_slice(cp.cp_filepath) + + def test_upload_checkpoint(self): + request = models.PutObjectRequest( + bucket='bucket', + key='key' + ) + + src_filepath = "upload_checkpoint-no-surfix" + cpdir = "." + fileinfo = os.stat("./tests/data/example.jpg") + part_size = defaults.DEFAULT_UPLOAD_PART_SIZE + cp = checkpoint.UploadCheckpoint(request, src_filepath, cpdir, fileinfo, part_size) + + self.assertIsNotNone(cp) + self.assertEqual(fileinfo.st_mtime, cp.cp_info["Data"]["FileMeta"]["LastModified"]) + self.assertEqual(fileinfo.st_size, cp.cp_info["Data"]["FileMeta"]["Size"]) + + self.assertEqual("oss://bucket/key", cp.cp_info["Data"]["ObjectInfo"]["Name"]) + + self.assertEqual(defaults.CHECKPOINT_MAGIC, cp.cp_info["Magic"]) + self.assertEqual(None, cp.cp_info.get("MD5", None)) + + self.assertEqual(src_filepath, cp.cp_info["Data"]["FilePath"]) + self.assertEqual(defaults.DEFAULT_UPLOAD_PART_SIZE, cp.cp_info["Data"]["PartSize"]) + + #check dump + cp.upload_id = "upload-id" + done = cp.dump() + self.assertEqual(True, done) + self.assertTrue(os.path.isfile(cp.cp_filepath)) + content = b'' + with open(cp.cp_filepath, 'rb') as f: + content = f.read() + self.assertTrue(len(content) > 0) + info = json.loads(content) + + self.assertEqual(fileinfo.st_mtime, info["Data"]["FileMeta"]["LastModified"]) + self.assertEqual(fileinfo.st_size, info["Data"]["FileMeta"]["Size"]) + + self.assertEqual("oss://bucket/key", info["Data"]["ObjectInfo"]["Name"]) + + self.assertEqual(src_filepath, info["Data"]["FilePath"]) + self.assertEqual(defaults.DEFAULT_UPLOAD_PART_SIZE, info["Data"]["PartSize"]) + + self.assertEqual("upload-id", info["Data"]["UploadInfo"]["UploadId"]) + + #check load + cp.load() + self.assertEqual(True, cp.loaded) + + #check valid + self.assertEqual(True, cp._is_valid()) + + #check complete + self.assertEqual(True, os.path.isfile(cp.cp_filepath)) + cp.remove() + self.assertEqual(False, os.path.isfile(cp.cp_filepath)) + + #load not match + cp = checkpoint.UploadCheckpoint(request, src_filepath, cpdir, fileinfo, part_size) + self.assertEqual(False, cp.loaded) + not_match = '{"Magic":"92611BED-89E2-46B6-89E5-72F273D4B0A3","MD5":"5ff2e8fbddc007157488c1087105f6d2","Data":{"FilePath":"vhetHfkY-no-surfix","FileMeta":{"Size":100,"LastModified":"2024-01-08 16:46:27.7178907 +0800 CST m=+0.014509001"},"ObjectInfo":{"Name":"oss://bucket/key"},"PartSize":5242880,"UploadInfo":{"UploadId":""}}}' + _write_file(cp.cp_filepath, not_match.encode()) + self.assertEqual(True, os.path.isfile(cp.cp_filepath)) + cp.load() + self.assertEqual(False, cp.loaded) + self.assertEqual(False, os.path.isfile(cp.cp_filepath)) + + + def test_upload_checkpoint_invalid_cppath(self): + request = models.PutObjectRequest( + bucket='bucket', + key='key' + ) + + src_filepath = "upload_checkpoint-no-surfix" + cpdir = "./invliad-dir/" + fileinfo = os.stat("./tests/data/example.jpg") + part_size = defaults.DEFAULT_UPLOAD_PART_SIZE + cp = 
checkpoint.UploadCheckpoint(request, src_filepath, cpdir, fileinfo, part_size) + self.assertEqual(src_filepath, cp.cp_info["Data"]["FilePath"]) + self.assertEqual("./invliad-dir", cp.cp_dirpath) + self.assertIn("invliad-dir", cp.cp_filepath) + + done = cp.dump() + self.assertEqual(False, done) + + #load fail + try: + cp.load() + self.fail("should not here") + except Exception as err: + self.assertIn("Invaid checkpoint dir", str(err)) + + def test_upload_checkpoint_valid(self): + request = models.PutObjectRequest( + bucket='bucket', + key='key' + ) + + src_filepath = "athnjXGQ-no-surfix" + cpdir = "." + fileinfo = os.stat_result((33206, 1688849865056404,13015084316502127928, 1, 0, 0, 21839, 1725449727, 1724840443, 1724840443), {}) + part_size = 5 * 1024 *1024 + cp = checkpoint.UploadCheckpoint(request, src_filepath, cpdir, fileinfo, part_size) + + _remove_slice(cp.cp_filepath) + self.assertEqual('', cp.upload_id) + cpdata = '{"Magic": "92611BED-89E2-46B6-89E5-72F273D4B0A3", "MD5": "4e42d5e63ed9a59bb896e55a794320e7", "Data": {"FilePath": "athnjXGQ-no-surfix", "FileMeta": {"Size": 21839, "LastModified": 1724840443}, "ObjectInfo": {"Name": "oss://bucket/key"}, "PartSize": 5242880, "UploadInfo": {"UploadId": "upload-id"}}}' + _write_file(cp.cp_filepath, cpdata.encode()) + self.assertTrue(cp._is_valid()) + self.assertEqual('upload-id', cp.upload_id) + + # md5 fail + cpdata = '{"Magic": "92611BED-89E2-46B6-89E5-72F273D4B0A3", "MD5": "1e42d5e63ed9a59bb896e55a794320e7", "Data": {"FilePath": "athnjXGQ-no-surfix", "FileMeta": {"Size": 21839, "LastModified": 1724840443}, "ObjectInfo": {"Name": "oss://bucket/key"}, "PartSize": 5242880, "UploadInfo": {"UploadId": "upload-id"}}}' + _write_file(cp.cp_filepath, cpdata.encode()) + self.assertFalse(cp._is_valid()) + + # Magic fail + cpdata = '{"Magic": "82611BED-89E2-46B6-89E5-72F273D4B0A3", "MD5": "4e42d5e63ed9a59bb896e55a794320e7", "Data": {"FilePath": "athnjXGQ-no-surfix", "FileMeta": {"Size": 21839, "LastModified": 1724840443}, "ObjectInfo": {"Name": "oss://bucket/key"}, "PartSize": 5242880, "UploadInfo": {"UploadId": "upload-id"}}}' + _write_file(cp.cp_filepath, cpdata.encode()) + self.assertFalse(cp._is_valid()) + + # invalid cp format + cpdata = '"Magic": "92611BED-89E2-46B6-89E5-72F273D4B0A3", "MD5": "4e42d5e63ed9a59bb896e55a794320e7", "Data": {"FilePath": "athnjXGQ-no-surfix", "FileMeta": {"Size": 21839, "LastModified": 1724840443}, "ObjectInfo": {"Name": "oss://bucket/key"}, "PartSize": 5242880, "UploadInfo": {"UploadId": "upload-id"}}}' + _write_file(cp.cp_filepath, cpdata.encode()) + self.assertFalse(cp._is_valid()) + + # FilePath not equal + cpdata = '{"Magic": "92611BED-89E2-46B6-89E5-72F273D4B0A3", "MD5": "0ad12fa896c688cf90de0961423e253d", "Data": {"FilePath": "athnjXGQ-no-surfix-1", "FileMeta": {"Size": 21839, "LastModified": 1724840443}, "ObjectInfo": {"Name": "oss://bucket/key"}, "PartSize": 5242880, "UploadInfo": {"UploadId": "upload-id"}}}' + _write_file(cp.cp_filepath, cpdata.encode()) + self.assertFalse(cp._is_valid()) + + # FileMeta not equal + cpdata = '{"Magic": "92611BED-89E2-46B6-89E5-72F273D4B0A3", "MD5": "aa5ed50adf5d52cf978ee29aea958d0f", "Data": {"FilePath": "athnjXGQ-no-surfix", "FileMeta": {"Size": 21839, "LastModified": 1724840444}, "ObjectInfo": {"Name": "oss://bucket/key"}, "PartSize": 5242880, "UploadInfo": {"UploadId": "upload-id"}}}' + _write_file(cp.cp_filepath, cpdata.encode()) + self.assertFalse(cp._is_valid()) + + # ObjectInfo not equal + cpdata = '{"Magic": "92611BED-89E2-46B6-89E5-72F273D4B0A3", "MD5": 
"44347464d321317bbc86c9d17dbcca9e", "Data": {"FilePath": "athnjXGQ-no-surfix", "FileMeta": {"Size": 21839, "LastModified": 1724840443}, "ObjectInfo": {"Name": "oss://bucket/key-1"}, "PartSize": 5242880, "UploadInfo": {"UploadId": "upload-id"}}}' + _write_file(cp.cp_filepath, cpdata.encode()) + self.assertFalse(cp._is_valid()) + + # PartSize not equal + cpdata = '{"Magic": "92611BED-89E2-46B6-89E5-72F273D4B0A3", "MD5": "18127e61e60aa011f38d5cba64b1fe47", "Data": {"FilePath": "athnjXGQ-no-surfix", "FileMeta": {"Size": 21839, "LastModified": 1724840443}, "ObjectInfo": {"Name": "oss://bucket/key"}, "PartSize": 52428800, "UploadInfo": {"UploadId": "upload-id"}}}' + _write_file(cp.cp_filepath, cpdata.encode()) + self.assertFalse(cp._is_valid()) + + # uploadId invalid + cpdata = '{"Magic": "92611BED-89E2-46B6-89E5-72F273D4B0A3", "MD5": "38dd6a5cde64c5b0499d70b70fe5899c", "Data": {"FilePath": "athnjXGQ-no-surfix", "FileMeta": {"Size": 21839, "LastModified": 1724840443}, "ObjectInfo": {"Name": "oss://bucket/key"}, "PartSize": 5242880, "UploadInfo": {"UploadId": ""}}}' + _write_file(cp.cp_filepath, cpdata.encode()) + self.assertFalse(cp._is_valid()) + + _remove_slice(src_filepath) + _remove_slice(cp.cp_filepath) \ No newline at end of file diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py new file mode 100644 index 0000000..e1e722a --- /dev/null +++ b/tests/unit/test_client.py @@ -0,0 +1,179 @@ +# pylint: skip-file +import tempfile +import unittest +from alibabacloud_oss_v2 import models, config, client, credentials, exceptions +from alibabacloud_oss_v2.types import HttpRequest, HttpResponse, HttpClient +from . import MockHttpResponse, MockHttpClient + + +def _mock_client(request_fn, response_fn, **kwargs): + cfg = config.load_default() + cfg.region = 'cn-hangzhou' + cfg.credentials_provider = credentials.AnonymousCredentialsProvider() + cfg.http_client = MockHttpClient( + request_fn=request_fn, + response_fn=response_fn, + kwargs=kwargs + ) + return client.Client(cfg) + +def _get_tempfile() -> str: + filename = '' + with tempfile.TemporaryFile('w+b', delete=False) as f: + filename = f.name + return filename + + +progress_save_n = 0 +def _progress_fn(n, _written, total): + global progress_save_n + progress_save_n += n + +class TestClientBase(unittest.TestCase): + def setUp(self): + self.set_requestFunc(None) + self.set_responseFunc(None) + + def tearDown(self): + pass + + @classmethod + def setUpClass(cls): + cls.request_dump: HttpRequest = None + cls.client = _mock_client(cls.requestFunc, cls.responseFunc) + cls.invoke_request = None + cls.invoke_response = None + + @classmethod + def tearDownClass(cls): + pass + + @classmethod + def requestFunc(cls, request: HttpRequest): + cls.request_dump = request + if cls.invoke_request is not None: + cls.invoke_request(request) + + @classmethod + def responseFunc(cls) -> MockHttpResponse: + if cls.invoke_response is not None: + return cls.invoke_response() + + return MockHttpResponse( + status_code=200, + reason='OK', + headers={'x-oss-request-id': 'id-1234'}, + body='' + ) + + @classmethod + def set_requestFunc(cls, fn): + cls.invoke_request = fn + + @classmethod + def set_responseFunc(cls, fn): + cls.invoke_response = fn + + @classmethod + def response_403_InvalidAccessKeyId(cls) -> MockHttpResponse: + err_xml = r''' + + InvalidAccessKeyId + The OSS Access Key Id you provided does not exist in our records. 
+ id-1234 + oss-cn-hangzhou.aliyuncs.com + ak + 0002-00000902 + https://api.aliyun.com/troubleshoot?q=0002-00000902 + + ''' + return MockHttpResponse( + status_code=403, + reason='Forbidden', + headers={ + 'Server': 'AliyunOSS', + 'Date': 'Tue, 23 Jul 2024 13:01:06 GMT', + 'Content-Type': 'application/xml', + 'x-oss-ec': '0002-00000902', + 'x-oss-request-id': 'id-1234', + }, + body=err_xml.encode() + ) + +class TestClientExtension(TestClientBase): + def test_get_object_to_file(self): + def response_200() -> MockHttpResponse: + return MockHttpResponse( + status_code=200, + reason='OK', + headers={ + 'Server': 'AliyunOSS', + 'Date': 'Tue, 03 Sep 2024 06:33:10 GMT', + 'Content-Length': '11', + 'Content-MD5': 'XrY7u+Ae7tCTyyK7j1rNww==', + 'x-oss-request-id': 'id-1234', + 'x-oss-hash-crc64ecma': '5981764153023615706', + }, + body=b'hello world' + ) + + self.set_responseFunc(response_200) + request = models.GetObjectRequest( + bucket='bucket', + key='123%456+789#0', + ) + filepath = _get_tempfile() + result = self.client.get_object_to_file(request, filepath) + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/123%25456%2B789%230', self.request_dump.url) + self.assertEqual('GET', self.request_dump.method) + self.assertEqual('5981764153023615706', result.hash_crc64) + + data = b'' + with open(filepath, 'rb') as f: + data = f.read() + + self.assertEqual(b'hello world', data) + + #progress + global progress_save_n + progress_save_n = 0 + request = models.GetObjectRequest( + bucket='bucket', + key='123%456+789#0', + progress_fn=_progress_fn, + ) + filepath = _get_tempfile() + result = self.client.get_object_to_file(request, filepath) + self.assertEqual('https://bucket.oss-cn-hangzhou.aliyuncs.com/123%25456%2B789%230', self.request_dump.url) + self.assertEqual('GET', self.request_dump.method) + self.assertEqual('5981764153023615706', result.hash_crc64) + self.assertEqual(11, progress_save_n) + + def test_get_object_to_file_crc_fail(self): + def response_200() -> MockHttpResponse: + return MockHttpResponse( + status_code=200, + reason='OK', + headers={ + 'Server': 'AliyunOSS', + 'Date': 'Tue, 03 Sep 2024 06:33:10 GMT', + 'Content-Length': '11', + 'Content-MD5': 'XrY7u+Ae7tCTyyK7j1rNww==', + 'x-oss-request-id': 'id-1234', + 'x-oss-hash-crc64ecma': '5981764153023615707', + }, + body=b'hello world' + ) + + self.set_responseFunc(response_200) + request = models.GetObjectRequest( + bucket='bucket', + key='123%456+789#0', + ) + filepath = _get_tempfile() + try: + self.client.get_object_to_file(request, filepath) + self.fail('should not here') + except exceptions.InconsistentError as err: + self.assertIn('crc is inconsistent, client 5981764153023615706, server 5981764153023615707', str(err)) + diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py new file mode 100644 index 0000000..cbe6dd8 --- /dev/null +++ b/tests/unit/test_config.py @@ -0,0 +1,6 @@ +import alibabacloud_oss_v2.config + +import unittest + +class TestConfig(unittest.TestCase): + """TODO""" diff --git a/tests/unit/test_crc.py b/tests/unit/test_crc.py new file mode 100644 index 0000000..dab3a33 --- /dev/null +++ b/tests/unit/test_crc.py @@ -0,0 +1,30 @@ +# pylint: skip-file +import unittest +from alibabacloud_oss_v2 import crc + +class TestCrc(unittest.TestCase): + def test_crc64_combine(self): + _POLY = 0x142F0E1EBA9EA3693 + _XOROUT = 0XFFFFFFFFFFFFFFFF + + string_a = b'12345' + string_b = b'67890' + + crc64_a = crc.Crc64(0) + crc64_a.update(string_a) + crc1 = crc64_a.sum64() + + crc64_b = crc.Crc64(0) + 
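+        # crc64_b digests the second chunk independently; Crc64.combine(crc1,
+        # crc2, len_b) below should fold the two digests into the digest of the
+        # concatenated stream, which is the property that lets multipart
+        # transfers be checksummed part-by-part without re-reading earlier parts.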
crc64_b.update(string_b) + crc2 = crc64_b.sum64() + + crc_combine = crc.Crc64.combine(crc1, crc2, len(string_b)) + + crc64_c = crc.Crc64(0) + crc64_c.update(string_a + string_b) + crc_raw = crc64_c.sum64() + + self.assertEqual(crc_combine, crc_raw) + + + diff --git a/tests/unit/test_io_utils.py b/tests/unit/test_io_utils.py new file mode 100644 index 0000000..068d310 --- /dev/null +++ b/tests/unit/test_io_utils.py @@ -0,0 +1,39 @@ +# pylint: skip-file +import io +import unittest +from alibabacloud_oss_v2 import io_utils + +class TestIoUtils(unittest.TestCase): + def test_limit_reader(self): + rb = io.StringIO("hello") + r = io_utils.LimitReader(rb, 2) + d = r.read() + self.assertEqual('he', d) + + rb = io.StringIO("hello") + r = io_utils.LimitReader(rb, 3) + d = r.read(10) + self.assertEqual('hel', d) + + rb = io.StringIO("hello") + r = io_utils.LimitReader(rb, 3) + d = r.read(1) + self.assertEqual('h', d) + + + rb = io.BytesIO(b"hello") + r = io_utils.LimitReader(rb, 2) + d = r.read() + self.assertEqual(b'he', d) + + rb = io.BytesIO(b"hello") + r = io_utils.LimitReader(rb, 3) + d = r.read(10) + self.assertEqual(b'hel', d) + + rb = io.BytesIO(b"hello") + r = io_utils.LimitReader(rb, 3) + d = r.read(1) + self.assertEqual(b'h', d) + + diff --git a/tests/unit/test_presign.py b/tests/unit/test_presign.py new file mode 100644 index 0000000..95c6a47 --- /dev/null +++ b/tests/unit/test_presign.py @@ -0,0 +1,351 @@ +# pylint: skip-file +import datetime +import unittest +from typing import cast +from urllib.parse import quote +from alibabacloud_oss_v2 import models as model +from alibabacloud_oss_v2 import config +from alibabacloud_oss_v2 import credentials +from alibabacloud_oss_v2.client import Client +from alibabacloud_oss_v2 import exceptions + + +class TestPresign(unittest.TestCase): + def test_presign_v1(self): + cfg = config.load_default() + cfg.region = 'cn-hangzhou' + cfg.signature_version = 'v1' + cfg.credentials_provider = credentials.StaticCredentialsProvider("ak", "sk") + client = Client(cfg) + + request = model.GetObjectRequest( + bucket='bucket', + key='key' + ) + + expiration = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(hours=1) + result = client.presign(request, expiration=expiration) + + self.assertEqual('GET', result.method) + self.assertEqual(expiration, result.expiration) + self.assertEqual(0, len(result.signed_headers)) + self.assertIn("bucket.oss-cn-hangzhou.aliyuncs.com/key?", result.url) + self.assertIn("OSSAccessKeyId=ak", result.url) + self.assertIn(f'Expires={int(expiration.timestamp())}', result.url) + self.assertIn("Signature=", result.url) + + expires = datetime.timedelta(minutes=50) + expiration = datetime.datetime.now(datetime.timezone.utc) + expires + result = client.presign(request, expires=expires) + self.assertEqual('GET', result.method) + self.assertLess((result.expiration - expiration).seconds, 2) + self.assertEqual(0, len(result.signed_headers)) + self.assertIn("bucket.oss-cn-hangzhou.aliyuncs.com/key?", result.url) + self.assertIn("OSSAccessKeyId=ak", result.url) + self.assertIn(f'Expires={int(result.expiration.timestamp())}', result.url) + self.assertIn("Signature=", result.url) + + def test_presign_token_v1(self): + cfg = config.load_default() + cfg.region = 'cn-hangzhou' + cfg.signature_version = 'v1' + cfg.credentials_provider = credentials.StaticCredentialsProvider("ak", "sk", "token") + client = Client(cfg) + + request = model.GetObjectRequest( + bucket='bucket', + key='key' + ) + + expiration = 
datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(hours=1) + result = client.presign(request, expiration=expiration) + + self.assertEqual('GET', result.method) + self.assertEqual(expiration, result.expiration) + self.assertEqual(0, len(result.signed_headers)) + self.assertIn("bucket.oss-cn-hangzhou.aliyuncs.com/key?", result.url) + self.assertIn("OSSAccessKeyId=ak", result.url) + self.assertIn(f'Expires={int(expiration.timestamp())}', result.url) + self.assertIn("Signature=", result.url) + self.assertIn("security-token=token", result.url) + + def test_presign_with_header_v1(self): + cfg = config.load_default() + cfg.region = 'cn-hangzhou' + cfg.signature_version = 'v1' + cfg.credentials_provider = credentials.StaticCredentialsProvider("ak", "sk", "token") + client = Client(cfg) + + request = model.GetObjectRequest( + bucket='bucket', + key='key', + headers={"Content-Type": "application/octet-stream"}, + ) + + expiration = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(hours=1) + result = client.presign(request, expiration=expiration) + + self.assertEqual('GET', result.method) + self.assertEqual(expiration, result.expiration) + self.assertEqual(1, len(result.signed_headers)) + self.assertEqual("application/octet-stream", result.signed_headers.get('Content-Type')) + self.assertIn("bucket.oss-cn-hangzhou.aliyuncs.com/key?", result.url) + self.assertIn("OSSAccessKeyId=ak", result.url) + self.assertIn(f'Expires={int(expiration.timestamp())}', result.url) + self.assertIn("Signature=", result.url) + self.assertIn("security-token=token", result.url) + + def test_presign_v4(self): + cfg = config.load_default() + cfg.region = 'cn-hangzhou' + cfg.signature_version = 'v4' + cfg.credentials_provider = credentials.StaticCredentialsProvider("ak", "sk") + client = Client(cfg) + + request = model.GetObjectRequest( + bucket='bucket', + key='key' + ) + + timedelta = datetime.timedelta(hours=1) + datetime_now = datetime.datetime.now(datetime.timezone.utc) + expiration = datetime_now + timedelta + credential = f'ak/{datetime_now.strftime("%Y%m%d")}/cn-hangzhou/oss/aliyun_v4_request' + + result = client.presign(request, expiration=expiration) + + self.assertEqual('GET', result.method) + self.assertEqual(expiration, result.expiration) + self.assertEqual(0, len(result.signed_headers)) + self.assertIn("bucket.oss-cn-hangzhou.aliyuncs.com/key?", result.url) + self.assertIn(f'x-oss-date={datetime_now.strftime("%Y%m%dT%H%M%SZ")}', result.url) + self.assertTrue(f'x-oss-expires={int(timedelta.seconds)}' in result.url or f'x-oss-expires={int(timedelta.seconds-1)}' in result.url) + self.assertIn("x-oss-signature=", result.url) + self.assertIn(f'x-oss-credential={quote(credential, safe="")}', result.url) + self.assertIn("x-oss-signature-version=OSS4-HMAC-SHA256", result.url) + + + timedelta = datetime.timedelta(minutes=50) + datetime_now = datetime.datetime.now(datetime.timezone.utc) + expiration = datetime_now + timedelta + credential = f'ak/{datetime_now.strftime("%Y%m%d")}/cn-hangzhou/oss/aliyun_v4_request' + + result = client.presign(request, expires=timedelta) + + self.assertEqual('GET', result.method) + self.assertLess((result.expiration - expiration).seconds, 2) + self.assertEqual(0, len(result.signed_headers)) + self.assertIn("bucket.oss-cn-hangzhou.aliyuncs.com/key?", result.url) + self.assertIn(f'x-oss-date={datetime_now.strftime("%Y%m%dT%H%M%SZ")}', result.url) + self.assertTrue(f'x-oss-expires={int(timedelta.seconds)}' in result.url or 
f'x-oss-expires={int(timedelta.seconds-1)}' in result.url) + self.assertIn("x-oss-signature=", result.url) + self.assertIn(f'x-oss-credential={quote(credential, safe="")}', result.url) + self.assertIn("x-oss-signature-version=OSS4-HMAC-SHA256", result.url) + + + def test_presign_token_v4(self): + cfg = config.load_default() + cfg.region = 'cn-hangzhou' + cfg.signature_version = 'v4' + cfg.credentials_provider = credentials.StaticCredentialsProvider("ak", "sk", "token") + client = Client(cfg) + + request = model.GetObjectRequest( + bucket='bucket', + key='key' + ) + + timedelta = datetime.timedelta(hours=1) + datetime_now = datetime.datetime.now(datetime.timezone.utc) + expiration = datetime_now + timedelta + credential = f'ak/{datetime_now.strftime("%Y%m%d")}/cn-hangzhou/oss/aliyun_v4_request' + + result = client.presign(request, expiration=expiration) + + self.assertEqual('GET', result.method) + self.assertEqual(expiration, result.expiration) + self.assertEqual(0, len(result.signed_headers)) + self.assertIn("bucket.oss-cn-hangzhou.aliyuncs.com/key?", result.url) + self.assertIn(f'x-oss-date={datetime_now.strftime("%Y%m%dT%H%M%SZ")}', result.url) + self.assertTrue(f'x-oss-expires={int(timedelta.seconds)}' in result.url or f'x-oss-expires={int(timedelta.seconds-1)}' in result.url) + self.assertIn("x-oss-signature=", result.url) + self.assertIn(f'x-oss-credential={quote(credential, safe="")}', result.url) + self.assertIn("x-oss-signature-version=OSS4-HMAC-SHA256", result.url) + self.assertIn("x-oss-security-token=token", result.url) + + + def test_presign_with_header_v4(self): + cfg = config.load_default() + cfg.region = 'cn-hangzhou' + cfg.signature_version = 'v4' + cfg.credentials_provider = credentials.StaticCredentialsProvider("ak", "sk", "token") + client = Client(cfg) + + request = model.GetObjectRequest( + bucket='bucket', + key='key', + headers={"Content-Type": "application/octet-stream"}, + ) + + timedelta = datetime.timedelta(hours=1) + datetime_now = datetime.datetime.now(datetime.timezone.utc) + expiration = datetime_now + timedelta + credential = f'ak/{datetime_now.strftime("%Y%m%d")}/cn-hangzhou/oss/aliyun_v4_request' + + result = client.presign(request, expiration=expiration) + + self.assertEqual('GET', result.method) + self.assertEqual(expiration, result.expiration) + self.assertEqual(1, len(result.signed_headers)) + self.assertEqual("application/octet-stream", result.signed_headers.get('Content-Type')) + self.assertIn("bucket.oss-cn-hangzhou.aliyuncs.com/key?", result.url) + self.assertIn(f'x-oss-date={datetime_now.strftime("%Y%m%dT%H%M%SZ")}', result.url) + self.assertTrue(f'x-oss-expires={int(timedelta.seconds)}' in result.url or f'x-oss-expires={int(timedelta.seconds-1)}' in result.url) + self.assertIn("x-oss-signature=", result.url) + self.assertIn(f'x-oss-credential={quote(credential, safe="")}', result.url) + self.assertIn("x-oss-signature-version=OSS4-HMAC-SHA256", result.url) + self.assertIn("x-oss-security-token=token", result.url) + + def test_presign_query_v1(self): + cfg = config.load_default() + cfg.region = 'cn-hangzhou' + cfg.signature_version = 'v1' + cfg.credentials_provider = credentials.StaticCredentialsProvider("ak", "sk") + client = Client(cfg) + + request = model.GetObjectRequest( + bucket='bucket', + key='key', + parameters={"x-oss-process": "abc"} + ) + + expiration = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(hours=1) + result = client.presign(request, expiration=expiration) + + self.assertEqual('GET', result.method) + 
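+        # Caller-supplied query parameters (x-oss-process here) are expected to
+        # pass through presigning unchanged and appear in the final URL next to
+        # the signature fields, as asserted below.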
self.assertEqual(expiration, result.expiration) + self.assertEqual(0, len(result.signed_headers)) + self.assertIn("bucket.oss-cn-hangzhou.aliyuncs.com/key?", result.url) + self.assertIn("OSSAccessKeyId=ak", result.url) + self.assertIn(f'Expires={int(expiration.timestamp())}', result.url) + self.assertIn("Signature=", result.url) + self.assertIn("x-oss-process=abc", result.url) + + expires = datetime.timedelta(minutes=50) + expiration = datetime.datetime.now(datetime.timezone.utc) + expires + result = client.presign(request, expires=expires) + self.assertEqual('GET', result.method) + self.assertLess((result.expiration - expiration).seconds, 2) + self.assertEqual(0, len(result.signed_headers)) + self.assertIn("bucket.oss-cn-hangzhou.aliyuncs.com/key?", result.url) + self.assertIn("OSSAccessKeyId=ak", result.url) + self.assertIn(f'Expires={int(result.expiration.timestamp())}', result.url) + self.assertIn("Signature=", result.url) + self.assertIn("x-oss-process=abc", result.url) + + def test_presign_query_v4(self): + cfg = config.load_default() + cfg.region = 'cn-hangzhou' + cfg.signature_version = 'v4' + cfg.credentials_provider = credentials.StaticCredentialsProvider("ak", "sk") + client = Client(cfg) + + request = model.GetObjectRequest( + bucket='bucket', + key='key', + parameters={"x-oss-process": "abc"} + ) + + timedelta = datetime.timedelta(hours=1) + datetime_now = datetime.datetime.now(datetime.timezone.utc) + expiration = datetime_now + timedelta + credential = f'ak/{datetime_now.strftime("%Y%m%d")}/cn-hangzhou/oss/aliyun_v4_request' + + result = client.presign(request, expiration=expiration) + + self.assertEqual('GET', result.method) + self.assertEqual(expiration, result.expiration) + self.assertEqual(0, len(result.signed_headers)) + self.assertIn("bucket.oss-cn-hangzhou.aliyuncs.com/key?", result.url) + self.assertIn(f'x-oss-date={datetime_now.strftime("%Y%m%dT%H%M%SZ")}', result.url) + self.assertTrue(f'x-oss-expires={int(timedelta.seconds)}' in result.url or f'x-oss-expires={int(timedelta.seconds-1)}' in result.url) + self.assertIn("x-oss-signature=", result.url) + self.assertIn(f'x-oss-credential={quote(credential, safe="")}', result.url) + self.assertIn("x-oss-signature-version=OSS4-HMAC-SHA256", result.url) + self.assertIn("x-oss-process=abc", result.url) + + def test_presign(self): + cfg = config.load_default() + cfg.region = 'cn-hangzhou' + cfg.signature_version = 'v1' + cfg.credentials_provider = credentials.StaticCredentialsProvider("ak", "sk") + client = Client(cfg) + + request = model.GetObjectRequest( + bucket='bucket', + key='key', + version_id='versionId' + ) + + expiration = datetime.datetime.fromtimestamp(1699807420) + result = client.presign(request, expiration=expiration) + + self.assertEqual('GET', result.method) + self.assertEqual(0, len(result.signed_headers)) + self.assertIn("bucket.oss-cn-hangzhou.aliyuncs.com/key?", result.url) + self.assertIn("OSSAccessKeyId=ak", result.url) + self.assertIn('Expires=1699807420', result.url) + self.assertIn("Signature=dcLTea%2BYh9ApirQ8o8dOPqtvJXQ%3D", result.url) + + cfg.credentials_provider = credentials.StaticCredentialsProvider("ak", "sk", "token") + client = Client(cfg) + request = model.GetObjectRequest( + bucket='bucket', + key='key+123', + version_id='versionId' + ) + + expiration = datetime.datetime.fromtimestamp(1699808204) + result = client.presign(request, expiration=expiration) + + self.assertEqual('GET', result.method) + self.assertEqual(0, len(result.signed_headers)) + 
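+        # "+" in the object key must be percent-encoded in the signed path
+        # ("key+123" -> "key%2B123"); the fixed Signature below presumably
+        # covers that encoded form.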
self.assertIn("bucket.oss-cn-hangzhou.aliyuncs.com/key%2B123?", result.url) + self.assertIn("OSSAccessKeyId=ak", result.url) + self.assertIn('Expires=1699808204', result.url) + self.assertIn("Signature=jzKYRrM5y6Br0dRFPaTGOsbrDhY%3D", result.url) + self.assertIn('security-token=token', result.url) + + + def test_presign_fail(self): + cfg = config.load_default() + cfg.region = 'cn-hangzhou' + cfg.signature_version = 'v4' + cfg.credentials_provider = credentials.StaticCredentialsProvider("ak", "sk") + client = Client(cfg) + + #unsupport request + request = model.ListObjectsV2Request( + bucket='bucket' + ) + + try: + client.presign(request) + self.fail("should not here") + except Exception as err: + self.assertIsInstance(err, exceptions.ParamInvalidError) + + #greater than 7 days + request = model.GetObjectRequest( + bucket='bucket', + key='key+123', + version_id='versionId' + ) + try: + timedelta = datetime.timedelta(days=8) + datetime_now = datetime.datetime.now(datetime.timezone.utc) + expiration = datetime_now + timedelta + client.presign(request, expiration=expiration) + self.fail("should not here") + except Exception as err: + self.assertIsInstance(err, exceptions.PresignExpirationError) diff --git a/tests/unit/test_serde.py b/tests/unit/test_serde.py new file mode 100644 index 0000000..49fd061 --- /dev/null +++ b/tests/unit/test_serde.py @@ -0,0 +1,1917 @@ +# pylint: skip-file +import unittest +import datetime +import xml.etree.ElementTree as ET +from enum import Enum +from typing import List, Optional, Any, cast +from alibabacloud_oss_v2 import serde +from alibabacloud_oss_v2 import serde_utils +from alibabacloud_oss_v2 import exceptions +from alibabacloud_oss_v2.types import ( + OperationInput, + OperationOutput, + CaseInsensitiveDict, + HttpResponse, + HttpRequest, + MutableMapping +) + +class HttpResponseStub(HttpResponse): + def __init__(self, **kwargs) -> None: + super(HttpResponseStub, self).__init__() + self._data = kwargs.pop("data") + self._is_closed = False + self._is_stream_consumed = False + + @property + def request(self) -> HttpRequest: + return None + + @property + def is_closed(self) -> bool: + return self._is_closed + + @property + def is_stream_consumed(self) -> bool: + return self._is_stream_consumed + + @property + def status_code(self) -> int: + return 200 + + @property + def headers(self) -> MutableMapping[str, str]: + return {} + + @property + def reason(self) -> str: + return "OK" + + @property + def content(self) -> bytes: + return self._data + + def __repr__(self) -> str: + return 'HttpResponseStub' + + def __enter__(self) -> "HttpResponseStub": + return self + + def __exit__(self, *args) -> None: + self.close() + + def close(self) -> None: + if not self.is_closed: + self._is_closed = True + + def read(self) -> bytes: + return self.content + + def iter_bytes(self): + return iter([]) + + +class TestSerdeXml(unittest.TestCase): + def test_serialize_xml(self): + class BasicTypeMode(serde.Model): + """struct with basic type""" + _attribute_map = { + "str_field": {"tag": "xml", "rename": "StrFiled"}, + "int_field": {"tag": "xml", "rename": "IntFiled"}, + "bool_field": {"tag": "xml", "rename": "BoolFiled"}, + "float_field": {"tag": "xml", "rename": "FloatFiled"}, + } + _xml_map = { + "name": "BasicType" + } + + def __init__( + self, + str_field: Optional[str] = None, + int_field: Optional[int] = None, + bool_field: Optional[bool] = None, + float_field: Optional[float] = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.str_field = str_field + 
self.int_field = int_field + self.bool_field = bool_field + self.float_field = float_field + + class BasicTypeList(serde.Model): + """struct with basic type list""" + _attribute_map = { + "str_field": {"tag": "xml", "rename": "StrFiled"}, + } + _xml_map = { + "name": "BasicTypeList" + } + + def __init__( + self, + str_field: Optional[List[str]] = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.str_field = str_field + + class MixedType(serde.Model): + """struct with mixed type list""" + _attribute_map = { + "mixed_str_field": {"tag": "xml", "rename": "MixedStrFiled"}, + "mixed_int_field": {"tag": "xml", "rename": "MixedIntFiled"}, + "basic_type_list_sturct_filed": {"tag": "xml", "rename": "BasicTypeListFiled"}, + "basic_type_sturct_fileds": {"tag": "xml"}, + } + _xml_map = { + "name": "MixedTypeConfiguration" + } + + def __init__( + self, + mixed_str_field: Optional[str] = None, + mixed_int_field: Optional[int] = None, + basic_type_list_sturct_filed: Optional[BasicTypeList] = None, + basic_type_sturct_fileds: Optional[List[BasicTypeMode]] = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.mixed_str_field = mixed_str_field + self.mixed_int_field = mixed_int_field + self.basic_type_list_sturct_filed = basic_type_list_sturct_filed + self.basic_type_sturct_fileds = basic_type_sturct_fileds + + model = MixedType( + mixed_str_field='mixed_str', + mixed_int_field='1111', + basic_type_list_sturct_filed=BasicTypeList( + str_field=['123', '456', '789'] + ), + basic_type_sturct_fileds=[ + BasicTypeMode( + str_field='str-1', + int_field='1', + bool_field=False, + float_field=1.5, + ), + BasicTypeMode( + str_field='str-2', + int_field='2', + bool_field=True, + float_field=2.5, + ), + ] + ) + + xml_data = serde.serialize_xml(model) + self.assertIsNotNone(xml_data) + self.assertTrue(len(xml_data) > 0) + + root = ET.fromstring(xml_data) + self.assertEqual(5, len(root.findall('*'))) + + self.assertEqual('MixedTypeConfiguration', root.tag) + self.assertEqual('mixed_str', root.findtext('MixedStrFiled')) + self.assertEqual('1111', root.findtext('MixedIntFiled')) + elems = root.findall('BasicTypeList') + self.assertEqual(1, len(elems)) + self.assertEqual('BasicTypeList', elems[0].tag) + elems = root.findall('BasicTypeList//') + self.assertEqual('StrFiled', elems[0].tag) + self.assertEqual(3, len(elems)) + self.assertEqual('123', elems[0].text) + self.assertEqual('456', elems[1].text) + self.assertEqual('789', elems[2].text) + + elems = root.findall('BasicType') + self.assertEqual(2, len(elems)) + self.assertEqual('str-1', elems[0].findtext('StrFiled')) + self.assertEqual('1', elems[0].findtext('IntFiled')) + self.assertEqual('false', elems[0].findtext('BoolFiled')) + self.assertEqual('1.5', elems[0].findtext('FloatFiled')) + + self.assertEqual('str-2', elems[1].findtext('StrFiled')) + self.assertEqual('2', elems[1].findtext('IntFiled')) + self.assertEqual('true', elems[1].findtext('BoolFiled')) + self.assertEqual('2.5', elems[1].findtext('FloatFiled')) + + def test_serialize_xml_with_root_tag(self): + class BasicTypeMode(serde.Model): + """struct with basic type""" + _attribute_map = { + "str_field": {"tag": "xml", "rename": "StrFiled"}, + "int_field": {"tag": "xml", "rename": "IntFiled"}, + "bool_field": {"tag": "xml", "rename": "BoolFiled"}, + "float_field": {"tag": "xml", "rename": "FloatFiled"}, + } + _xml_map = { + "name": "BasicType" + } + + def __init__( + self, + str_field: Optional[str] = None, + int_field: Optional[int] = None, + bool_field: Optional[bool] = None, 
+ float_field: Optional[float] = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.str_field = str_field + self.int_field = int_field + self.bool_field = bool_field + self.float_field = float_field + + model = BasicTypeMode( + str_field='str-2', int_field='2', bool_field=True) + + xml_data = serde.serialize_xml(model) + self.assertIsNotNone(xml_data) + self.assertTrue(len(xml_data) > 0) + + root = ET.fromstring(xml_data) + self.assertEqual('BasicType', root.tag) + self.assertEqual(3, len(root.findall('*'))) + + xml_data = serde.serialize_xml(model, root='BasicType-123') + self.assertIsNotNone(xml_data) + self.assertTrue(len(xml_data) > 0) + + root = ET.fromstring(xml_data) + self.assertEqual('BasicType-123', root.tag) + self.assertEqual(3, len(root.findall('*'))) + + def test_serialize_xml_datatime(self): + class BasicTypeMode(serde.Model): + """struct with datatime type""" + _attribute_map = { + "isotime_field": {"tag": "xml", "rename": "IsoTimeFiled"}, + "httptime_field": {"tag": "xml", "rename": "HttpTimeFiled", "type": "httptime"}, + } + _xml_map = { + "name": "BasicType" + } + + def __init__( + self, + isotime_field: Optional[datetime.datetime] = None, + httptime_field: Optional[datetime.datetime] = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.isotime_field = isotime_field + self.httptime_field = httptime_field + + model = BasicTypeMode( + isotime_field=datetime.datetime.fromtimestamp(1702783809), + httptime_field=datetime.datetime.fromtimestamp(1702783809)) + + xml_data = serde.serialize_xml(model) + self.assertIsNotNone(xml_data) + self.assertTrue(len(xml_data) > 0) + + root = ET.fromstring(xml_data) + self.assertEqual('BasicType', root.tag) + self.assertEqual(2, len(root.findall('*'))) + self.assertEqual('2023-12-17T03:30:09Z', + root.findtext('IsoTimeFiled')) + self.assertEqual('Sun, 17 Dec 2023 03:30:09 GMT', + root.findtext('HttpTimeFiled')) + + def test_serialize_enum_type(self): + class EnumType(str, Enum): + PRIVATE = 'private' + PUBLICREAD = 'public-read' + PUBLICREADWRITE = 'public-read-write' + + class BasicTypeMode(serde.Model): + """struct with enum type""" + _attribute_map = { + "enum_field": {"tag": "xml", "rename": "EnumFiled"}, + } + _xml_map = { + "name": "BasicType" + } + + def __init__( + self, + enum_field: Optional[EnumType] = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.enum_field = enum_field + + model = BasicTypeMode(enum_field=EnumType.PRIVATE) + + xml_data = serde.serialize_xml(model) + self.assertIsNotNone(xml_data) + self.assertTrue(len(xml_data) > 0) + + root = ET.fromstring(xml_data) + self.assertEqual('BasicType', root.tag) + self.assertEqual(1, len(root.findall('*'))) + self.assertEqual('private', root.findtext('EnumFiled')) + + def test_serialize_not_support_type(self): + class BasicTypeMode(serde.Model): + """struct with other type""" + _attribute_map = { + "isotime_field": {"tag": "xml", "rename": "IsoTimeFiled"}, + } + _xml_map = { + "name": "BasicType" + } + + def __init__( + self, + isotime_field: Optional[any] = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.isotime_field = isotime_field + + class NotModeStruct: + """test struct""" + + def __init__( + self, + field: str, + ) -> None: + self.field = field + + try: + model = BasicTypeMode(isotime_field=NotModeStruct(field="test")) + serde.serialize_xml(model) + self.fail("not here") + except exceptions.SerializationError as e: + msg = str(e) + self.assertTrue( + 'Serialization raised an exception: Unsupport type' in 
msg) + + def test_deserialize_xml(self): + + class BasicTypeMode(serde.Model): + """struct with basic type""" + _attribute_map = { + "str_field": {"tag": "xml", "rename": "StrFiled"}, + "int_field": {"tag": "xml", "rename": "IntFiled", "type": "int"}, + "bool_field": {"tag": "xml", "rename": "BoolFiled", "type": "bool"}, + "float_field": {"tag": "xml", "rename": "FloatFiled", "type": "float"}, + "isotime_field": {"tag": "xml", "rename": "IsoTimeFiled", "type": "datetime"}, + "httptime_field": {"tag": "xml", "rename": "HttpTimeFiled", "type": "datetime,httptime"}, + "unixtime_field": {"tag": "xml", "rename": "UnixTimeFiled", "type": "datetime,unixtime"}, + } + _xml_map = { + "name": "BasicType" + } + + def __init__( + self, + str_field: Optional[str] = None, + int_field: Optional[int] = None, + bool_field: Optional[bool] = None, + float_field: Optional[float] = None, + isotime_field: Optional[datetime.datetime] = None, + httptime_field: Optional[datetime.datetime] = None, + unixtime_field: Optional[datetime.datetime] = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.str_field = str_field + self.int_field = int_field + self.bool_field = bool_field + self.float_field = float_field + self.isotime_field = isotime_field + self.httptime_field = httptime_field + self.unixtime_field = unixtime_field + + datetime_now = datetime.datetime.fromtimestamp(1702783809) + model = BasicTypeMode( + str_field='str-1', + int_field='1', + bool_field=False, + float_field=1.5, + isotime_field=datetime_now, + httptime_field=datetime_now, + unixtime_field=datetime_now + ) + + xml_data = serde.serialize_xml(model) + self.assertIsNotNone(xml_data) + self.assertTrue(len(xml_data) > 0) + + root = ET.fromstring(xml_data) + self.assertEqual('BasicType', root.tag) + self.assertEqual(7, len(root.findall('*'))) + self.assertEqual('2023-12-17T03:30:09Z', + root.findtext('IsoTimeFiled')) + self.assertEqual('Sun, 17 Dec 2023 03:30:09 GMT', + root.findtext('HttpTimeFiled')) + self.assertEqual('1702783809', root.findtext('UnixTimeFiled')) + + model2 = BasicTypeMode() + date_time = datetime.datetime.fromtimestamp( + 1702783809, tz=datetime.timezone.utc) + serde.deserialize_xml(xml_data, model2) + self.assertEqual('str-1', model2.str_field) + self.assertEqual(1, model2.int_field) + self.assertEqual(False, model2.bool_field) + self.assertEqual(1.5, model2.float_field) + self.assertEqual(date_time, model2.isotime_field) + self.assertEqual(date_time, model2.httptime_field) + self.assertEqual(date_time, model2.unixtime_field) + + def test_deserialize_xml_list(self): + + class BasicTypeMode(serde.Model): + """struct with basic type""" + _attribute_map = { + "str_fields": {"tag": "xml", "rename": "StrFiled","type": "[str]"}, + "int_fields": {"tag": "xml", "rename": "IntFiled", "type": "[int]"}, + "bool_fields": {"tag": "xml", "rename": "BoolFiled", "type": "[bool]"}, + "float_fields": {"tag": "xml", "rename": "FloatFiled", "type": "[float]"}, + "isotime_fields": {"tag": "xml", "rename": "IsoTimeFiled", "type": "[datetime]"}, + "httptime_fields": {"tag": "xml", "rename": "HttpTimeFiled", "type": "[datetime],httptime"}, + "unixtime_fields": {"tag": "xml", "rename": "UnixTimeFiled", "type": "[datetime],unixtime"}, + } + _xml_map = { + "name": "BasicType" + } + + def __init__( + self, + str_fields: Optional[List[str]] = None, + int_fields: Optional[List[int]] = None, + bool_fields: Optional[List[bool]] = None, + float_fields: Optional[List[float]] = None, + isotime_fields: Optional[List[datetime.datetime]] = None, + 
httptime_fields: Optional[List[datetime.datetime]] = None, + unixtime_fields: Optional[List[datetime.datetime]] = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.str_fields = str_fields + self.int_fields = int_fields + self.bool_fields = bool_fields + self.float_fields = float_fields + self.isotime_fields = isotime_fields + self.httptime_fields = httptime_fields + self.unixtime_fields = unixtime_fields + + datetime1 = datetime.datetime.fromtimestamp(1702783809) + datetime2 = datetime.datetime.fromtimestamp(1702783819) + datetime3 = datetime.datetime.fromtimestamp(1702783829) + model = BasicTypeMode( + str_fields=['str-1'], + int_fields=[1,2,3], + bool_fields=[False,False,True], + float_fields=[1.5, 2.5, 3,5], + isotime_fields=[datetime1], + httptime_fields=[datetime1,datetime2], + unixtime_fields=[datetime1,datetime2,datetime3], + ) + + xml_data = serde.serialize_xml(model) + self.assertIsNotNone(xml_data) + self.assertTrue(len(xml_data) > 0) + + root = ET.fromstring(xml_data) + self.assertEqual('BasicType', root.tag) + + elems = root.findall('StrFiled') + self.assertEqual(1, len(elems)) + self.assertEqual('str-1', elems[0].text) + + elems = root.findall('IntFiled') + self.assertEqual(3, len(elems)) + self.assertEqual('1', elems[0].text) + self.assertEqual('2', elems[1].text) + self.assertEqual('3', elems[2].text) + + elems = root.findall('BoolFiled') + self.assertEqual(3, len(elems)) + self.assertEqual('false', elems[0].text) + self.assertEqual('false', elems[1].text) + self.assertEqual('true', elems[2].text) + + elems = root.findall('FloatFiled') + self.assertEqual(4, len(elems)) + self.assertEqual('1.5', elems[0].text) + self.assertEqual('2.5', elems[1].text) + self.assertEqual('3', elems[2].text) + self.assertEqual('5', elems[3].text) + + elems = root.findall('IsoTimeFiled') + self.assertEqual(1, len(elems)) + self.assertEqual('2023-12-17T03:30:09Z', elems[0].text) + + + elems = root.findall('HttpTimeFiled') + self.assertEqual(2, len(elems)) + self.assertEqual('Sun, 17 Dec 2023 03:30:09 GMT', elems[0].text) + self.assertEqual('Sun, 17 Dec 2023 03:30:19 GMT', elems[1].text) + + elems = root.findall('UnixTimeFiled') + self.assertEqual(3, len(elems)) + self.assertEqual('1702783809', elems[0].text) + self.assertEqual('1702783819', elems[1].text) + self.assertEqual('1702783829', elems[2].text) + + model2 = BasicTypeMode() + self.assertIsNone(model2.str_fields) + self.assertIsNone(model2.int_fields) + self.assertIsNone(model2.bool_fields) + self.assertIsNone(model2.float_fields) + self.assertIsNone(model2.isotime_fields) + self.assertIsNone(model2.httptime_fields) + self.assertIsNone(model2.unixtime_fields) + + serde.deserialize_xml(xml_data, model2) + self.assertIsInstance(model2.str_fields, List) + self.assertSequenceEqual(model.str_fields, model2.str_fields) + + self.assertIsInstance(model2.int_fields, List) + self.assertSequenceEqual(model.int_fields, model2.int_fields) + + self.assertIsInstance(model2.bool_fields, List) + self.assertSequenceEqual(model.bool_fields, model2.bool_fields) + + self.assertIsInstance(model2.float_fields, List) + self.assertSequenceEqual(model.float_fields, model2.float_fields) + + datetime1_utc = datetime.datetime.fromtimestamp(1702783809, tz=datetime.timezone.utc) + datetime2_utc = datetime.datetime.fromtimestamp(1702783819, tz=datetime.timezone.utc) + datetime3_utc = datetime.datetime.fromtimestamp(1702783829, tz=datetime.timezone.utc) + + self.assertIsInstance(model2.isotime_fields, List) + self.assertEqual(datetime1_utc, 
model2.isotime_fields[0]) + + self.assertIsInstance(model2.httptime_fields, List) + self.assertEqual(datetime1_utc, model2.httptime_fields[0]) + self.assertEqual(datetime2_utc, model2.httptime_fields[1]) + + self.assertIsInstance(model2.unixtime_fields, List) + self.assertEqual(datetime1_utc, model2.unixtime_fields[0]) + self.assertEqual(datetime2_utc, model2.unixtime_fields[1]) + self.assertEqual(datetime3_utc, model2.unixtime_fields[2]) + + def test_deserialize_xml_multi_layer_struct(self): + + class ModelOne(serde.Model): + _attribute_map = { + "str_field": {"tag": "xml", "rename": "StrFiled"}, + "int_field": {"tag": "xml", "rename": "IntFiled", "type": "int"}, + } + _xml_map = { + "name": "ModelOne" + } + + def __init__( + self, + str_field: Optional[str] = None, + int_field: Optional[int] = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.str_field = str_field + self.int_field = int_field + + class ModelTwo(serde.Model): + _attribute_map = { + "bool_field": {"tag": "xml", "rename": "BoolFiled", "type": "bool"}, + "float_field": {"tag": "xml", "rename": "FloatFiled", "type": "float"}, + "model_one": {"tag": "xml", "rename": "ModelOne", "type": "ModelOne"}, + } + _dependency_map = { + "ModelOne": {"new":lambda :ModelOne()} + } + _xml_map = { + "name": "ModelTwo" + } + + def __init__( + self, + bool_field: Optional[str] = None, + float_field: Optional[int] = None, + model_one: Optional[ModelOne] = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.bool_field = bool_field + self.float_field = float_field + self.model_one = model_one + + class ModelThree(serde.Model): + _attribute_map = { + "isotime_field": {"tag": "xml", "rename": "IsoTimeFiled", "type": "datetime"}, + } + _xml_map = { + "name": "ModelThree" + } + + def __init__( + self, + isotime_field: Optional[datetime.datetime] = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.isotime_field = isotime_field + + class ModelTop(serde.Model): + _attribute_map = { + "id": {"tag": "xml", "rename": "Id", "type": "str"}, + "model_two": {"tag": "xml", "rename": "ModelTwo", "type": "ModelTwo"}, + "model_threes": {"tag": "xml", "rename": "ModelThree", "type": "[ModelThree]"}, + } + _xml_map = { + "name": "ModelTop" + } + _dependency_map = { + "ModelTwo": {"new":lambda :ModelTwo()}, + "ModelThree": {"new":lambda :ModelThree()} + } + + def __init__( + self, + id: Optional[str] = None, + model_two: Optional[ModelTwo] = None, + model_threes: Optional[List[ModelThree]] = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.id = id + self.model_two = model_two + self.model_threes = model_threes + + datetime_utc1 = datetime.datetime.fromtimestamp(1702783809, tz=datetime.timezone.utc) + datetime_utc2 = datetime.datetime.fromtimestamp(1702783819, tz=datetime.timezone.utc) + model = ModelTop( + id='id-1234', + model_two=ModelTwo( + bool_field=True, + #float_field=3.5, + model_one=ModelOne( + str_field='str-123', + #int_field=123, + ) + ), + model_threes=[ + ModelThree(isotime_field=datetime_utc1), + ModelThree(isotime_field=datetime_utc2), + ] + ) + + xml_data = serde.serialize_xml(model) + self.assertIsNotNone(xml_data) + self.assertTrue(len(xml_data) > 0) + + root = ET.fromstring(xml_data) + self.assertEqual('ModelTop', root.tag) + + model2 = ModelTop() + serde.deserialize_xml(xml_data, model2) + self.assertEqual('id-1234', model2.id) + self.assertEqual(True, model2.model_two.bool_field) + self.assertIsNone(model2.model_two.float_field) + self.assertEqual('str-123', 
model2.model_two.model_one.str_field)
+        self.assertIsNone(model2.model_two.model_one.int_field)
+        self.assertIsInstance(model2.model_threes, List)
+        self.assertEqual(2, len(model2.model_threes))
+        self.assertEqual(datetime_utc1, model2.model_threes[0].isotime_field)
+        self.assertEqual(datetime_utc2, model2.model_threes[1].isotime_field)
+
+        #test empty field
+        xml_data = '<ModelTop><ModelTwo><BoolFiled>false</BoolFiled></ModelTwo><ModelThree></ModelThree><ModelThree></ModelThree></ModelTop>'
+        model3 = ModelTop()
+        serde.deserialize_xml(xml_data, model3)
+        self.assertEqual(None, model3.id)
+        self.assertEqual(False, model3.model_two.bool_field)
+        self.assertIsNone(model3.model_two.float_field)
+        self.assertIsNone(model3.model_two.model_one)
+        self.assertIsInstance(model3.model_threes, List)
+        self.assertEqual(2, len(model3.model_threes))
+        self.assertIsNone(model3.model_threes[0].isotime_field)
+        self.assertIsNone(model3.model_threes[1].isotime_field)
+
+        #test empty field
+        xml_data = '<ModelTop><ModelTwo></ModelTwo><ModelThree></ModelThree></ModelTop>'
+        model4 = ModelTop()
+        serde.deserialize_xml(xml_data, model4)
+        self.assertEqual(None, model4.id)
+        self.assertIsNone(model4.model_two.bool_field)
+        self.assertIsNone(model4.model_two.float_field)
+        self.assertIsNone(model4.model_two.model_one)
+        self.assertIsInstance(model4.model_threes, List)
+        self.assertEqual(1, len(model4.model_threes))
+        self.assertIsNone(model4.model_threes[0].isotime_field)
+
+        #invalid xml
+        xml_data = 'ModelTop>'
+        model5 = ModelTop()
+        try:
+            serde.deserialize_xml(xml_data, model5)
+            self.fail("should not here")
+        except ET.ParseError:
+            pass
+        except:
+            self.fail("should not here")
+
+    def test_deserialize_xml_only_root(self):
+
+        class ModelOne(serde.Model):
+            _attribute_map = {
+                "str_root": {"tag": "xml", "rename": "."},
+            }
+            _xml_map = {
+                "name": "ModelOne"
+            }
+
+            def __init__(
+                self,
+                str_root: Optional[str] = None,
+                **kwargs,
+            ) -> None:
+                super().__init__(**kwargs)
+                self.str_root = str_root
+
+        xml_data = '<ModelOne>123</ModelOne>'
+        model = ModelOne()
+        serde.deserialize_xml(xml_data, model)
+        self.assertEqual("123", model.str_root)
+
+
+class TestSerdeOperation(unittest.TestCase):
+    def test_serialize_input(self):
+        class SubConfiguration(serde.Model):
+            def __init__(
+                self,
+                str_field: Optional[str] = None,
+                int_field: Optional[int] = None,
+                **kwargs: Any
+            ) -> None:
+                super().__init__(**kwargs)
+                self.str_field = str_field
+                self.int_field = int_field
+
+            _attribute_map = {
+                "str_field": {"tag": "xml", "rename": "StrFiled"},
+                "int_field": {"tag": "xml", "rename": "IntFiled", "type":"int"},
+            }
+            _xml_map = {
+                "name": "SubConfiguration"
+            }
+
+        class RootConfiguration(serde.Model):
+            def __init__(
+                self,
+                id: str,
+                text: str,
+                sub_configuration: Optional[List[SubConfiguration]] = None,
+                **kwargs: Any
+            ) -> None:
+                super().__init__(**kwargs)
+                self.id = id
+                self.text = text
+                self.sub_configuration = sub_configuration
+
+            _attribute_map = {
+                "id": {"tag": "xml", "rename": "Id"},
+                "text": {"tag": "xml", "rename": "Text"},
+                "sub_configuration": {"tag": "xml", "rename": "SubConfiguration", "type": "[SubConfiguration]"},
+            }
+            _xml_map = {
+                "name": "RootConfiguration"
+            }
+
+
+        class PutApiRequest(serde.RequestModel):
+            def __init__(
+                self,
+                bucket: Optional[str] = None,
+                key: Optional[str] = None,
+                str_header: Optional[str] = None,
+                int_header: Optional[int] = None,
+                bool_header: Optional[bool] = None,
+                float_header: Optional[float] = None,
+                isotime_header: Optional[datetime.datetime] = None,
+                httptime_header: Optional[datetime.datetime] = None,
+                unixtime_header: Optional[datetime.datetime] = None,
+                str_param: Optional[str] = None,
+                int_param: Optional[int] = None,
+class TestSerdeOperation(unittest.TestCase):
+    def test_serialize_input(self):
+        class SubConfiguration(serde.Model):
+            def __init__(
+                self,
+                str_field: Optional[str] = None,
+                int_field: Optional[int] = None,
+                **kwargs: Any
+            ) -> None:
+                super().__init__(**kwargs)
+                self.str_field = str_field
+                self.int_field = int_field
+
+            _attribute_map = {
+                "str_field": {"tag": "xml", "rename": "StrField"},
+                "int_field": {"tag": "xml", "rename": "IntField", "type": "int"},
+            }
+            _xml_map = {
+                "name": "SubConfiguration"
+            }
+
+        class RootConfiguration(serde.Model):
+            def __init__(
+                self,
+                id: str,
+                text: str,
+                sub_configuration: Optional[List[SubConfiguration]] = None,
+                **kwargs: Any
+            ) -> None:
+                super().__init__(**kwargs)
+                self.id = id
+                self.text = text
+                self.sub_configuration = sub_configuration
+
+            _attribute_map = {
+                "id": {"tag": "xml", "rename": "Id"},
+                "text": {"tag": "xml", "rename": "Text"},
+                "sub_configuration": {"tag": "xml", "rename": "SubConfiguration", "type": "[SubConfiguration]"},
+            }
+            _xml_map = {
+                "name": "RootConfiguration"
+            }
+
+        class PutApiRequest(serde.RequestModel):
+            def __init__(
+                self,
+                bucket: Optional[str] = None,
+                key: Optional[str] = None,
+                str_header: Optional[str] = None,
+                int_header: Optional[int] = None,
+                bool_header: Optional[bool] = None,
+                float_header: Optional[float] = None,
+                isotime_header: Optional[datetime.datetime] = None,
+                httptime_header: Optional[datetime.datetime] = None,
+                unixtime_header: Optional[datetime.datetime] = None,
+                str_param: Optional[str] = None,
+                int_param: Optional[int] = None,
+                bool_param: Optional[bool] = None,
+                float_param: Optional[float] = None,
+                isotime_param: Optional[datetime.datetime] = None,
+                httptime_param: Optional[datetime.datetime] = None,
+                unixtime_param: Optional[datetime.datetime] = None,
+                configuration: Optional[RootConfiguration] = None,
+                **kwargs: Any
+            ) -> None:
+                super().__init__(**kwargs)
+                self.bucket = bucket
+                self.key = key
+                self.str_header = str_header
+                self.int_header = int_header
+                self.bool_header = bool_header
+                self.float_header = float_header
+                self.isotime_header = isotime_header
+                self.httptime_header = httptime_header
+                self.unixtime_header = unixtime_header
+                self.str_param = str_param
+                self.int_param = int_param
+                self.bool_param = bool_param
+                self.float_param = float_param
+                self.isotime_param = isotime_param
+                self.httptime_param = httptime_param
+                self.unixtime_param = unixtime_param
+                self.configuration = configuration
+
+            _attribute_map = {
+                "bucket": {"tag": "input", "position": "host", "required": True},
+                "key": {"tag": "input", "position": "path", "required": True},
+                "str_header": {"tag": "input", "position": "header", "rename": "x-oss-str"},
+                "int_header": {"tag": "input", "position": "header", "rename": "x-oss-int"},
+                "bool_header": {"tag": "input", "position": "header", "rename": "x-oss-bool"},
+                "float_header": {"tag": "input", "position": "header", "rename": "x-oss-float"},
+                "isotime_header": {"tag": "input", "position": "header", "rename": "x-oss-isotime"},
+                "httptime_header": {"tag": "input", "position": "header", "rename": "x-oss-httptime", "type": "datetime,httptime"},
+                "unixtime_header": {"tag": "input", "position": "header", "rename": "x-oss-unixtime", "type": "datetime,unixtime"},
+                "str_param": {"tag": "input", "position": "query", "rename": "param-str"},
+                "int_param": {"tag": "input", "position": "query", "rename": "param-int"},
+                "bool_param": {"tag": "input", "position": "query", "rename": "param-bool"},
+                "float_param": {"tag": "input", "position": "query", "rename": "param-float"},
+                "isotime_param": {"tag": "input", "position": "query", "rename": "param-isotime"},
+                "httptime_param": {"tag": "input", "position": "query", "rename": "param-httptime", "type": "datetime,httptime"},
+                "unixtime_param": {"tag": "input", "position": "query", "rename": "param-unixtime", "type": "datetime,unixtime"},
+                "configuration": {"tag": "input", "position": "body", "rename": "Configuration", "type": "xml"},
+            }
+
+        datetime_utc = datetime.datetime.fromtimestamp(1702783809, tz=datetime.timezone.utc)
+        datetime2_utc = datetime.datetime.fromtimestamp(1702783819, tz=datetime.timezone.utc)
+        request = PutApiRequest(
+            bucket="bucket-124",
+            str_header="str_header",
+            int_header=123,
+            bool_header=True,
+            float_header=2.5,
+            isotime_header=datetime_utc,
+            httptime_header=datetime_utc,
+            unixtime_header=datetime_utc,
+            str_param="str_param",
+            int_param=456,
+            bool_param=False,
+            float_param=4.5,
+            isotime_param=datetime2_utc,
+            httptime_param=datetime2_utc,
+            unixtime_param=datetime2_utc,
+            configuration=RootConfiguration(
+                id="id-124",
+                text="just for test",
+                sub_configuration=[
+                    SubConfiguration(
+                        str_field='str-1',
+                        int_field=111
+                    ),
+                    SubConfiguration(
+                        str_field='str-2',
+                        int_field=222
+                    ),
+                ]
+            )
+        )
+
+        # missing required field
+        op_input = OperationInput(
+            op_name='TestApi',
+            method='GET',
+            bucket=request.bucket,
+            key=request.key,
+        )
+
+        try:
+            serde.serialize_input(request, op_input)
+            self.fail("should not reach here")
+        except exceptions.ParamRequiredError as err:
+            self.assertIn("missing required field, key", str(err))
+        except:
+            self.fail("should not reach here")
+
+        # normal case
+        request.key = 'key'
+        op_input = OperationInput(
+            op_name='TestApi',
+            method='GET',
+            bucket=request.bucket,
+            key=request.key,
+        )
+        serde.serialize_input(request, op_input)
+        self.assertEqual('str_header', op_input.headers.get('x-oss-str'))
+        self.assertEqual('123', op_input.headers.get('x-oss-int'))
+        self.assertEqual('true', op_input.headers.get('x-oss-bool'))
+        self.assertEqual('2.5', op_input.headers.get('x-oss-float'))
+        self.assertEqual('2023-12-17T03:30:09Z', op_input.headers.get('x-oss-isotime'))
+        self.assertEqual('Sun, 17 Dec 2023 03:30:09 GMT', op_input.headers.get('x-oss-httptime'))
+        self.assertEqual('1702783809', op_input.headers.get('x-oss-unixtime'))
+
+        self.assertEqual('str_param', op_input.parameters.get('param-str'))
+        self.assertEqual('456', op_input.parameters.get('param-int'))
+        self.assertEqual('false', op_input.parameters.get('param-bool'))
+        self.assertEqual('4.5', op_input.parameters.get('param-float'))
+        self.assertEqual('2023-12-17T03:30:19Z', op_input.parameters.get('param-isotime'))
+        self.assertEqual('Sun, 17 Dec 2023 03:30:19 GMT', op_input.parameters.get('param-httptime'))
+        self.assertEqual('1702783819', op_input.parameters.get('param-unixtime'))
+
+        root = ET.fromstring(op_input.body)
+        self.assertEqual('Configuration', root.tag)
+        self.assertEqual('id-124', root.findtext('Id'))
+        self.assertEqual('just for test', root.findtext('Text'))
+        elems = root.findall('SubConfiguration')
+        self.assertEqual(2, len(elems))
+        self.assertEqual('str-1', elems[0].findtext('StrField'))
+        self.assertEqual('111', elems[0].findtext('IntField'))
+        self.assertEqual('str-2', elems[1].findtext('StrField'))
+        self.assertEqual('222', elems[1].findtext('IntField'))
+
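+    # A non-XML body value is passed through untouched, and request-level
+    # `headers`/`parameters` kwargs are merged into the OperationInput,
+    # with values produced by mapped fields winning on key collisions
+    # (x-oss-str and param-str below).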
+    def test_serialize_non_xml_body(self):
+        class PutApiRequest(serde.RequestModel):
+            def __init__(
+                self,
+                bucket: Optional[str] = None,
+                key: Optional[str] = None,
+                str_header: Optional[str] = None,
+                str_param: Optional[str] = None,
+                configuration: Optional[Any] = None,
+                **kwargs: Any
+            ) -> None:
+                super().__init__(**kwargs)
+                self.bucket = bucket
+                self.key = key
+                self.str_header = str_header
+                self.str_param = str_param
+                self.configuration = configuration
+
+            _attribute_map = {
+                "bucket": {"tag": "input", "position": "host", "required": True},
+                "key": {"tag": "input", "position": "path", "required": True},
+                "str_header": {"tag": "input", "position": "header", "rename": "x-oss-str"},
+                "str_param": {"tag": "input", "position": "query", "rename": "param-str"},
+                "configuration": {"tag": "input", "position": "body"},
+            }
+
+        request = PutApiRequest(
+            bucket='bucket-123',
+            key='key-456',
+            str_header='str-1',
+            str_param='str-2',
+            configuration='hello world',
+            headers={
+                'X-oss-str': 'str-11',
+                'X-oss-int': '1234',
+            },
+            parameters={
+                'param-str': '1234',
+                'Param-str': 'str-11',
+            }
+        )
+
+        op_input = OperationInput(
+            op_name='TestApi',
+            method='GET',
+            bucket=request.bucket,
+            key=request.key,
+        )
+        serde.serialize_input(request, op_input)
+        self.assertEqual('str-1', op_input.headers.get('x-oss-str'))
+        self.assertEqual('1234', op_input.headers.get('x-oss-int'))
+        self.assertEqual('str-2', op_input.parameters.get('param-str'))
+        self.assertEqual('str-11', op_input.parameters.get('Param-str'))
+        self.assertEqual('hello world', op_input.body)
+
+        # default payload
+        request = PutApiRequest(
+            bucket='bucket-123',
+            key='key-456',
+            payload='123',
+        )
+        op_input = OperationInput(
+            op_name='TestApi',
+            method='GET',
+            bucket=request.bucket,
+            key=request.key,
+        )
+        serde.serialize_input(request, op_input)
+        self.assertEqual(0, len(op_input.headers.items()))
+        self.assertEqual(0, len(op_input.parameters.items()))
+        self.assertEqual('123', op_input.body)
+
+    def test_serialize_custom_serializer(self):
+        class PutApiRequest(serde.RequestModel):
+            def __init__(
+                self,
+                bucket: Optional[str] = None,
+                key: Optional[str] = None,
+                **kwargs: Any
+            ) -> None:
+                super().__init__(**kwargs)
+                self.bucket = bucket
+                self.key = key
+
+            _attribute_map = {
+                "bucket": {"tag": "input", "position": "host", "required": True},
+                "key": {"tag": "input", "position": "path", "required": True},
+            }
+
+        def add_custom_fields(request: serde.Model, op_input: OperationInput):
+            op_input.headers.update(
+                {
+                    'x-oss-str': 'str-1',
+                    'x-oss-key': request.key,
+                })
+            op_input.parameters.update(
+                {
+                    'param-str': 'str-2',
+                    'Param-str': 'str-11',
+                })
+
+        request = PutApiRequest(
+            bucket='bucket-123',
+            key='key-456',
+        )
+
+        op_input = OperationInput(
+            op_name='TestApi',
+            method='GET',
+            bucket=request.bucket,
+            key=request.key,
+        )
+        serializer = [
+            add_custom_fields,
+        ]
+        serde.serialize_input(request, op_input, custom_serializer=serializer)
+        self.assertEqual('str-1', op_input.headers.get('x-oss-str'))
+        self.assertEqual('key-456', op_input.headers.get('x-oss-key'))
+        self.assertEqual('str-2', op_input.parameters.get('param-str'))
+        self.assertEqual('str-11', op_input.parameters.get('Param-str'))
+        self.assertIsNone(op_input.body)
+
+    def test_serialize_non_requestmodel(self):
+        class PutApiRequest(serde.Model):
+            def __init__(
+                self,
+                bucket: Optional[str] = None,
+                key: Optional[str] = None,
+                **kwargs: Any
+            ) -> None:
+                super().__init__(**kwargs)
+                self.bucket = bucket
+                self.key = key
+
+            _attribute_map = {
+                "bucket": {"tag": "input", "position": "host", "required": True},
+                "key": {"tag": "input", "position": "path", "required": True},
+            }
+
+        class PutApiRequest2:
+            def __init__(
+                self,
+                bucket: Optional[str] = None,
+                key: Optional[str] = None,
+                **kwargs: Any
+            ) -> None:
+                super().__init__(**kwargs)
+                self.bucket = bucket
+                self.key = key
+
+            _attribute_map = {
+                "bucket": {"tag": "input", "position": "host", "required": True},
+                "key": {"tag": "input", "position": "path", "required": True},
+            }
+
+        request = PutApiRequest(
+            bucket='bucket-123',
+            key='key-456',
+        )
+
+        op_input = OperationInput(
+            op_name='TestApi',
+            method='GET',
+            bucket=request.bucket,
+            key=request.key,
+        )
+
+        try:
+            serde.serialize_input(request, op_input)
+            self.fail("should not reach here")
+        except exceptions.SerializationError as err:
+            self.assertIn("is not subclass of serde.RequestModel", str(err))
+            self.assertIn("PutApiRequest", str(err))
+        except:
+            self.fail("should not reach here")
+
+        request = PutApiRequest2(
+            bucket='bucket-123',
+            key='key-456',
+        )
+
+        op_input = OperationInput(
+            op_name='TestApi',
+            method='GET',
+            bucket=request.bucket,
+            key=request.key,
+        )
+
+        try:
+            serde.serialize_input(request, op_input)
+            self.fail("should not reach here")
+        except exceptions.SerializationError as err:
+            self.assertIn("is not subclass of serde.RequestModel", str(err))
+            self.assertIn("PutApiRequest2", str(err))
+        except:
+            self.fail("should not reach here")
+
+    def test_serialize_dict_header(self):
+        class PutApiRequest(serde.RequestModel):
+            def __init__(
+                self,
+                bucket: Optional[str] = None,
+                key: Optional[str] = None,
+                str_header: Optional[str] = None,
+                str_param: Optional[str] = None,
+                dict_header: Optional[MutableMapping] = None,
+                **kwargs: Any
+            ) -> None:
+                super().__init__(**kwargs)
+                self.bucket = bucket
+                self.key = key
+                self.str_header = str_header
+                self.str_param = str_param
+                self.dict_header = dict_header
+
+            _attribute_map = {
+                "bucket": {"tag": "input", "position": "host", "required": True},
+                "key": {"tag": "input", "position": "path", "required": True},
+                "str_header": {"tag": "input", "position": "header", "rename": "x-oss-str"},
+                "str_param": {"tag": "input", "position": "query", "rename": "param-str"},
+                "dict_header": {"tag": "input", "position": "header", "rename": "x-oss-meta-", "type": "dict,usermeta"},
+            }
+
+        request = PutApiRequest(
+            bucket='bucket-123',
+            key='key-456',
+            str_header='str-1',
+            str_param='str-2',
+            dict_header={
+                'key1': 'value1',
+                'key2': 'value2',
+            },
+        )
+
+        op_input = OperationInput(
+            op_name='TestApi',
+            method='GET',
+            bucket=request.bucket,
+            key=request.key,
+        )
+        serde.serialize_input(request, op_input)
+        self.assertEqual('str-1', op_input.headers.get('x-oss-str'))
+        self.assertEqual('str-2', op_input.parameters.get('param-str'))
+        self.assertEqual('value1', op_input.headers.get('x-oss-meta-key1'))
+        self.assertEqual('value2', op_input.headers.get('x-oss-meta-key2'))
+
+    def test_serialize_enum_type(self):
+        class EnumType(str, Enum):
+            PRIVATE = 'private'
+            PUBLICREAD = 'public-read'
+            PUBLICREADWRITE = 'public-read-write'
+
+        class PutApiRequest(serde.RequestModel):
+            def __init__(
+                self,
+                bucket: Optional[str] = None,
+                key: Optional[str] = None,
+                enum_header: Optional[EnumType] = None,
+                enum_param: Optional[EnumType] = None,
+                **kwargs: Any
+            ) -> None:
+                super().__init__(**kwargs)
+                self.bucket = bucket
+                self.key = key
+                self.enum_header = enum_header
+                self.enum_param = enum_param
+
+            _attribute_map = {
+                "bucket": {"tag": "input", "position": "host", "required": True},
+                "key": {"tag": "input", "position": "path", "required": True},
+                "enum_header": {"tag": "input", "position": "header", "rename": "x-oss-enum"},
+                "enum_param": {"tag": "input", "position": "query", "rename": "param-enum"},
+            }
+
+        request = PutApiRequest(
+            bucket='bucket-123',
+            key='key-456',
+            enum_header=EnumType.PUBLICREAD,
+            enum_param=EnumType.PUBLICREADWRITE,
+        )
+
+        op_input = OperationInput(
+            op_name='TestApi',
+            method='GET',
+            bucket=request.bucket,
+            key=request.key,
+        )
+        serde.serialize_input(request, op_input)
+        self.assertEqual('public-read', op_input.headers.get('x-oss-enum'))
+        self.assertEqual('public-read-write', op_input.parameters.get('param-enum'))
+
+    def test_deserialize_output(self):
+        class PutApiResult(serde.ResultModel):
+            def __init__(
+                self,
+                **kwargs: Any
+            ) -> None:
+                super().__init__(**kwargs)
+
+        result = PutApiResult()
+
+        headers = CaseInsensitiveDict({'key': 'value', 'key1': 'value1', 'x-oss-request-id': '123'})
+        op_output = OperationOutput(
+            status='OK',
+            status_code=200,
+            headers=headers,
+        )
+
+        serde.deserialize_output(result, op_output)
+        self.assertEqual('OK', result.status)
+        self.assertEqual(200, result.status_code)
+        self.assertEqual('123', result.request_id)
+        self.assertEqual('value', result.headers.get('key'))
+        self.assertEqual('value1', result.headers.get('key1'))
+        self.assertEqual('123', result.headers.get('x-oss-request-id'))
+
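+    # deserialize_output always fills status, status_code, request_id (from
+    # the x-oss-request-id header) and the raw headers; "xml"-tagged and
+    # "output"-tagged fields are only populated by the deserializers passed
+    # via custom_deserializer, as the following cases show.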
+    def test_deserialize_response_inline_body_xml(self):
+        class PutApiResult(serde.ResultModel):
+            def __init__(
+                self,
+                str_xml: Optional[str] = None,
+                int_xml: Optional[int] = None,
+                bool_xml: Optional[bool] = None,
+                float_xml: Optional[float] = None,
+                isotime_xml: Optional[datetime.datetime] = None,
+                httptime_xml: Optional[datetime.datetime] = None,
+                unixtime_xml: Optional[datetime.datetime] = None,
+                **kwargs: Any
+            ) -> None:
+                super().__init__(**kwargs)
+                self.str_xml = str_xml
+                self.int_xml = int_xml
+                self.bool_xml = bool_xml
+                self.float_xml = float_xml
+                self.isotime_xml = isotime_xml
+                self.httptime_xml = httptime_xml
+                self.unixtime_xml = unixtime_xml
+
+            _attribute_map = {
+                "str_xml": {"tag": "xml", "rename": "StrField"},
+                "int_xml": {"tag": "xml", "rename": "IntField", "type": "int"},
+                "bool_xml": {"tag": "xml", "rename": "BoolField", "type": "bool"},
+                "float_xml": {"tag": "xml", "rename": "FloatField", "type": "float"},
+                "isotime_xml": {"tag": "xml", "rename": "IsotimeField", "type": "datetime"},
+                "httptime_xml": {"tag": "xml", "rename": "HttptimeField", "type": "datetime,httptime"},
+                "unixtime_xml": {"tag": "xml", "rename": "UnixtimeField", "type": "datetime,unixtime"},
+            }
+
+        # PutApiResult defines no _xml_map, so the root tag name is not
+        # checked here; <Result> is only illustrative
+        xml_data = r'''
+        <Result>
+            <StrField>str-1</StrField>
+            <IntField>1234</IntField>
+            <BoolField>true</BoolField>
+            <FloatField>3.5</FloatField>
+            <IsotimeField>2023-12-17T03:30:09.000000Z</IsotimeField>
+            <HttptimeField>Sun, 17 Dec 2023 03:30:09 GMT</HttptimeField>
+            <UnixtimeField>1702783809</UnixtimeField>
+        </Result>
+        '''
+
+        result = PutApiResult()
+        op_output = OperationOutput(
+            status='OK',
+            status_code=200,
+            headers={},
+            http_response=HttpResponseStub(data=xml_data)
+        )
+        datetime_utc = datetime.datetime.fromtimestamp(1702783809, tz=datetime.timezone.utc)
+        deserializer = [serde.deserialize_output_xmlbody]
+        serde.deserialize_output(result, op_output, custom_deserializer=deserializer)
+        self.assertEqual('OK', result.status)
+        self.assertEqual(200, result.status_code)
+        self.assertEqual('', result.request_id)
+        self.assertEqual('str-1', result.str_xml)
+        self.assertEqual(1234, result.int_xml)
+        self.assertEqual(True, result.bool_xml)
+        self.assertEqual(3.5, result.float_xml)
+        self.assertEqual(datetime_utc, result.isotime_xml)
+        self.assertEqual(datetime_utc, result.httptime_xml)
+        self.assertEqual(datetime_utc, result.unixtime_xml)
+
+    def test_deserialize_response_outline_body_xml(self):
+
+        class Configuration(serde.Model):
+            def __init__(
+                self,
+                str_xml: Optional[str] = None,
+                int_xml: Optional[int] = None,
+                bool_xml: Optional[bool] = None,
+                float_xml: Optional[float] = None,
+                isotime_xml: Optional[datetime.datetime] = None,
+                httptime_xml: Optional[datetime.datetime] = None,
+                unixtime_xml: Optional[datetime.datetime] = None,
+                **kwargs: Any
+            ) -> None:
+                super().__init__(**kwargs)
+                self.str_xml = str_xml
+                self.int_xml = int_xml
+                self.bool_xml = bool_xml
+                self.float_xml = float_xml
+                self.isotime_xml = isotime_xml
+                self.httptime_xml = httptime_xml
+                self.unixtime_xml = unixtime_xml
+
+            _attribute_map = {
+                "str_xml": {"tag": "xml", "rename": "StrField"},
+                "int_xml": {"tag": "xml", "rename": "IntField", "type": "int"},
+                "bool_xml": {"tag": "xml", "rename": "BoolField", "type": "bool"},
+                "float_xml": {"tag": "xml", "rename": "FloatField", "type": "float"},
+                "isotime_xml": {"tag": "xml", "rename": "IsotimeField", "type": "datetime"},
+                "httptime_xml": {"tag": "xml", "rename": "HttptimeField", "type": "datetime,httptime"},
+                "unixtime_xml": {"tag": "xml", "rename": "UnixtimeField", "type": "datetime,unixtime"},
+            }
+
+            _xml_map = {'name': 'Configuration'}
+
+        class PutApiResult(serde.ResultModel):
+            def __init__(
+                self,
+                config: Optional[Configuration] = None,
+                **kwargs: Any
+            ) -> None:
+                super().__init__(**kwargs)
+                self.config = config
+
+            _attribute_map = {
+                "config": {"tag": "output", "position": "body", "type": "Configuration,xml"},
+            }
+            _dependency_map = {
"Configuration": {"new": lambda:Configuration()}, + } + + xml_data = r''' + + str-1 + 1234 + true + 3.5 + 2023-12-17T03:30:09.000000Z + Sun, 17 Dec 2023 03:30:09 GMT + 1702783809 + + ''' + + result = PutApiResult() + op_output = OperationOutput( + status='OK', + status_code=200, + headers= {}, + http_response=HttpResponseStub(data=xml_data) + ) + datetime_utc = datetime.datetime.fromtimestamp(1702783809, tz=datetime.timezone.utc) + deserializer = [serde.deserialize_output_xmlbody] + serde.deserialize_output(result, op_output, custom_deserializer=deserializer) + self.assertEqual('OK', result.status) + self.assertEqual(200, result.status_code) + self.assertEqual('', result.request_id) + self.assertEqual('str-1', result.config.str_xml) + self.assertEqual(1234, result.config.int_xml) + self.assertEqual(True, result.config.bool_xml) + self.assertEqual(3.5, result.config.float_xml) + self.assertEqual(datetime_utc, result.config.isotime_xml) + self.assertEqual(datetime_utc, result.config.httptime_xml) + self.assertEqual(datetime_utc, result.config.unixtime_xml) + + + def test_deserialize_response_header(self): + class PutApiResult(serde.ResultModel): + def __init__( + self, + str_header: Optional[str] = None, + int_header: Optional[int] = None, + bool_header: Optional[bool] = None, + float_header: Optional[float] = None, + isotime_header: Optional[datetime.datetime] = None, + httptime_header: Optional[datetime.datetime] = None, + unixtime_header: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + super().__init__(**kwargs) + self.str_header = str_header + self.int_header = int_header + self.bool_header = bool_header + self.float_header = float_header + self.isotime_header = isotime_header + self.httptime_header = httptime_header + self.unixtime_header = unixtime_header + + _attribute_map = { + "str_header": {"tag": "output", "position": "header", "rename": "x-oss-str"}, + "int_header": {"tag": "output", "position": "header", "rename": "x-oss-int", "type":"int"}, + "bool_header": {"tag": "output", "position": "header", "rename": "x-oss-bool", "type":"bool"}, + "float_header": {"tag": "output", "position": "header", "rename": "x-oss-float", "type":"float"}, + "isotime_header": {"tag": "output", "position": "header", "rename": "x-oss-isotime", "type":"datetime"}, + "httptime_header": {"tag": "output", "position": "header", "rename": "x-oss-httptime", "type":"datetime,httptime"}, + "unixtime_header": {"tag": "output", "position": "header", "rename": "x-oss-unixtime", "type":"datetime,unixtime"}, + } + + + result = PutApiResult() + + headers = CaseInsensitiveDict({ + 'x-oss-str':'str-1', + 'x-oss-int':'123', + 'x-oss-bool':'false', + 'x-oss-float':'3.5', + 'x-oss-isotime':'2023-12-17T03:30:09.000000Z', + 'x-oss-httptime':'Sun, 17 Dec 2023 03:30:09 GMT', + 'x-oss-unixtime':'1702783809', + 'x-oss-request-id':'id-12345' + }) + op_output = OperationOutput( + status='OK', + status_code=200, + headers= headers, + ) + datetime_utc = datetime.datetime.fromtimestamp(1702783809, tz=datetime.timezone.utc) + deserializer = [serde.deserialize_output_headers] + serde.deserialize_output(result, op_output, custom_deserializer=deserializer) + self.assertEqual('OK', result.status) + self.assertEqual(200, result.status_code) + self.assertEqual('id-12345', result.request_id) + self.assertEqual(8, len(result.headers.items())) + self.assertEqual('str-1', result.str_header) + self.assertEqual(123, result.int_header) + self.assertEqual(False, result.bool_header) + self.assertEqual(3.5, result.float_header) + 
+        self.assertEqual(datetime_utc, result.isotime_header)
+        self.assertEqual(datetime_utc, result.httptime_header)
+        self.assertEqual(datetime_utc, result.unixtime_header)
+
+    def test_deserialize_response_body_and_header(self):
+        class PutApiResult(serde.ResultModel):
+            def __init__(
+                self,
+                str_xml: Optional[str] = None,
+                int_xml: Optional[int] = None,
+                float_header: Optional[float] = None,
+                isotime_header: Optional[datetime.datetime] = None,
+                **kwargs: Any
+            ) -> None:
+                super().__init__(**kwargs)
+                self.str_xml = str_xml
+                self.int_xml = int_xml
+                self.float_header = float_header
+                self.isotime_header = isotime_header
+
+            _attribute_map = {
+                "str_xml": {"tag": "xml", "rename": "StrField"},
+                "int_xml": {"tag": "xml", "rename": "IntField", "type": "int"},
+                "float_header": {"tag": "output", "position": "header", "rename": "x-oss-float", "type": "float"},
+                "isotime_header": {"tag": "output", "position": "header", "rename": "x-oss-isotime", "type": "datetime"},
+            }
+
+        xml_data = r'''
+        <Result>
+            <StrField>str-1</StrField>
+            <IntField>1234</IntField>
+        </Result>
+        '''
+        headers = CaseInsensitiveDict({
+            'x-oss-float': '3.5',
+            'x-oss-isotime': '2023-12-17T03:30:09.000000Z',
+            'x-oss-request-id': 'id-12345'
+        })
+
+        result = PutApiResult()
+        op_output = OperationOutput(
+            status='OK',
+            status_code=200,
+            headers=headers,
+            http_response=HttpResponseStub(data=xml_data)
+        )
+        datetime_utc = datetime.datetime.fromtimestamp(1702783809, tz=datetime.timezone.utc)
+        deserializer = [serde.deserialize_output_xmlbody, serde.deserialize_output_headers]
+        serde.deserialize_output(result, op_output, custom_deserializer=deserializer)
+        self.assertEqual('OK', result.status)
+        self.assertEqual(200, result.status_code)
+        self.assertEqual('id-12345', result.request_id)
+
+        self.assertEqual(3.5, result.float_header)
+        self.assertEqual(datetime_utc, result.isotime_header)
+
+        self.assertEqual('str-1', result.str_xml)
+        self.assertEqual(1234, result.int_xml)
+
+    def test_deserialize_non_resultmodel(self):
+        class PutApiResult(serde.Model):
+            def __init__(
+                self,
+                **kwargs: Any
+            ) -> None:
+                super().__init__(**kwargs)
+
+        try:
+            result = PutApiResult()
+            op_output = OperationOutput(
+                status='OK',
+                status_code=200,
+            )
+            serde.deserialize_output(result, op_output)
+            self.fail("should not reach here")
+        except exceptions.DeserializationError as err:
+            self.assertIn("is not subclass of serde.ResultModel", str(err))
+            self.assertIn("PutApiResult", str(err))
+        except:
+            self.fail("should not reach here")
+
+        class PutApiRequest2:
+            def __init__(
+                self,
+                **kwargs: Any
+            ) -> None:
+                super().__init__(**kwargs)
+
+        try:
+            result = PutApiRequest2()
+            op_output = OperationOutput(
+                status='OK',
+                status_code=200,
+            )
+            serde.deserialize_output(result, op_output)
+            self.fail("should not reach here")
+        except exceptions.DeserializationError as err:
+            self.assertIn("is not subclass of serde.ResultModel", str(err))
+            self.assertIn("PutApiRequest2", str(err))
+        except:
+            self.fail("should not reach here")
+
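+    # "dict,usermeta" is symmetric with the input side: response headers
+    # carrying the x-oss-meta- prefix are collected back into a dict, with
+    # the prefix stripped from the keys.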
+    def test_deserialize_response_dict_header(self):
+        class PutApiResult(serde.ResultModel):
+            def __init__(
+                self,
+                str_header: Optional[str] = None,
+                dict_header: Optional[MutableMapping] = None,
+                **kwargs: Any
+            ) -> None:
+                super().__init__(**kwargs)
+                self.str_header = str_header
+                self.dict_header = dict_header
+
+            _attribute_map = {
+                "str_header": {"tag": "output", "position": "header", "rename": "x-oss-str"},
+                "dict_header": {"tag": "output", "position": "header", "rename": "x-oss-meta-", "type": "dict,usermeta"},
+            }
+
+        result = PutApiResult()
+
+        headers = CaseInsensitiveDict({
+            'x-oss-str': 'str-1',
+            'x-oss-meta-key1': 'value1',
+            'x-oss-meta-key2': 'value2',
+            'x-oss-request-id': 'id-12345'
+        })
+        op_output = OperationOutput(
+            status='OK',
+            status_code=200,
+            headers=headers,
+        )
+        deserializer = [serde.deserialize_output_headers]
+        serde.deserialize_output(result, op_output, custom_deserializer=deserializer)
+        self.assertEqual('OK', result.status)
+        self.assertEqual(200, result.status_code)
+        self.assertEqual('id-12345', result.request_id)
+        self.assertEqual(4, len(result.headers.items()))
+        self.assertEqual('str-1', result.str_header)
+        self.assertEqual('value1', result.dict_header.get('key1'))
+        self.assertEqual('value2', result.dict_header.get('key2'))
+
+    def test_deserialize_response_body_xml_roottag(self):
+        class PutApiResult(serde.ResultModel):
+            def __init__(
+                self,
+                str_xml: Optional[str] = None,
+                int_xml: Optional[int] = None,
+                float_header: Optional[float] = None,
+                isotime_header: Optional[datetime.datetime] = None,
+                **kwargs: Any
+            ) -> None:
+                super().__init__(**kwargs)
+                self.str_xml = str_xml
+                self.int_xml = int_xml
+                self.float_header = float_header
+                self.isotime_header = isotime_header
+
+            _attribute_map = {
+                "str_xml": {"tag": "xml", "rename": "StrField"},
+                "int_xml": {"tag": "xml", "rename": "IntField", "type": "int"},
+                "float_header": {"tag": "output", "position": "header", "rename": "x-oss-float", "type": "float"},
+                "isotime_header": {"tag": "output", "position": "header", "rename": "x-oss-isotime", "type": "datetime"},
+            }
+
+            _xml_map = {'name': 'Root'}
+
+        xml_data = r'''
+        <Root>
+            <StrField>str-1</StrField>
+            <IntField>1234</IntField>
+        </Root>
+        '''
+
+        result = PutApiResult()
+        op_output = OperationOutput(
+            status='OK',
+            status_code=200,
+            http_response=HttpResponseStub(data=xml_data)
+        )
+        deserializer = [serde.deserialize_output_xmlbody]
+        serde.deserialize_output(result, op_output, custom_deserializer=deserializer)
+        self.assertEqual('OK', result.status)
+        self.assertEqual(200, result.status_code)
+        self.assertEqual('str-1', result.str_xml)
+        self.assertEqual(1234, result.int_xml)
+
+        xml_data = r'''
+        <InvalidRoot>
+            <StrField>str-1</StrField>
+            <IntField>1234</IntField>
+        </InvalidRoot>
+        '''
+
+        result = PutApiResult()
+        op_output = OperationOutput(
+            status='OK',
+            status_code=200,
+            http_response=HttpResponseStub(data=xml_data)
+        )
+        deserializer = [serde.deserialize_output_xmlbody]
+        try:
+            serde.deserialize_output(result, op_output, custom_deserializer=deserializer)
+            self.fail('should not reach here')
+        except exceptions.DeserializationError as err:
+            self.assertIn('Expect root tag is Root, gots InvalidRoot', str(err))
+        except:
+            self.fail('should not reach here')
+
+    def test_deserialize_response_body_outline_node_xml_roottag(self):
+        class Configuration(serde.Model):
+            def __init__(
+                self,
+                str_xml: Optional[str] = None,
+                int_xml: Optional[int] = None,
+                **kwargs: Any
+            ) -> None:
+                super().__init__(**kwargs)
+                self.str_xml = str_xml
+                self.int_xml = int_xml
+
+            _attribute_map = {
+                "str_xml": {"tag": "xml", "rename": "StrField"},
+                "int_xml": {"tag": "xml", "rename": "IntField", "type": "int"},
+            }
+
+            _xml_map = {'name': 'Root'}
+
+        class PutApiResult(serde.ResultModel):
+            def __init__(
+                self,
+                config: Optional["Configuration"] = None,
+                **kwargs: Any
+            ) -> None:
+                super().__init__(**kwargs)
+                self.config = config
+
+            _attribute_map = {
+                "config": {"tag": "output", "position": "body", "rename": "Root", "type": "Configuration,xml"},
+            }
+
+            _dependency_map = {
+                "Configuration": {"new": lambda: Configuration()}
+            }
+
+        xml_data = r'''
+        <Root>
+            <StrField>str-1</StrField>
+            <IntField>1234</IntField>
+        </Root>
+        '''
+
+        result = PutApiResult()
+        op_output = OperationOutput(
+            status='OK',
+            status_code=200,
+            http_response=HttpResponseStub(data=xml_data)
+        )
+        deserializer = [serde.deserialize_output_xmlbody]
+        serde.deserialize_output(result, op_output, custom_deserializer=deserializer)
+        self.assertEqual('OK', result.status)
+        self.assertEqual(200, result.status_code)
+        self.assertEqual('str-1', result.config.str_xml)
+        self.assertEqual(1234, result.config.int_xml)
+
+        xml_data = r'''
+        <InvalidRoot>
+            <StrField>str-1</StrField>
+            <IntField>1234</IntField>
+        </InvalidRoot>
+        '''
+
+        result = PutApiResult()
+        op_output = OperationOutput(
+            status='OK',
+            status_code=200,
+            http_response=HttpResponseStub(data=xml_data)
+        )
+        deserializer = [serde.deserialize_output_xmlbody]
+        try:
+            serde.deserialize_output(result, op_output, custom_deserializer=deserializer)
+            self.fail('should not reach here')
+        except exceptions.DeserializationError as err:
+            self.assertIn('Expect root tag is Root, gots InvalidRoot', str(err))
+        except:
+            self.fail('should not reach here')
+
+
+class TestSerdePublicFunction(unittest.TestCase):
+    def test_serialize_time(self):
+        datetime_utc = datetime.datetime.fromtimestamp(1702783819, tz=datetime.timezone.utc)
+        self.assertEqual('2023-12-17T03:30:19.000000Z', serde.serialize_isotime(datetime_utc))
+        self.assertEqual('Sun, 17 Dec 2023 03:30:19 GMT', serde.serialize_httptime(datetime_utc))
+        self.assertEqual('1702783819', serde.serialize_unixtime(datetime_utc))
+
+    def test_deserialize_time(self):
+        datetime_utc = datetime.datetime.fromtimestamp(1702783819, tz=datetime.timezone.utc)
+        self.assertEqual(datetime_utc, serde.deserialize_iso('2023-12-17T03:30:19.000000Z'))
+        self.assertEqual(datetime_utc, serde.deserialize_httptime('Sun, 17 Dec 2023 03:30:19 GMT'))
+        self.assertEqual(datetime_utc, serde.deserialize_unixtime('1702783819'))
+
+    def test_deserialize_boolean(self):
+        self.assertTrue(serde.deserialize_boolean('True'))
+        self.assertTrue(serde.deserialize_boolean('true'))
+        self.assertTrue(serde.deserialize_boolean('TRUE'))
+
+        self.assertFalse(serde.deserialize_boolean(''))
+        self.assertFalse(serde.deserialize_boolean('FALSE'))
+        self.assertFalse(serde.deserialize_boolean(None))
+
+    def test_copy_request(self):
+        class PutApiRequestSrc(serde.RequestModel):
+            def __init__(
+                self,
+                bucket: Optional[str] = None,
+                key: Optional[str] = None,
+                str_header: Optional[str] = None,
+                int_header: Optional[int] = None,
+                bool_header: Optional[bool] = None,
+                float_header: Optional[float] = None,
+                isotime_header: Optional[datetime.datetime] = None,
+                **kwargs: Any
+            ) -> None:
+                super().__init__(**kwargs)
+                self.bucket = bucket
+                self.key = key
+                self.str_header = str_header
+                self.int_header = int_header
+                self.bool_header = bool_header
+                self.float_header = float_header
+                self.isotime_header = isotime_header
+
+            _attribute_map = {
+                "bucket": {"tag": "input", "position": "host", "required": True},
+                "key": {"tag": "input", "position": "path", "required": True},
+                "str_header": {"tag": "input", "position": "header", "rename": "x-oss-str"},
+                "int_header": {"tag": "input", "position": "header", "rename": "x-oss-int"},
+                "bool_header": {"tag": "input", "position": "header", "rename": "x-oss-bool"},
+                "float_header": {"tag": "input", "position": "header", "rename": "x-oss-float"},
+                "isotime_header": {"tag": "input", "position": "header", "rename": "x-oss-isotime"},
+            }
+
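+        # copy_request matches attributes by field name, not by header
+        # rename: dst.unixtime_header stays None below because the source
+        # model has no field of that name.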
+        class PutApiRequestDst(serde.RequestModel):
+            def __init__(
+                self,
+                bucket: Optional[str] = None,
+                key: Optional[str] = None,
+                str_header: Optional[str] = None,
+                int_header: Optional[int] = None,
+                bool_header: Optional[bool] = None,
+                float_header: Optional[float] = None,
+                unixtime_header: Optional[datetime.datetime] = None,
+                **kwargs: Any
+            ) -> None:
+                super().__init__(**kwargs)
+                self.bucket = bucket
+                self.key = key
+                self.str_header = str_header
+                self.int_header = int_header
+                self.bool_header = bool_header
+                self.float_header = float_header
+                self.unixtime_header = unixtime_header
+
+            _attribute_map = {
+                "bucket": {"tag": "input", "position": "host", "required": True},
+                "key": {"tag": "input", "position": "path", "required": True},
+                "str_header": {"tag": "input", "position": "header", "rename": "x-oss-str"},
+                "int_header": {"tag": "input", "position": "header", "rename": "x-oss-int"},
+                "bool_header": {"tag": "input", "position": "header", "rename": "x-oss-bool"},
+                "float_header": {"tag": "input", "position": "header", "rename": "x-oss-float"},
+                "unixtime_header": {"tag": "input", "position": "header", "rename": "x-oss-unixtime"},
+            }
+
+        src = PutApiRequestSrc(
+            bucket='bucket',
+            key='key',
+            str_header='str-1',
+            int_header=123,
+            bool_header=True,
+            float_header=1.5,
+            isotime_header=datetime.datetime.now()
+        )
+
+        dst = PutApiRequestDst()
+        self.assertIsNone(dst.bucket)
+        self.assertIsNone(dst.key)
+        self.assertIsNone(dst.str_header)
+        self.assertIsNone(dst.int_header)
+        self.assertIsNone(dst.bool_header)
+        self.assertIsNone(dst.float_header)
+        self.assertIsNone(dst.unixtime_header)
+
+        serde.copy_request(dst, src)
+        self.assertEqual('bucket', dst.bucket)
+        self.assertEqual('key', dst.key)
+        self.assertEqual('str-1', dst.str_header)
+        self.assertEqual(123, dst.int_header)
+        self.assertEqual(True, dst.bool_header)
+        self.assertEqual(1.5, dst.float_header)
+        self.assertIsNone(dst.unixtime_header)
+
+
+class TestSerdeUtils(unittest.TestCase):
+
+    def test_deserialize_process_body(self):
+        class JsonObjectResult(serde.ResultModel):
+            def __init__(
+                self,
+                bucket: Optional[str] = None,
+                file_size: Optional[int] = None,
+                key: Optional[str] = None,
+                process_status: Optional[str] = None,
+                **kwargs: Any
+            ) -> None:
+                super().__init__(**kwargs)
+                self.bucket = bucket
+                self.file_size = file_size
+                self.key = key
+                self.process_status = process_status
+
+            # "json"-tagged fields are read from the JSON body by their
+            # "rename" key
+            _attribute_map = {
+                "bucket": {"tag": "json", "rename": "bucket"},
+                "file_size": {"tag": "json", "rename": "fileSize", "type": "int"},
+                "key": {"tag": "json", "rename": "object"},
+                "process_status": {"tag": "json", "rename": "status"},
+            }
+
+        jsonstr = '{"bucket":"bucket-123","fileSize":1234,"object":"object-123","status":"ok"}'
+
+        op_output = OperationOutput(
+            status='OK',
+            status_code=200,
+            http_response=HttpResponseStub(data=jsonstr)
+        )
+
+        result = serde_utils.deserialize_process_body(JsonObjectResult(), op_output)
+        result = cast(JsonObjectResult, result)
+
+        self.assertEqual('bucket-123', result.bucket)
+        self.assertEqual(1234, result.file_size)
+        self.assertEqual('object-123', result.key)
+        self.assertEqual('ok', result.process_status)
diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py
new file mode 100644
index 0000000..84a9067
--- /dev/null
+++ b/tests/unit/test_utils.py
@@ -0,0 +1,24 @@
+# pylint: skip-file
+import unittest
+from alibabacloud_oss_v2 import utils
+
+class TestUtils(unittest.TestCase):
+    def test_escape_xml_value(self):
+        data = b'\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f'
+        encstr = '� '
+        edata = utils.escape_xml_value(data.decode())
+        self.assertEqual(encstr, edata)
+
+        data = b'\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21\xe4\xbd\xa0\xe5\xa5\xbd'
+        encstr = ' !你好'
+        edata = utils.escape_xml_value(data.decode())
+        self.assertEqual(encstr, edata)
+
+        data = '<>&"'
+        encstr = '&lt;&gt;&amp;&quot;'
+        edata = utils.escape_xml_value(data)
+        self.assertEqual(encstr, edata)
diff --git a/tests/unit/transport/__init__.py b/tests/unit/transport/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/unit/transport/test_requests_client.py b/tests/unit/transport/test_requests_client.py
new file mode 100644
index 0000000..8d1c8b6
--- /dev/null
+++ b/tests/unit/transport/test_requests_client.py
@@ -0,0 +1 @@
+