{storage-blob-preview} Support blob service properties with track 2 SDK (#2878)

* delete retention support

* support service properties

* add release note

* enable logging

* enable logging

* fix linter
Juliehzl committed Feb 3, 2021
1 parent c625914 commit 7687f91
Showing 12 changed files with 958 additions and 71 deletions.
4 changes: 4 additions & 0 deletions src/storage-blob-preview/HISTORY.rst
@@ -2,6 +2,10 @@
 Release History
 ===============
+0.4.1
+++++++
+* `az storage blob service-properties`: Adopt new api version with track2 SDK
+
 0.4.0
 ++++++
 * Support blob url for blob related commands
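The 0.4.1 entry corresponds to the service-properties commands reworked in this commit. As a rough, illustrative check (assuming the storage-blob-preview extension is installed and "mystorageaccount" is a placeholder), the reworked command can be exercised with:

    az storage blob service-properties show --account-name mystorageaccount --auth-mode login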
@@ -5,6 +5,7 @@

 from azure.cli.core.commands.client_factory import get_mgmt_service_client
 from azure.cli.core.profiles import ResourceType, get_sdk
+from azure.cli.core.commands.client_factory import _prepare_client_kwargs_track2
 from .profiles import CUSTOM_DATA_STORAGE_BLOB, CUSTOM_MGMT_STORAGE

 MISSING_CREDENTIALS_ERROR_MESSAGE = """
@@ -55,24 +56,24 @@ def get_credential(kwargs):

 def cf_blob_service(cli_ctx, kwargs):
     from knack.util import CLIError
-    client_args = {}
+    client_kwargs = {}
     t_blob_service = get_sdk(cli_ctx, CUSTOM_DATA_STORAGE_BLOB,
                              '_blob_service_client#BlobServiceClient')
     connection_string = kwargs.pop('connection_string', None)
     account_name = kwargs.pop('account_name', None)

     location_mode = kwargs.pop('location_mode', None)
     if location_mode:
-        client_args['_location_mode'] = location_mode
-
+        client_kwargs['_location_mode'] = location_mode
+    client_kwargs.update(_prepare_client_kwargs_track2(cli_ctx))
     if connection_string:
-        return t_blob_service.from_connection_string(conn_str=connection_string)
+        return t_blob_service.from_connection_string(conn_str=connection_string, **client_kwargs)

     account_url = get_account_url(cli_ctx, account_name=account_name, service='blob')
     credential = get_credential(kwargs)

     if account_url and credential:
-        return t_blob_service(account_url=account_url, credential=credential, **client_args)
+        return t_blob_service(account_url=account_url, credential=credential, **client_kwargs)
     raise CLIError("Please provide valid connection string, or account name with account key, "
                    "sas token or login auth mode.")

@@ -121,34 +122,3 @@ def cf_blob_sas(cli_ctx, kwargs):

     return t_blob_sas(account_name=kwargs.pop('account_name', None),
                       account_key=kwargs.pop('account_key', None))
-
-
-def cf_adls_service(cli_ctx, kwargs):
-    t_adls_service = get_sdk(cli_ctx, ResourceType.DATA_STORAGE_FILEDATALAKE,
-                             '_data_lake_service_client#DataLakeServiceClient')
-    connection_string = kwargs.pop('connection_string', None)
-    account_key = kwargs.pop('account_key', None)
-    token_credential = kwargs.pop('token_credential', None)
-    sas_token = kwargs.pop('sas_token', None)
-    if connection_string:
-        return t_adls_service.from_connection_string(connection_string=connection_string)
-
-    account_url = get_account_url(cli_ctx, account_name=kwargs.pop('account_name', None), service='dfs')
-    credential = account_key or sas_token or token_credential
-
-    if account_url and credential:
-        return t_adls_service(account_url=account_url, credential=credential)
-    return None
-
-
-def cf_adls_file_system(cli_ctx, kwargs):
-    return cf_adls_service(cli_ctx, kwargs).get_file_system_client(file_system=kwargs.pop('file_system_name'))
-
-
-def cf_adls_directory(cli_ctx, kwargs):
-    return cf_adls_file_system(cli_ctx, kwargs).get_directory_client(directory=kwargs.pop('directory_path'))
-
-
-def cf_adls_file(cli_ctx, kwargs):
-    return cf_adls_service(cli_ctx, kwargs).get_file_client(file_system=kwargs.pop('file_system_name', None),
-                                                            file_path=kwargs.pop('path', None))
20 changes: 20 additions & 0 deletions src/storage-blob-preview/azext_storage_blob_preview/_params.py
@@ -368,6 +368,26 @@ def load_arguments(self, _): # pylint: disable=too-many-locals, too-many-statem
         c.extra('snapshot', snapshot_type)
         c.extra('if_tags_match_condition', tags_condition_type)

+    with self.argument_context('storage blob service-properties delete-policy update') as c:
+        c.argument('enable', arg_type=get_enum_type(['true', 'false']), help='Enables/disables soft-delete.')
+        c.argument('days_retained', type=int,
+                   help='Number of days that soft-deleted blob will be retained. Must be in range [1,365].')
+
+    with self.argument_context('storage blob service-properties update', min_api='2018-03-28') as c:
+        c.argument('delete_retention', arg_type=get_three_state_flag(), arg_group='Soft Delete',
+                   help='Enables soft-delete.')
+        c.argument('delete_retention_period', type=int, arg_group='Soft Delete',
+                   help='Number of days that soft-deleted blob will be retained. Must be in range [1,365].')
+        c.argument('static_website', arg_group='Static Website', arg_type=get_three_state_flag(),
+                   help='Enables static-website.')
+        c.argument('index_document', help='The default name of the index page under each directory.',
+                   arg_group='Static Website')
+        c.argument('error_document_404_path', options_list=['--404-document'], arg_group='Static Website',
+                   help='The absolute path of the custom 404 page.')
+        c.argument('default_index_document_path', options_list='--default-index-path', is_preview=True,
+                   help='Absolute path of the default index page.',
+                   arg_group='Static Website')
+
     with self.argument_context('storage blob set-tier', resource_type=CUSTOM_DATA_STORAGE_BLOB) as c:
         c.register_blob_arguments()

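Given knack's default option naming (underscores become dashes) plus the explicit options_list overrides above, the new arguments should surface roughly as the flags below; this is an illustrative sketch with placeholder values, not output taken from the change itself:

    az storage blob service-properties update --account-name mystorageaccount --auth-mode login \
        --delete-retention true --delete-retention-period 7 \
        --static-website true --index-document index.html --404-document error.html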
@@ -234,6 +234,23 @@ def transform_blob_json_output(result):
     return new_result


+def transform_blob_service_properties(result):
+    from azure.cli.core.commands.arm import make_camel_case
+    static_website = todict(result.pop("static_website", None))
+    static_website["errorDocument_404Path"] = static_website.pop("errorDocument404Path", None)
+    new_result = {
+        "cors": result.pop("cors", None),
+        "deleteRetentionPolicy": result.pop("delete_retention_policy", None),
+        "hourMetrics": result.pop("hour_metrics", None),
+        "logging": result.pop("analytics_logging", None),
+        "minuteMetrics": result.pop("minute_metrics", None),
+        "staticWebsite": static_website
+    }
+    for key in result:
+        new_result[make_camel_case(key)] = result[key]
+    return new_result
+
+
 def transform_container_list_output(result):
     for i, item in enumerate(result):
         if isinstance(item, dict) and 'nextMarker' in item:
@@ -35,12 +35,11 @@ def _query_account_key(cli_ctx, account_name):
     t_storage_account_keys = get_sdk(
         cli_ctx, ResourceType.MGMT_STORAGE, 'models.storage_account_keys#StorageAccountKeys')

-    scf.config.enable_http_logger = False
     logger.debug('Disable HTTP logging to avoid having storage keys in debug logs')
     if t_storage_account_keys:
-        return scf.storage_accounts.list_keys(rg, account_name).key1
+        return scf.storage_accounts.list_keys(rg, account_name, logging_enable=False).key1
     # of type: models.storage_account_list_keys_result#StorageAccountListKeysResult
-    return scf.storage_accounts.list_keys(rg, account_name).keys[0].value  # pylint: disable=no-member
+    return scf.storage_accounts.list_keys(rg, account_name, logging_enable=False).keys[0].value  # pylint: disable=no-member


 def _query_account_rg(cli_ctx, account_name):
@@ -1,4 +1,4 @@
 {
     "azext.isPreview": true,
-    "azext.minCliCoreVersion": "2.14.0"
+    "azext.minCliCoreVersion": "2.16.0"
 }
18 changes: 18 additions & 0 deletions src/storage-blob-preview/azext_storage_blob_preview/commands.py
@@ -131,6 +131,24 @@ def get_custom_sdk(custom_module, client_factory, resource_type=ResourceType.DAT
         g.storage_custom_command_oauth('renew', 'renew_blob_lease')
         g.storage_command_oauth('release', 'release')

+    with self.command_group('storage blob service-properties delete-policy', command_type=blob_service_sdk,
+                            min_api='2019-02-02', resource_type=CUSTOM_DATA_STORAGE_BLOB,
+                            custom_command_type=get_custom_sdk('blob', cf_blob_service)) as g:
+        g.storage_command_oauth('show', 'get_service_properties',
+                                transform=lambda x: x.get('delete_retention_policy', x),
+                                exception_handler=show_exception_handler)
+        g.storage_custom_command_oauth('update', 'set_delete_policy')
+
+    with self.command_group('storage blob service-properties', command_type=blob_service_sdk,
+                            custom_command_type=get_custom_sdk('blob', cf_blob_service),
+                            min_api='2019-02-02', resource_type=CUSTOM_DATA_STORAGE_BLOB) as g:
+        from ._transformers import transform_blob_service_properties
+        g.storage_command_oauth(
+            'show', 'get_service_properties', exception_handler=show_exception_handler,
+            transform=transform_blob_service_properties)
+        g.storage_custom_command_oauth('update', 'set_service_properties',
+                                       transform=transform_blob_service_properties)
+
     # --auth-mode login need to verify
     with self.command_group('storage blob tag', command_type=blob_client_sdk,
                             custom_command_type=get_custom_sdk('blob', cf_blob_client),
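The two service-properties groups registered above each expose a show/update pair; the top-level show output is reshaped by transform_blob_service_properties into the camelCased keys listed earlier (cors, deleteRetentionPolicy, hourMetrics, logging, minuteMetrics, staticWebsite). Illustrative invocations of the delete-policy subgroup, with placeholder account details:

    az storage blob service-properties delete-policy show --account-name mystorageaccount --auth-mode login
    az storage blob service-properties delete-policy update --account-name mystorageaccount --auth-mode login --enable true --days-retained 7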
@@ -56,7 +56,7 @@ def set_blob_tier(client, container_name, blob_name, tier, blob_type='block', ti


 def set_delete_policy(client, enable=None, days_retained=None):
-    policy = client.get_blob_service_properties().delete_retention_policy
+    policy = client.get_service_properties()['delete_retention_policy']

     if enable is not None:
         policy.enabled = enable == 'true'
@@ -66,47 +66,35 @@ def set_delete_policy(client, enable=None, days_retained=None):
     if policy.enabled and not policy.days:
         raise CLIError("must specify days-retained")

-    client.set_blob_service_properties(delete_retention_policy=policy)
-    return client.get_blob_service_properties().delete_retention_policy
+    client.set_service_properties(delete_retention_policy=policy)
+    return client.get_service_properties()['delete_retention_policy']


-def set_service_properties(client, parameters, delete_retention=None, delete_retention_period=None,
-                           static_website=None, index_document=None, error_document_404_path=None):
+def set_service_properties(client, delete_retention=None, delete_retention_period=None,
+                           static_website=None, index_document=None, error_document_404_path=None,
+                           default_index_document_path=None, timeout=None):
+    properties = client.get_service_properties()
+
     # update
-    kwargs = {}
-    if hasattr(parameters, 'delete_retention_policy'):
-        kwargs['delete_retention_policy'] = parameters.delete_retention_policy
     if delete_retention is not None:
-        parameters.delete_retention_policy.enabled = delete_retention
+        properties['delete_retention_policy'].enabled = delete_retention
     if delete_retention_period is not None:
-        parameters.delete_retention_policy.days = delete_retention_period
+        properties['delete_retention_policy'].days = delete_retention_period

-    if hasattr(parameters, 'static_website'):
-        kwargs['static_website'] = parameters.static_website
-    elif any(param is not None for param in [static_website, index_document, error_document_404_path]):
-        raise CLIError('Static websites are only supported for StorageV2 (general-purpose v2) accounts.')
     if static_website is not None:
-        parameters.static_website.enabled = static_website
+        properties['static_website'].enabled = static_website
     if index_document is not None:
-        parameters.static_website.index_document = index_document
+        properties['static_website'].index_document = index_document
     if error_document_404_path is not None:
-        parameters.static_website.error_document_404_path = error_document_404_path
-    if hasattr(parameters, 'hour_metrics'):
-        kwargs['hour_metrics'] = parameters.hour_metrics
-    if hasattr(parameters, 'logging'):
-        kwargs['logging'] = parameters.logging
-    if hasattr(parameters, 'minute_metrics'):
-        kwargs['minute_metrics'] = parameters.minute_metrics
-    if hasattr(parameters, 'cors'):
-        kwargs['cors'] = parameters.cors
-
-    # checks
-    policy = kwargs.get('delete_retention_policy', None)
+        properties['static_website'].error_document404_path = error_document_404_path
+    if default_index_document_path is not None:
+        properties['static_website'].default_index_document_path = default_index_document_path
+    policy = properties.get('delete_retention_policy', None)
     if policy and policy.enabled and not policy.days:
         raise CLIError("must specify days-retained")

-    client.set_blob_service_properties(**kwargs)
-    return client.get_blob_service_properties()
+    client.set_service_properties(timeout=timeout, **properties)
+    return client.get_service_properties()


 def storage_blob_copy_batch(cmd, client, source_client, container_name=None,