diff --git a/sdk/storage/azure-storage-blob/samples/blob_samples_containers.py b/sdk/storage/azure-storage-blob/samples/blob_samples_containers.py index db31f8ab8000d..56a5cfc628b7e 100644 --- a/sdk/storage/azure-storage-blob/samples/blob_samples_containers.py +++ b/sdk/storage/azure-storage-blob/samples/blob_samples_containers.py @@ -20,6 +20,7 @@ import os from datetime import datetime, timedelta +from azure.core.exceptions import ResourceExistsError SOURCE_FILE = 'SampleSource.txt' @@ -44,7 +45,7 @@ def container_sample(self): # [START create_container_client_sasurl] from azure.storage.blob import ContainerClient - sas_url = sas_url = "https://account.blob.core.windows.net/mycontainer?sv=2015-04-05&st=2015-04-29T22%3A18%3A26Z&se=2015-04-30T02%3A23%3A26Z&sr=b&sp=rw&sip=168.1.5.60-168.1.5.70&spr=https&sig=Z%2FRHIX5Xcg0Mq2rqI3OlWTjEg2tYkboXr1P9ZUXDtkk%3D" + sas_url = "https://account.blob.core.windows.net/mycontainer?sv=2015-04-05&st=2015-04-29T22%3A18%3A26Z&se=2015-04-30T02%3A23%3A26Z&sr=b&sp=rw&sip=168.1.5.60-168.1.5.70&spr=https&sig=Z%2FRHIX5Xcg0Mq2rqI3OlWTjEg2tYkboXr1P9ZUXDtkk%3D" container = ContainerClient.from_container_url(sas_url) # [END create_container_client_sasurl] diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py index eebc38047f9c3..b5c981b0db66b 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_directory_client.py @@ -37,19 +37,12 @@ class DataLakeDirectoryClient(PathClient): .. admonition:: Example: - .. literalinclude:: ../samples/test_datalake_authentication_samples.py - :start-after: [START create_datalake_service_client] - :end-before: [END create_datalake_service_client] + .. 
literalinclude:: ../samples/datalake_samples_instantiate_client.py + :start-after: [START instantiate_directory_client_from_conn_str] + :end-before: [END instantiate_directory_client_from_conn_str] :language: python - :dedent: 8 - :caption: Creating the DataLakeServiceClient with account url and credential. - - .. literalinclude:: ../samples/test_datalake_authentication_samples.py - :start-after: [START create_datalake_service_client_oauth] - :end-before: [END create_datalake_service_client_oauth] - :language: python - :dedent: 8 - :caption: Creating the DataLakeServiceClient with Azure Identity credentials. + :dedent: 4 + :caption: Creating the DataLakeServiceClient from connection string. """ def __init__( self, account_url, # type: str @@ -95,18 +88,17 @@ def from_connection_string( account_url, file_system_name=file_system_name, directory_name=directory_name, credential=credential, **kwargs) - def create_directory(self, content_settings=None, # type: Optional[ContentSettings] - metadata=None, # type: Optional[Dict[str, str]] + def create_directory(self, metadata=None, # type: Optional[Dict[str, str]] **kwargs): # type: (...) -> Dict[str, Union[str, datetime]] """ Create a new directory. - :param ~azure.storage.filedatalake.ContentSettings content_settings: - ContentSettings object used to set path properties. :param metadata: Name-value pairs associated with the file as metadata. :type metadata: dict(str, str) + :keyword ~azure.storage.filedatalake.ContentSettings content_settings: + ContentSettings object used to set path properties. :keyword lease: Required if the file has an active lease. Value can be a DataLakeLeaseClient object or the lease ID as a string. @@ -146,8 +138,17 @@ def create_directory(self, content_settings=None, # type: Optional[ContentSetti :keyword int timeout: The timeout parameter is expressed in seconds. :return: response dict (Etag and last modified). + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/datalake_samples_directory.py + :start-after: [START create_directory] + :end-before: [END create_directory] + :language: python + :dedent: 8 + :caption: Create directory. """ - return self._create('directory', content_settings=content_settings, metadata=metadata, **kwargs) + return self._create('directory', metadata=metadata, **kwargs) def delete_directory(self, **kwargs): # type: (...) -> None @@ -178,6 +179,15 @@ def delete_directory(self, **kwargs): :keyword int timeout: The timeout parameter is expressed in seconds. :return: None + + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_directory.py + :start-after: [START delete_directory] + :end-before: [END delete_directory] + :language: python + :dedent: 4 + :caption: Delete directory. """ return self._delete(**kwargs) @@ -213,11 +223,11 @@ def get_directory_properties(self, **kwargs): .. admonition:: Example: - .. literalinclude:: ../tests/test_blob_samples_common.py - :start-after: [START get_blob_properties] - :end-before: [END get_blob_properties] + .. literalinclude:: ../samples/datalake_samples_directory.py + :start-after: [START get_directory_properties] + :end-before: [END get_directory_properties] :language: python - :dedent: 8 + :dedent: 4 :caption: Getting the properties for a file/directory. """ blob_properties = self._get_path_properties(**kwargs) @@ -296,6 +306,15 @@ def rename_directory(self, new_name, # type: str :keyword int timeout: The timeout parameter is expressed in seconds. :return: DataLakeDirectoryClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_directory.py + :start-after: [START rename_directory] + :end-before: [END rename_directory] + :language: python + :dedent: 4 + :caption: Rename the source directory. 
""" new_name = new_name.strip('/') new_file_system = new_name.split('/')[0] @@ -312,7 +331,6 @@ def rename_directory(self, new_name, # type: str return new_directory_client def create_sub_directory(self, sub_directory, # type: Union[DirectoryProperties, str] - content_settings=None, # type: Optional[ContentSettings] metadata=None, # type: Optional[Dict[str, str]] **kwargs): # type: (...) -> DataLakeDirectoryClient @@ -323,11 +341,11 @@ def create_sub_directory(self, sub_directory, # type: Union[DirectoryProperties The directory with which to interact. This can either be the name of the directory, or an instance of DirectoryProperties. :type sub_directory: str or ~azure.storage.filedatalake.DirectoryProperties - :param ~azure.storage.filedatalake.ContentSettings content_settings: - ContentSettings object used to set path properties. :param metadata: Name-value pairs associated with the file as metadata. :type metadata: dict(str, str) + :keyword ~azure.storage.filedatalake.ContentSettings content_settings: + ContentSettings object used to set path properties. :keyword lease: Required if the file has an active lease. Value can be a DataLakeLeaseClient object or the lease ID as a string. @@ -369,7 +387,7 @@ def create_sub_directory(self, sub_directory, # type: Union[DirectoryProperties :return: DataLakeDirectoryClient for the subdirectory. """ subdir = self.get_sub_directory_client(sub_directory) - subdir.create_directory(content_settings=content_settings, metadata=metadata, **kwargs) + subdir.create_directory(metadata=metadata, **kwargs) return subdir def delete_sub_directory(self, sub_directory, # type: Union[DirectoryProperties, str] @@ -483,15 +501,6 @@ def get_file_client(self, file # type: Union[FileProperties, str] :type file: str or ~azure.storage.filedatalake.FileProperties :returns: A DataLakeFileClient. :rtype: ~azure.storage.filedatalake..DataLakeFileClient - - .. admonition:: Example: - - .. 
literalinclude:: ../samples/test_datalake_service_samples.py - :start-after: [START bsc_get_file_client] - :end-before: [END bsc_get_file_client] - :language: python - :dedent: 12 - :caption: Getting the file client to interact with a specific file. """ try: file_path = file.name @@ -518,15 +527,6 @@ def get_sub_directory_client(self, sub_directory # type: Union[DirectoryPropert :type sub_directory: str or ~azure.storage.filedatalake.DirectoryProperties :returns: A DataLakeDirectoryClient. :rtype: ~azure.storage.filedatalake.DataLakeDirectoryClient - - .. admonition:: Example: - - .. literalinclude:: ../samples/test_datalake_service_samples.py - :start-after: [START bsc_get_directory_client] - :end-before: [END bsc_get_directory_client] - :language: python - :dedent: 12 - :caption: Getting the directory client to interact with a specific directory. """ try: subdir_path = sub_directory.name diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py index 8f89b2dda23c1..be7dd9fae9d0e 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_file_client.py @@ -44,19 +44,12 @@ class DataLakeFileClient(PathClient): .. admonition:: Example: - .. literalinclude:: ../samples/test_datalake_authentication_samples.py - :start-after: [START create_datalake_service_client] - :end-before: [END create_datalake_service_client] + .. literalinclude:: ../samples/datalake_samples_instantiate_client.py + :start-after: [START instantiate_file_client_from_conn_str] + :end-before: [END instantiate_file_client_from_conn_str] :language: python - :dedent: 8 - :caption: Creating the DataLakeServiceClient with account url and credential. - - .. 
literalinclude:: ../samples/test_datalake_authentication_samples.py - :start-after: [START create_datalake_service_client_oauth] - :end-before: [END create_datalake_service_client_oauth] - :language: python - :dedent: 8 - :caption: Creating the DataLakeServiceClient with Azure Identity credentials. + :dedent: 4 + :caption: Creating the DataLakeServiceClient from connection string. """ def __init__( self, account_url, # type: str @@ -153,6 +146,15 @@ def create_file(self, content_settings=None, # type: Optional[ContentSettings] :keyword int timeout: The timeout parameter is expressed in seconds. :return: response dict (Etag and last modified). + + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_upload_download.py + :start-after: [START create_file] + :end-before: [END create_file] + :language: python + :dedent: 4 + :caption: Create file. """ return self._create('file', content_settings=content_settings, metadata=metadata, **kwargs) @@ -185,6 +187,15 @@ def delete_file(self, **kwargs): :keyword int timeout: The timeout parameter is expressed in seconds. :return: None + + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_upload_download.py + :start-after: [START delete_file] + :end-before: [END delete_file] + :language: python + :dedent: 4 + :caption: Delete file. """ return self._delete(**kwargs) @@ -220,12 +231,12 @@ def get_file_properties(self, **kwargs): .. admonition:: Example: - .. literalinclude:: ../tests/test_blob_samples_common.py - :start-after: [START get_blob_properties] - :end-before: [END get_blob_properties] + .. literalinclude:: ../samples/datalake_samples_upload_download.py + :start-after: [START get_file_properties] + :end-before: [END get_file_properties] :language: python - :dedent: 8 - :caption: Getting the properties for a file/directory. + :dedent: 4 + :caption: Getting the properties for a file. 
""" blob_properties = self._get_path_properties(**kwargs) return FileProperties._from_blob_properties(blob_properties) # pylint: disable=protected-access @@ -382,6 +393,15 @@ def append_data(self, data, # type: Union[AnyStr, Iterable[AnyStr], IO[AnyStr]] or the lease ID as a string. :paramtype lease: ~azure.storage.filedatalake.DataLakeLeaseClient or str :return: dict of the response header + + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_upload_download.py + :start-after: [START append_data] + :end-before: [END append_data] + :language: python + :dedent: 4 + :caption: Append data to the file. """ options = self._append_data_options( data, @@ -464,6 +484,15 @@ def flush_data(self, offset, # type: int :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. :return: response header in dict + + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_file_system.py + :start-after: [START upload_file_to_file_system] + :end-before: [END upload_file_to_file_system] + :language: python + :dedent: 8 + :caption: Commit the previous appended data. """ options = self._flush_data_options( offset, @@ -521,12 +550,12 @@ def read_file(self, offset=None, # type: Optional[int] .. admonition:: Example: - .. literalinclude:: ../tests/test_blob_samples_hello_world.py - :start-after: [START download_a_blob] - :end-before: [END download_a_blob] + .. literalinclude:: ../samples/datalake_samples_upload_download.py + :start-after: [START read_file] + :end-before: [END read_file] :language: python - :dedent: 12 - :caption: Download a blob. + :dedent: 4 + :caption: Return the downloaded data. """ downloader = self._blob_client.download_blob(offset=offset, length=length, **kwargs) if stream: @@ -603,7 +632,17 @@ def rename_file(self, new_name, # type: str The source match condition to use upon the etag. :keyword int timeout: The timeout parameter is expressed in seconds. 
- :return: + :return: the renamed file client + :rtype: DataLakeFileClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_upload_download.py + :start-after: [START rename_file] + :end-before: [END rename_file] + :language: python + :dedent: 4 + :caption: Rename the source file. """ new_name = new_name.strip('/') new_file_system = new_name.split('/')[0] diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.py index 8b8a0acc405a5..bea7f52c17eb0 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_data_lake_service_client.py @@ -46,14 +46,14 @@ class DataLakeServiceClient(StorageAccountHostsMixin): .. admonition:: Example: - .. literalinclude:: ../samples/test_datalake_authentication_samples.py + .. literalinclude:: ../samples/datalake_samples_service.py :start-after: [START create_datalake_service_client] :end-before: [END create_datalake_service_client] :language: python :dedent: 8 - :caption: Creating the DataLakeServiceClient with account url and credential. + :caption: Creating the DataLakeServiceClient from connection string. - .. literalinclude:: ../samples/test_datalake_authentication_samples.py + .. literalinclude:: ../samples/datalake_samples_service.py :start-after: [START create_datalake_service_client_oauth] :end-before: [END create_datalake_service_client_oauth] :language: python @@ -89,6 +89,18 @@ def __init__( # ADLS doesn't support secondary endpoint, make sure it's empty self._hosts[LocationMode.SECONDARY] = "" + def __exit__(self, *args): + self._blob_service_client.close() + super(DataLakeServiceClient, self).__exit__(*args) + + def close(self): + # type: () -> None + """ This method is to close the sockets opened by the client. 
+ It need not be used when using with a context manager. + """ + self._blob_service_client.close() + self.__exit__() + def _format_url(self, hostname): """Format the endpoint URL according to hostname """ @@ -114,6 +126,15 @@ def from_connection_string( Credentials provided here will take precedence over those in the connection string. :return a DataLakeServiceClient :rtype ~azure.storage.filedatalake.DataLakeServiceClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_file_system.py + :start-after: [START create_data_lake_service_client_from_conn_str] + :end-before: [END create_data_lake_service_client_from_conn_str] + :language: python + :dedent: 8 + :caption: Creating the DataLakeServiceClient from a connection string. """ account_url, _, credential = parse_connection_str(conn_str, credential, 'dfs') return cls(account_url, credential=credential, **kwargs) @@ -135,6 +156,15 @@ def get_user_delegation_key(self, key_start_time, # type: datetime The timeout parameter is expressed in seconds. :return: The user delegation key. :rtype: ~azure.storage.filedatalake.UserDelegationKey + + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_service.py + :start-after: [START get_user_delegation_key] + :end-before: [END get_user_delegation_key] + :language: python + :dedent: 8 + :caption: Get user delegation key from datalake service client. """ delegation_key = self._blob_service_client.get_user_delegation_key(key_start_time=key_start_time, key_expiry_time=key_expiry_time, @@ -166,11 +196,11 @@ def list_file_systems(self, name_starts_with=None, # type: Optional[str] .. admonition:: Example: - .. literalinclude:: ../samples/test_datalake_service_samples.py - :start-after: [START dsc_list_file_systems] - :end-before: [END dsc_list_file_systems] + .. 
literalinclude:: ../samples/datalake_samples_service.py + :start-after: [START list_file_systems] + :end-before: [END list_file_systems] :language: python - :dedent: 12 + :dedent: 8 :caption: Listing the file systems in the datalake service. """ item_paged = self._blob_service_client.list_containers(name_starts_with=name_starts_with, @@ -205,11 +235,11 @@ def create_file_system(self, file_system, # type: Union[FileSystemProperties, s .. admonition:: Example: - .. literalinclude:: ../samples/test_datalake_service_samples.py - :start-after: [START dsc_create_file_system] - :end-before: [END dsc_create_file_system] + .. literalinclude:: ../samples/datalake_samples_service.py + :start-after: [START create_file_system_from_service_client] + :end-before: [END create_file_system_from_service_client] :language: python - :dedent: 12 + :dedent: 8 :caption: Creating a file system in the datalake service. """ file_system_client = self.get_file_system_client(file_system) @@ -256,11 +286,11 @@ def delete_file_system(self, file_system, # type: Union[FileSystemProperties, s .. admonition:: Example: - .. literalinclude:: ../samples/test_datalake_service_samples.py - :start-after: [START bsc_delete_file_system] - :end-before: [END bsc_delete_file_system] + .. literalinclude:: ../samples/datalake_samples_service.py + :start-after: [START delete_file_system_from_service_client] + :end-before: [END delete_file_system_from_service_client] :language: python - :dedent: 12 + :dedent: 8 :caption: Deleting a file system in the datalake service. """ file_system_client = self.get_file_system_client(file_system) @@ -283,14 +313,20 @@ def get_file_system_client(self, file_system # type: Union[FileSystemProperties .. admonition:: Example: - .. literalinclude:: ../samples/test_datalake_service_samples.py - :start-after: [START bsc_get_file_system_client] - :end-before: [END bsc_get_file_system_client] + .. 
literalinclude:: ../samples/datalake_samples_file_system.py + :start-after: [START create_file_system_client_from_service] + :end-before: [END create_file_system_client_from_service] :language: python :dedent: 8 :caption: Getting the file system client to interact with a specific file system. """ - return FileSystemClient(self.url, file_system, credential=self._raw_credential, _configuration=self._config, + try: + file_system_name = file_system.name + except AttributeError: + file_system_name = file_system + + return FileSystemClient(self.url, file_system_name, credential=self._raw_credential, + _configuration=self._config, _pipeline=self._pipeline, _hosts=self._hosts, require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function) @@ -316,14 +352,22 @@ def get_directory_client(self, file_system, # type: Union[FileSystemProperties, .. admonition:: Example: - .. literalinclude:: ../samples/test_datalake_service_samples.py - :start-after: [START bsc_get_directory_client] - :end-before: [END bsc_get_directory_client] + .. literalinclude:: ../samples/datalake_samples_service.py + :start-after: [START get_directory_client_from_service_client] + :end-before: [END get_directory_client_from_service_client] :language: python - :dedent: 12 + :dedent: 8 :caption: Getting the directory client to interact with a specific directory. 
""" - return DataLakeDirectoryClient(self.url, file_system, directory_name=directory, + try: + file_system_name = file_system.name + except AttributeError: + file_system_name = file_system + try: + directory_name = directory.name + except AttributeError: + directory_name = directory + return DataLakeDirectoryClient(self.url, file_system_name, directory_name=directory_name, credential=self._raw_credential, _configuration=self._config, _pipeline=self._pipeline, _hosts=self._hosts, @@ -353,20 +397,24 @@ def get_file_client(self, file_system, # type: Union[FileSystemProperties, str] .. admonition:: Example: - .. literalinclude:: ../samples/test_datalake_service_samples.py - :start-after: [START bsc_get_file_client] - :end-before: [END bsc_get_file_client] + .. literalinclude:: ../samples/datalake_samples_service.py + :start-after: [START get_file_client_from_service_client] + :end-before: [END get_file_client_from_service_client] :language: python - :dedent: 12 + :dedent: 8 :caption: Getting the file client to interact with a specific file. 
""" + try: + file_system_name = file_system.name + except AttributeError: + file_system_name = file_system try: file_path = file_path.name except AttributeError: pass return DataLakeFileClient( - self.url, file_system, file_path=file_path, credential=self._raw_credential, + self.url, file_system_name, file_path=file_path, credential=self._raw_credential, _hosts=self._hosts, _configuration=self._config, _pipeline=self._pipeline, require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py index 95ca7b967aa91..c54dab0081845 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_file_system_client.py @@ -49,19 +49,12 @@ class FileSystemClient(StorageAccountHostsMixin): .. admonition:: Example: - .. literalinclude:: ../samples/test_file_system_samples.py + .. literalinclude:: ../samples/datalake_samples_file_system.py :start-after: [START create_file_system_client_from_service] :end-before: [END create_file_system_client_from_service] :language: python :dedent: 8 :caption: Get a FileSystemClient from an existing DataLakeServiceClient. - - .. literalinclude:: ../samples/test_file_system_samples.py - :start-after: [START create_file_system_client_sasurl] - :end-before: [END create_file_system_client_sasurl] - :language: python - :dedent: 8 - :caption: Creating the FileSystemClient client directly. 
""" def __init__( self, account_url, # type: str @@ -113,6 +106,18 @@ def _format_url(self, hostname): quote(file_system_name), self._query_str) + def __exit__(self, *args): + self._container_client.close() + super(FileSystemClient, self).__exit__(*args) + + def close(self): + # type: () -> None + """ This method is to close the sockets opened by the client. + It need not be used when using with a context manager. + """ + self._container_client.close() + self.__exit__() + @classmethod def from_connection_string( cls, conn_str, # type: str @@ -135,6 +140,15 @@ def from_connection_string( Credentials provided here will take precedence over those in the connection string. :return a FileSystemClient :rtype ~azure.storage.filedatalake.FileSystemClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_file_system.py + :start-after: [START create_file_system_client_from_connection_string] + :end-before: [END create_file_system_client_from_connection_string] + :language: python + :dedent: 8 + :caption: Create FileSystemClient from connection string """ account_url, _, credential = parse_connection_str(conn_str, credential, 'dfs') return cls( @@ -183,12 +197,12 @@ def acquire_lease( .. admonition:: Example: - .. literalinclude:: ../samples/test_file_system_samples.py + .. literalinclude:: ../samples/datalake_samples_file_system.py :start-after: [START acquire_lease_on_file_system] :end-before: [END acquire_lease_on_file_system] :language: python :dedent: 8 - :caption: Acquiring a lease on the file_system. + :caption: Acquiring a lease on the file system. """ lease = DataLakeLeaseClient(self, lease_id=lease_id) lease.acquire(lease_duration=lease_duration, **kwargs) @@ -217,7 +231,7 @@ def create_file_system(self, metadata=None, # type: Optional[Dict[str, str]] .. admonition:: Example: - .. literalinclude:: ../samples/test_file_system_samples.py + .. 
literalinclude:: ../samples/datalake_samples_file_system.py :start-after: [START create_file_system] :end-before: [END create_file_system] :language: python @@ -262,7 +276,7 @@ def delete_file_system(self, **kwargs): .. admonition:: Example: - .. literalinclude:: ../samples/test_file_system_samples.py + .. literalinclude:: ../samples/datalake_samples_file_system.py :start-after: [START delete_file_system] :end-before: [END delete_file_system] :language: python @@ -286,7 +300,7 @@ def get_file_system_properties(self, **kwargs): .. admonition:: Example: - .. literalinclude:: ../samples/test_file_system_samples.py + .. literalinclude:: ../samples/datalake_samples_file_system.py :start-after: [START get_file_system_properties] :end-before: [END get_file_system_properties] :language: python @@ -336,12 +350,12 @@ def set_file_system_metadata( # type: ignore .. admonition:: Example: - .. literalinclude:: ../samples/test_file_system_samples.py + .. literalinclude:: ../samples/datalake_samples_file_system.py :start-after: [START set_file_system_metadata] :end-before: [END set_file_system_metadata] :language: python :dedent: 12 - :caption: Setting metadata on the container. + :caption: Setting metadata on the file system. """ return self._container_client.set_container_metadata(metadata=metadata, **kwargs) @@ -436,12 +450,12 @@ def get_paths(self, path=None, # type: Optional[str] .. admonition:: Example: - .. literalinclude:: ../tests/test_blob_samples_containers.py - :start-after: [START list_blobs_in_container] - :end-before: [END list_blobs_in_container] + .. literalinclude:: ../samples/datalake_samples_file_system.py + :start-after: [START get_paths_in_file_system] + :end-before: [END get_paths_in_file_system] :language: python :dedent: 8 - :caption: List the blobs in the container. + :caption: List the paths in the file system. 
""" timeout = kwargs.pop('timeout', None) command = functools.partial( @@ -508,6 +522,15 @@ def create_directory(self, directory, # type: Union[DirectoryProperties, str] :keyword int timeout: The timeout parameter is expressed in seconds. :return: DataLakeDirectoryClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_file_system.py + :start-after: [START create_directory_from_file_system] + :end-before: [END create_directory_from_file_system] + :language: python + :dedent: 8 + :caption: Create directory in the file system. """ directory_client = self.get_directory_client(directory) directory_client.create_directory(metadata=metadata, **kwargs) @@ -547,6 +570,15 @@ def delete_directory(self, directory, # type: Union[DirectoryProperties, str] :keyword int timeout: The timeout parameter is expressed in seconds. :return: DataLakeDirectoryClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_file_system.py + :start-after: [START delete_directory_from_file_system] + :end-before: [END delete_directory_from_file_system] + :language: python + :dedent: 8 + :caption: Delete directory in the file system. """ directory_client = self.get_directory_client(directory) directory_client.delete_directory(**kwargs) @@ -606,6 +638,15 @@ def create_file(self, file, # type: Union[FileProperties, str] :keyword int timeout: The timeout parameter is expressed in seconds. :return: DataLakeFileClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_file_system.py + :start-after: [START create_file_from_file_system] + :end-before: [END create_file_from_file_system] + :language: python + :dedent: 8 + :caption: Create file in the file system. """ file_client = self.get_file_client(file) file_client.create_file(**kwargs) @@ -646,6 +687,15 @@ def delete_file(self, file, # type: Union[FileProperties, str] :keyword int timeout: The timeout parameter is expressed in seconds. 
:return: DataLakeFileClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_file_system.py + :start-after: [START delete_file_from_file_system] + :end-before: [END delete_file_from_file_system] + :language: python + :dedent: 8 + :caption: Delete file in the file system. """ file_client = self.get_file_client(file) file_client.delete_file(lease=lease, **kwargs) @@ -676,14 +726,19 @@ def get_directory_client(self, directory # type: Union[DirectoryProperties, str .. admonition:: Example: - .. literalinclude:: ../samples/test_file_system_samples.py + .. literalinclude:: ../samples/datalake_samples_file_system.py :start-after: [START get_directory_client_from_file_system] :end-before: [END get_directory_client_from_file_system] :language: python - :dedent: 12 + :dedent: 8 :caption: Getting the directory client to interact with a specific directory. """ - return DataLakeDirectoryClient(self.url, self.file_system_name, directory_name=directory, + try: + directory_name = directory.name + except AttributeError: + directory_name = directory + + return DataLakeDirectoryClient(self.url, self.file_system_name, directory_name=directory_name, credential=self._raw_credential, _configuration=self._config, _pipeline=self._pipeline, _hosts=self._hosts, @@ -708,11 +763,11 @@ def get_file_client(self, file_path # type: Union[FileProperties, str] .. admonition:: Example: - .. literalinclude:: ../samples/test_file_system_samples.py + .. literalinclude:: ../samples/datalake_samples_file_system.py :start-after: [START get_file_client_from_file_system] :end-before: [END get_file_client_from_file_system] :language: python - :dedent: 12 + :dedent: 8 :caption: Getting the file client to interact with a specific file. 
""" try: diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_data_lake_storage_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_data_lake_storage_client_async.py index db3e60e91bad8..3f41f1bd7566e 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_data_lake_storage_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_generated/aio/_data_lake_storage_client_async.py @@ -64,3 +64,9 @@ async def __aenter__(self): return self async def __aexit__(self, *exc_details): await self._client.__aexit__(*exc_details) + + async def close(self): + """ This method is to close the sockets opened by the client. + It need not be used when using with a context manager. + """ + await self._client.close() diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py index fc2ffaf27287d..5a279a7195112 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py @@ -80,6 +80,18 @@ def __init__( self._hosts[LocationMode.SECONDARY] = "" self._client = DataLakeStorageClient(self.url, file_system_name, path_name, pipeline=self._pipeline) + def __exit__(self, *args): + self._blob_client.close() + super(PathClient, self).__exit__(*args) + + def close(self): + # type: () -> None + """ This method is to close the sockets opened by the client. + It need not be used when using with a context manager. 
+ """ + self._blob_client.close() + self.__exit__() + def _format_url(self, hostname): file_system_name = self.file_system_name if isinstance(file_system_name, six.text_type): @@ -566,15 +578,6 @@ def set_metadata(self, metadata=None, # type: Optional[Dict[str, str]] :keyword int timeout: The timeout parameter is expressed in seconds. :returns: file system-updated property dict (Etag and last modified). - - .. admonition:: Example: - - .. literalinclude:: ../samples/test_file_system_samples.py - :start-after: [START set_file_system_metadata] - :end-before: [END set_file_system_metadata] - :language: python - :dedent: 12 - :caption: Setting metadata on the container. """ return self._blob_client.set_blob_metadata(metadata=metadata, **kwargs) @@ -653,15 +656,6 @@ def acquire_lease(self, lease_duration=-1, # type: Optional[int] The timeout parameter is expressed in seconds. :returns: A DataLakeLeaseClient object, that can be run in a context manager. :rtype: ~azure.storage.filedatalake.DataLakeLeaseClient - - .. admonition:: Example: - - .. literalinclude:: ../samples/test_file_system_samples.py - :start-after: [START acquire_lease_on_file_system] - :end-before: [END acquire_lease_on_file_system] - :language: python - :dedent: 8 - :caption: Acquiring a lease on the file_system. 
""" lease = DataLakeLeaseClient(self, lease_id=lease_id) # type: ignore lease.acquire(lease_duration=lease_duration, **kwargs) diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py index b2e3c11a5a693..197b7fb79fc0f 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_directory_client_async.py @@ -37,19 +37,12 @@ class DataLakeDirectoryClient(PathClient, DataLakeDirectoryClientBase): .. admonition:: Example: - .. literalinclude:: ../samples/test_datalake_authentication_samples.py - :start-after: [START create_datalake_service_client] - :end-before: [END create_datalake_service_client] + .. literalinclude:: ../samples/datalake_samples_instantiate_client_async.py + :start-after: [START instantiate_directory_client_from_conn_str] + :end-before: [END instantiate_directory_client_from_conn_str] :language: python - :dedent: 8 - :caption: Creating the DataLakeServiceClient with account url and credential. - - .. literalinclude:: ../samples/test_datalake_authentication_samples.py - :start-after: [START create_datalake_service_client_oauth] - :end-before: [END create_datalake_service_client_oauth] - :language: python - :dedent: 8 - :caption: Creating the DataLakeServiceClient with Azure Identity credentials. + :dedent: 4 + :caption: Creating the DataLakeServiceClient from connection string. 
""" def __init__( @@ -63,18 +56,17 @@ def __init__( super(DataLakeDirectoryClient, self).__init__(account_url, file_system_name, directory_name, # pylint: disable=specify-parameter-names-in-call credential=credential, **kwargs) - async def create_directory(self, content_settings=None, # type: Optional[ContentSettings] - metadata=None, # type: Optional[Dict[str, str]] + async def create_directory(self, metadata=None, # type: Optional[Dict[str, str]] **kwargs): # type: (...) -> Dict[str, Union[str, datetime]] """ Create a new directory. - :param ~azure.storage.filedatalake.ContentSettings content_settings: - ContentSettings object used to set path properties. :param metadata: Name-value pairs associated with the directory as metadata. :type metadata: dict(str, str) + :keyword ~azure.storage.filedatalake.ContentSettings content_settings: + ContentSettings object used to set path properties. :keyword lease: Required if the directory has an active lease. Value can be a DataLakeLeaseClient object or the lease ID as a string. @@ -114,8 +106,17 @@ async def create_directory(self, content_settings=None, # type: Optional[Conten :keyword int timeout: The timeout parameter is expressed in seconds. :return: response dict (Etag and last modified). + + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_directory_async.py + :start-after: [START create_directory] + :end-before: [END create_directory] + :language: python + :dedent: 8 + :caption: Create directory. """ - return await self._create('directory', content_settings=content_settings, metadata=metadata, **kwargs) + return await self._create('directory', metadata=metadata, **kwargs) async def delete_directory(self, **kwargs): # type: (...) -> None @@ -146,6 +147,15 @@ async def delete_directory(self, **kwargs): :keyword int timeout: The timeout parameter is expressed in seconds. :return: None + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/datalake_samples_directory_async.py + :start-after: [START delete_directory] + :end-before: [END delete_directory] + :language: python + :dedent: 4 + :caption: Delete directory. """ return await self._delete(**kwargs) @@ -181,11 +191,11 @@ async def get_directory_properties(self, **kwargs): .. admonition:: Example: - .. literalinclude:: ../tests/test_blob_samples_common.py - :start-after: [START get_blob_properties] - :end-before: [END get_blob_properties] + .. literalinclude:: ../samples/datalake_samples_directory_async.py + :start-after: [START get_directory_properties] + :end-before: [END get_directory_properties] :language: python - :dedent: 8 + :dedent: 4 :caption: Getting the properties for a file/directory. """ blob_properties = await self._get_path_properties(**kwargs) @@ -264,6 +274,15 @@ async def rename_directory(self, new_name, # type: str :keyword int timeout: The timeout parameter is expressed in seconds. :return: DataLakeDirectoryClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_directory_async.py + :start-after: [START rename_directory] + :end-before: [END rename_directory] + :language: python + :dedent: 4 + :caption: Rename the source directory. """ new_name = new_name.strip('/') new_file_system = new_name.split('/')[0] @@ -280,7 +299,6 @@ async def rename_directory(self, new_name, # type: str return new_directory_client async def create_sub_directory(self, sub_directory, # type: Union[DirectoryProperties, str] - content_settings=None, # type: Optional[ContentSettings] metadata=None, # type: Optional[Dict[str, str]] **kwargs): # type: (...) -> DataLakeDirectoryClient @@ -291,11 +309,11 @@ async def create_sub_directory(self, sub_directory, # type: Union[DirectoryProp The directory with which to interact. This can either be the name of the directory, or an instance of DirectoryProperties. 
:type sub_directory: str or ~azure.storage.filedatalake.DirectoryProperties - :param ~azure.storage.filedatalake.ContentSettings content_settings: - ContentSettings object used to set path properties. :param metadata: Name-value pairs associated with the file as metadata. :type metadata: dict(str, str) + :keyword ~azure.storage.filedatalake.ContentSettings content_settings: + ContentSettings object used to set path properties. :keyword lease: Required if the file has an active lease. Value can be a DataLakeLeaseClient object or the lease ID as a string. @@ -337,7 +355,7 @@ async def create_sub_directory(self, sub_directory, # type: Union[DirectoryProp :return: DataLakeDirectoryClient for the subdirectory. """ subdir = self.get_sub_directory_client(sub_directory) - await subdir.create_directory(content_settings=content_settings, metadata=metadata, **kwargs) + await subdir.create_directory(metadata=metadata, **kwargs) return subdir async def delete_sub_directory(self, sub_directory, # type: Union[DirectoryProperties, str] diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py index 4087a152a318e..d6efe9d026608 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_file_client_async.py @@ -38,19 +38,12 @@ class DataLakeFileClient(PathClient, DataLakeFileClientBase): .. admonition:: Example: - .. literalinclude:: ../samples/test_datalake_authentication_samples.py - :start-after: [START create_datalake_service_client] - :end-before: [END create_datalake_service_client] + .. 
literalinclude:: ../samples/datalake_samples_instantiate_client_async.py + :start-after: [START instantiate_file_client_from_conn_str] + :end-before: [END instantiate_file_client_from_conn_str] :language: python - :dedent: 8 - :caption: Creating the DataLakeServiceClient with account url and credential. - - .. literalinclude:: ../samples/test_datalake_authentication_samples.py - :start-after: [START create_datalake_service_client_oauth] - :end-before: [END create_datalake_service_client_oauth] - :language: python - :dedent: 8 - :caption: Creating the DataLakeServiceClient with Azure Identity credentials. + :dedent: 4 + :caption: Creating the DataLakeServiceClient from connection string. """ def __init__( @@ -115,6 +108,15 @@ async def create_file(self, content_settings=None, # type: Optional[ContentSett :keyword int timeout: The timeout parameter is expressed in seconds. :return: response dict (Etag and last modified). + + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_upload_download_async.py + :start-after: [START create_file] + :end-before: [END create_file] + :language: python + :dedent: 4 + :caption: Create file. """ return await self._create('file', content_settings=content_settings, metadata=metadata, **kwargs) @@ -147,6 +149,15 @@ async def delete_file(self, **kwargs): :keyword int timeout: The timeout parameter is expressed in seconds. :return: None + + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_upload_download_async.py + :start-after: [START delete_file] + :end-before: [END delete_file] + :language: python + :dedent: 4 + :caption: Delete file. """ return await self._delete(**kwargs) @@ -182,12 +193,12 @@ async def get_file_properties(self, **kwargs): .. admonition:: Example: - .. literalinclude:: ../tests/test_blob_samples_common.py - :start-after: [START get_blob_properties] - :end-before: [END get_blob_properties] + .. 
literalinclude:: ../samples/datalake_samples_upload_download_async.py + :start-after: [START get_file_properties] + :end-before: [END get_file_properties] :language: python - :dedent: 8 - :caption: Getting the properties for a file/directory. + :dedent: 4 + :caption: Getting the properties for a file. """ blob_properties = await self._get_path_properties(**kwargs) return FileProperties._from_blob_properties(blob_properties) # pylint: disable=protected-access @@ -274,6 +285,15 @@ async def append_data(self, data, # type: Union[AnyStr, Iterable[AnyStr], IO[An or the lease ID as a string. :paramtype lease: ~azure.storage.filedatalake.aio.DataLakeLeaseClient or str :return: dict of the response header + + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_upload_download_async.py + :start-after: [START append_data] + :end-before: [END append_data] + :language: python + :dedent: 4 + :caption: Append data to the file. """ options = self._append_data_options( data, @@ -332,6 +352,15 @@ async def flush_data(self, offset, # type: int :keyword ~azure.core.MatchConditions match_condition: The match condition to use upon the etag. :return: response header in dict + + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_file_system_async.py + :start-after: [START upload_file_to_file_system] + :end-before: [END upload_file_to_file_system] + :language: python + :dedent: 12 + :caption: Commit the previous appended data. """ options = self._flush_data_options( offset, @@ -389,12 +418,12 @@ async def read_file(self, offset=None, # type: Optional[int] .. admonition:: Example: - .. literalinclude:: ../tests/test_blob_samples_hello_world.py - :start-after: [START download_a_blob] - :end-before: [END download_a_blob] + .. literalinclude:: ../samples/datalake_samples_upload_download_async.py + :start-after: [START read_file] + :end-before: [END read_file] :language: python - :dedent: 12 - :caption: Download a blob. 
+ :dedent: 4 + :caption: Return the downloaded data. """ downloader = await self._blob_client.download_blob(offset=offset, length=length, **kwargs) if stream: @@ -471,7 +500,17 @@ async def rename_file(self, new_name, # type: str The source match condition to use upon the etag. :keyword int timeout: The timeout parameter is expressed in seconds. - :return: + :return: the renamed file client + :rtype: DataLakeFileClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_upload_download_async.py + :start-after: [START rename_file] + :end-before: [END rename_file] + :language: python + :dedent: 4 + :caption: Rename the source file. """ new_name = new_name.strip('/') new_file_system = new_name.split('/')[0] diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_service_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_service_client_async.py index a2580d19bcb66..205a4ec4f6b5a 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_service_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_data_lake_service_client_async.py @@ -43,18 +43,18 @@ class DataLakeServiceClient(AsyncStorageAccountHostsMixin, DataLakeServiceClient .. admonition:: Example: - .. literalinclude:: ../samples/test_datalake_authentication_samples.py + .. literalinclude:: ../samples/datalake_samples_service_async.py :start-after: [START create_datalake_service_client] :end-before: [END create_datalake_service_client] :language: python - :dedent: 8 - :caption: Creating the DataLakeServiceClient with account url and credential. + :dedent: 4 + :caption: Creating the DataLakeServiceClient from connection string. - .. literalinclude:: ../samples/test_datalake_authentication_samples.py + .. 
literalinclude:: ../samples/datalake_samples_service_async.py :start-after: [START create_datalake_service_client_oauth] :end-before: [END create_datalake_service_client_oauth] :language: python - :dedent: 8 + :dedent: 4 :caption: Creating the DataLakeServiceClient with Azure Identity credentials. """ @@ -75,6 +75,18 @@ def __init__( self._client = DataLakeStorageClient(self.url, None, None, pipeline=self._pipeline) self._loop = kwargs.get('loop', None) + async def __aexit__(self, *args): + await self._blob_service_client.close() + await super(DataLakeServiceClient, self).__aexit__(*args) + + async def close(self): + # type: () -> None + """ This method is to close the sockets opened by the client. + It need not be used when using with a context manager. + """ + await self._blob_service_client.close() + await self.__aexit__() + async def get_user_delegation_key(self, key_start_time, # type: datetime key_expiry_time, # type: datetime **kwargs # type: Any @@ -92,6 +104,15 @@ async def get_user_delegation_key(self, key_start_time, # type: datetime The timeout parameter is expressed in seconds. :return: The user delegation key. :rtype: ~azure.storage.filedatalake.UserDelegationKey + + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_service_async.py + :start-after: [START get_user_delegation_key] + :end-before: [END get_user_delegation_key] + :language: python + :dedent: 8 + :caption: Get user delegation key from datalake service client. """ delegation_key = await self._blob_service_client.get_user_delegation_key( key_start_time=key_start_time, @@ -124,11 +145,11 @@ def list_file_systems(self, name_starts_with=None, # type: Optional[str] .. admonition:: Example: - .. literalinclude:: ../samples/test_datalake_service_samples.py - :start-after: [START dsc_list_file_systems] - :end-before: [END dsc_list_file_systems] + .. 
literalinclude:: ../samples/datalake_samples_service_async.py + :start-after: [START list_file_systems] + :end-before: [END list_file_systems] :language: python - :dedent: 12 + :dedent: 8 :caption: Listing the file systems in the datalake service. """ item_paged = self._blob_service_client.list_containers(name_starts_with=name_starts_with, @@ -163,11 +184,11 @@ async def create_file_system(self, file_system, # type: Union[FileSystemPropert .. admonition:: Example: - .. literalinclude:: ../samples/test_datalake_service_samples.py - :start-after: [START dsc_create_file_system] - :end-before: [END dsc_create_file_system] + .. literalinclude:: ../samples/datalake_samples_service_async.py + :start-after: [START create_file_system_from_service_client] + :end-before: [END create_file_system_from_service_client] :language: python - :dedent: 12 + :dedent: 8 :caption: Creating a file system in the datalake service. """ file_system_client = self.get_file_system_client(file_system) @@ -214,11 +235,11 @@ async def delete_file_system(self, file_system, # type: Union[FileSystemPropert .. admonition:: Example: - .. literalinclude:: ../samples/test_datalake_service_samples.py - :start-after: [START bsc_delete_file_system] - :end-before: [END bsc_delete_file_system] + .. literalinclude:: ../samples/datalake_samples_service_async.py + :start-after: [START delete_file_system_from_service_client] + :end-before: [END delete_file_system_from_service_client] :language: python - :dedent: 12 + :dedent: 8 :caption: Deleting a file system in the datalake service. """ file_system_client = self.get_file_system_client(file_system) @@ -241,14 +262,20 @@ def get_file_system_client(self, file_system # type: Union[FileSystemProperties .. admonition:: Example: - .. literalinclude:: ../samples/test_datalake_service_samples.py - :start-after: [START bsc_get_file_system_client] - :end-before: [END bsc_get_file_system_client] + .. 
literalinclude:: ../samples/datalake_samples_file_system_async.py + :start-after: [START create_file_system_client_from_service] + :end-before: [END create_file_system_client_from_service] :language: python :dedent: 8 :caption: Getting the file system client to interact with a specific file system. """ - return FileSystemClient(self.url, file_system, credential=self._raw_credential, _configuration=self._config, + try: + file_system_name = file_system.name + except AttributeError: + file_system_name = file_system + + return FileSystemClient(self.url, file_system_name, credential=self._raw_credential, + _configuration=self._config, _pipeline=self._pipeline, _hosts=self._hosts, require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, key_resolver_function=self.key_resolver_function) @@ -274,14 +301,22 @@ def get_directory_client(self, file_system, # type: Union[FileSystemProperties, .. admonition:: Example: - .. literalinclude:: ../samples/test_datalake_service_samples.py - :start-after: [START bsc_get_directory_client] - :end-before: [END bsc_get_directory_client] + .. literalinclude:: ../samples/datalake_samples_service_async.py + :start-after: [START get_directory_client_from_service_client] + :end-before: [END get_directory_client_from_service_client] :language: python - :dedent: 12 + :dedent: 8 :caption: Getting the directory client to interact with a specific directory. 
""" - return DataLakeDirectoryClient(self.url, file_system, directory_name=directory, + try: + file_system_name = file_system.name + except AttributeError: + file_system_name = file_system + try: + directory_name = directory.name + except AttributeError: + directory_name = directory + return DataLakeDirectoryClient(self.url, file_system_name, directory_name=directory_name, credential=self._raw_credential, _configuration=self._config, _pipeline=self._pipeline, _hosts=self._hosts, @@ -311,20 +346,24 @@ def get_file_client(self, file_system, # type: Union[FileSystemProperties, str] .. admonition:: Example: - .. literalinclude:: ../samples/test_datalake_service_samples.py - :start-after: [START bsc_get_file_client] - :end-before: [END bsc_get_file_client] + .. literalinclude:: ../samples/datalake_samples_service_async.py + :start-after: [START get_file_client_from_service_client] + :end-before: [END get_file_client_from_service_client] :language: python - :dedent: 12 + :dedent: 8 :caption: Getting the file client to interact with a specific file. 
""" + try: + file_system_name = file_system.name + except AttributeError: + file_system_name = file_system try: file_path = file_path.name except AttributeError: pass return DataLakeFileClient( - self.url, file_system, file_path=file_path, credential=self._raw_credential, + self.url, file_system_name, file_path=file_path, credential=self._raw_credential, _hosts=self._hosts, _configuration=self._config, _pipeline=self._pipeline, require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key, diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_file_system_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_file_system_client_async.py index 10b8eb56c6d99..0547888a66752 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_file_system_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_file_system_client_async.py @@ -10,6 +10,8 @@ Union, Optional, Any, Dict, TYPE_CHECKING ) +from azure.core.tracing.decorator import distributed_trace + from azure.core.async_paging import AsyncItemPaged from azure.core.tracing.decorator_async import distributed_trace_async @@ -55,21 +57,14 @@ class FileSystemClient(AsyncStorageAccountHostsMixin, FileSystemClientBase): shared access key, or an instance of a TokenCredentials class from azure.identity. If the URL already has a SAS token, specifying an explicit credential will take priority. - .. admonition:: Example: - - .. literalinclude:: ../samples/test_file_system_samples.py - :start-after: [START create_file_system_client_from_service] - :end-before: [END create_file_system_client_from_service] - :language: python - :dedent: 8 - :caption: Get a FileSystemClient from an existing DataLakeServiceClient. - - .. 
literalinclude:: ../samples/test_file_system_samples.py - :start-after: [START create_file_system_client_sasurl] - :end-before: [END create_file_system_client_sasurl] - :language: python - :dedent: 8 - :caption: Creating the FileSystemClient client directly. + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_file_system_async.py + :start-after: [START create_file_system_client_from_service] + :end-before: [END create_file_system_client_from_service] + :language: python + :dedent: 8 + :caption: Get a FileSystemClient from an existing DataLakeServiceClient. """ def __init__( @@ -94,6 +89,18 @@ def __init__( self._client = DataLakeStorageClient(self.url, file_system_name, None, pipeline=self._pipeline) self._loop = kwargs.get('loop', None) + async def __aexit__(self, *args): + await self._container_client.close() + await super(FileSystemClient, self).__aexit__(*args) + + async def close(self): + # type: () -> None + """ This method is to close the sockets opened by the client. + It need not be used when using with a context manager. + """ + await self._container_client.close() + await self.__aexit__() + @distributed_trace_async async def acquire_lease( self, lease_duration=-1, # type: int @@ -138,17 +145,18 @@ async def acquire_lease( .. admonition:: Example: - .. literalinclude:: ../samples/test_file_system_samples.py + .. literalinclude:: ../samples/datalake_samples_file_system_async.py :start-after: [START acquire_lease_on_file_system] :end-before: [END acquire_lease_on_file_system] :language: python - :dedent: 8 + :dedent: 12 :caption: Acquiring a lease on the file_system. 
""" lease = DataLakeLeaseClient(self, lease_id=lease_id) - lease.acquire(lease_duration=lease_duration, **kwargs) + await lease.acquire(lease_duration=lease_duration, **kwargs) return lease + @distributed_trace_async async def create_file_system(self, metadata=None, # type: Optional[Dict[str, str]] public_access=None, # type: Optional[PublicAccess] **kwargs): @@ -172,17 +180,18 @@ async def create_file_system(self, metadata=None, # type: Optional[Dict[str, st .. admonition:: Example: - .. literalinclude:: ../samples/test_file_system_samples.py + .. literalinclude:: ../samples/datalake_samples_file_system_async.py :start-after: [START create_file_system] :end-before: [END create_file_system] :language: python - :dedent: 12 + :dedent: 16 :caption: Creating a file system in the datalake service. """ return await self._container_client.create_container(metadata=metadata, public_access=public_access, **kwargs) + @distributed_trace_async async def delete_file_system(self, **kwargs): # type: (Any) -> None """Marks the specified file system for deletion. @@ -218,15 +227,16 @@ async def delete_file_system(self, **kwargs): .. admonition:: Example: - .. literalinclude:: ../samples/test_file_system_samples.py + .. literalinclude:: ../samples/datalake_samples_file_system_async.py :start-after: [START delete_file_system] :end-before: [END delete_file_system] :language: python - :dedent: 12 + :dedent: 16 :caption: Deleting a file system in the datalake service. """ await self._container_client.delete_container(**kwargs) + @distributed_trace_async async def get_file_system_properties(self, **kwargs): # type: (Any) -> FileSystemProperties """Returns all user-defined metadata and system properties for the specified @@ -243,16 +253,17 @@ async def get_file_system_properties(self, **kwargs): .. admonition:: Example: - .. literalinclude:: ../samples/test_file_system_samples.py + .. 
literalinclude:: ../samples/datalake_samples_file_system_async.py :start-after: [START get_file_system_properties] :end-before: [END get_file_system_properties] :language: python - :dedent: 12 + :dedent: 16 :caption: Getting properties on the file system. """ container_properties = await self._container_client.get_container_properties(**kwargs) return FileSystemProperties._convert_from_container_props(container_properties) # pylint: disable=protected-access + @distributed_trace_async async def set_file_system_metadata( # type: ignore self, metadata=None, # type: Optional[Dict[str, str]] **kwargs @@ -294,15 +305,16 @@ async def set_file_system_metadata( # type: ignore .. admonition:: Example: - .. literalinclude:: ../samples/test_file_system_samples.py + .. literalinclude:: ../samples/datalake_samples_file_system_async.py :start-after: [START set_file_system_metadata] :end-before: [END set_file_system_metadata] :language: python - :dedent: 12 + :dedent: 16 :caption: Setting metadata on the container. """ return await self._container_client.set_container_metadata(metadata=metadata, **kwargs) + @distributed_trace_async async def set_file_system_access_policy( self, signed_identifiers, # type: Dict[str, AccessPolicy] public_access=None, # type: Optional[Union[str, PublicAccess]] @@ -343,6 +355,7 @@ async def set_file_system_access_policy( return await self._container_client.set_container_access_policy(signed_identifiers, public_access=public_access, **kwargs) + @distributed_trace_async async def get_file_system_access_policy(self, **kwargs): # type: (Any) -> Dict[str, Any] """Gets the permissions for the specified file system. 
@@ -363,6 +376,7 @@ async def get_file_system_access_policy(self, **kwargs): 'signed_identifiers': access_policy['signed_identifiers'] } + @distributed_trace def get_paths(self, path=None, # type: Optional[str] recursive=True, # type: Optional[bool] max_results=None, # type: Optional[int] @@ -395,12 +409,12 @@ def get_paths(self, path=None, # type: Optional[str] .. admonition:: Example: - .. literalinclude:: ../tests/test_blob_samples_containers.py - :start-after: [START list_blobs_in_container] - :end-before: [END list_blobs_in_container] + .. literalinclude:: ../samples/datalake_samples_file_system_async.py + :start-after: [START get_paths_in_file_system] + :end-before: [END get_paths_in_file_system] :language: python - :dedent: 8 - :caption: List the blobs in the container. + :dedent: 12 + :caption: List the blobs in the file system. """ timeout = kwargs.pop('timeout', None) command = functools.partial( @@ -412,6 +426,7 @@ def get_paths(self, path=None, # type: Optional[str] command, recursive, path=path, max_results=max_results, page_iterator_class=PathPropertiesPaged, **kwargs) + @distributed_trace_async async def create_directory(self, directory, # type: Union[DirectoryProperties, str] metadata=None, # type: Optional[Dict[str, str]] **kwargs): @@ -467,11 +482,21 @@ async def create_directory(self, directory, # type: Union[DirectoryProperties, :keyword int timeout: The timeout parameter is expressed in seconds. :return: DataLakeDirectoryClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_file_system_async.py + :start-after: [START create_directory_from_file_system] + :end-before: [END create_directory_from_file_system] + :language: python + :dedent: 12 + :caption: Create directory in the file system. 
""" directory_client = self.get_directory_client(directory) await directory_client.create_directory(metadata=metadata, **kwargs) return directory_client + @distributed_trace_async async def delete_directory(self, directory, # type: Union[DirectoryProperties, str] **kwargs): # type: (...) -> DataLakeDirectoryClient @@ -506,11 +531,21 @@ async def delete_directory(self, directory, # type: Union[DirectoryProperties, :keyword int timeout: The timeout parameter is expressed in seconds. :return: DataLakeDirectoryClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_file_system_async.py + :start-after: [START delete_directory_from_file_system] + :end-before: [END delete_directory_from_file_system] + :language: python + :dedent: 12 + :caption: Delete directory in the file system. """ directory_client = self.get_directory_client(directory) await directory_client.delete_directory(**kwargs) return directory_client + @distributed_trace_async async def create_file(self, file, # type: Union[FileProperties, str] **kwargs): # type: (...) -> DataLakeFileClient @@ -565,11 +600,21 @@ async def create_file(self, file, # type: Union[FileProperties, str] :keyword int timeout: The timeout parameter is expressed in seconds. :return: DataLakeFileClient + + .. admonition:: Example: + + .. literalinclude:: ../samples/datalake_samples_file_system_async.py + :start-after: [START create_file_from_file_system] + :end-before: [END create_file_from_file_system] + :language: python + :dedent: 12 + :caption: Create file in the file system. """ file_client = self.get_file_client(file) await file_client.create_file(**kwargs) return file_client + @distributed_trace_async async def delete_file(self, file, # type: Union[FileProperties, str] lease=None, # type: Optional[Union[DataLakeLeaseClient, str]] **kwargs): @@ -605,6 +650,13 @@ async def delete_file(self, file, # type: Union[FileProperties, str] :keyword int timeout: The timeout parameter is expressed in seconds. 
:return: DataLakeFileClient + + .. literalinclude:: ../samples/datalake_samples_file_system_async.py + :start-after: [START delete_file_from_file_system] + :end-before: [END delete_file_from_file_system] + :language: python + :dedent: 12 + :caption: Delete file in the file system. """ file_client = self.get_file_client(file) await file_client.delete_file(lease=lease, **kwargs) @@ -635,14 +687,19 @@ def get_directory_client(self, directory # type: Union[DirectoryProperties, str .. admonition:: Example: - .. literalinclude:: ../samples/test_file_system_samples.py + .. literalinclude:: ../samples/datalake_samples_file_system_async.py :start-after: [START get_directory_client_from_file_system] :end-before: [END get_directory_client_from_file_system] :language: python :dedent: 12 :caption: Getting the directory client to interact with a specific directory. """ - return DataLakeDirectoryClient(self.url, self.file_system_name, directory_name=directory, + try: + directory_name = directory.name + except AttributeError: + directory_name = directory + + return DataLakeDirectoryClient(self.url, self.file_system_name, directory_name=directory_name, credential=self._raw_credential, _configuration=self._config, _pipeline=self._pipeline, _hosts=self._hosts, @@ -668,7 +725,7 @@ def get_file_client(self, file_path # type: Union[FileProperties, str] .. admonition:: Example: - .. literalinclude:: ../samples/test_file_system_samples.py + .. 
literalinclude:: ../samples/datalake_samples_file_system_async.py :start-after: [START get_file_client_from_file_system] :end-before: [END get_file_client_from_file_system] :language: python diff --git a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py index 657b5cf15c751..798cc6dbf525b 100644 --- a/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py +++ b/sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/aio/_path_client_async.py @@ -39,6 +39,18 @@ def __init__( self._client = DataLakeStorageClient(self.url, file_system_name, path_name, pipeline=self._pipeline) self._loop = kwargs.get('loop', None) + async def __aexit__(self, *args): + await self._blob_client.close() + await super(PathClient, self).__aexit__(*args) + + async def close(self): + # type: () -> None + """ This method is to close the sockets opened by the client. + It need not be used when using with a context manager. + """ + await self._blob_client.close() + await self.__aexit__() + async def _create(self, resource_type, content_settings=None, metadata=None, **kwargs): # type: (...) -> Dict[str, Union[str, datetime]] """ @@ -359,15 +371,6 @@ async def _get_path_properties(self, **kwargs): :keyword int timeout: The timeout parameter is expressed in seconds. :rtype: DirectoryProperties or FileProperties - - .. admonition:: Example: - - .. literalinclude:: ../tests/test_blob_samples_common.py - :start-after: [START get_blob_properties] - :end-before: [END get_blob_properties] - :language: python - :dedent: 8 - :caption: Getting the properties for a file/directory. 
""" path_properties = await self._blob_client.get_blob_properties(**kwargs) path_properties.__class__ = DirectoryProperties @@ -409,15 +412,6 @@ async def set_metadata(self, metadata=None, # type: Optional[Dict[str, str]] :keyword int timeout: The timeout parameter is expressed in seconds. :returns: file system-updated property dict (Etag and last modified). - - .. admonition:: Example: - - .. literalinclude:: ../samples/test_file_system_samples.py - :start-after: [START set_file_system_metadata] - :end-before: [END set_file_system_metadata] - :language: python - :dedent: 12 - :caption: Setting metadata on the container. """ return await self._blob_client.set_blob_metadata(metadata=metadata, **kwargs) diff --git a/sdk/storage/azure-storage-file-datalake/samples/README.md b/sdk/storage/azure-storage-file-datalake/samples/README.md index b0dc9589e2b5b..7114ae2bdebb1 100644 --- a/sdk/storage/azure-storage-file-datalake/samples/README.md +++ b/sdk/storage/azure-storage-file-datalake/samples/README.md @@ -14,6 +14,13 @@ These are code samples that show common scenario operations with the Azure DataL Several DataLake Storage Python SDK samples are available to you in the SDK's GitHub repository. 
These samples provide example code for additional scenarios commonly encountered while working with DataLake Storage: +* [`datalake_samples_service.py`](https://github.com/Azure/azure-sdk-for-python/tree/master/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_service.py) - Examples for authenticating and operating on the client: + * Instantiate DataLakeServiceClient using connection str + * Instantiate DataLakeServiceClient using AAD Credential + * Get user delegation key + * Create all kinds of clients from DataLakeServiceClient and operate on those clients + * List file systems + * [`datalake_samples_access_control.py`](https://github.com/Azure/azure-sdk-for-python/tree/master/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_access_control.py) - Examples for common DataLake Storage tasks: * Set up a file system * Create a directory diff --git a/sdk/storage/azure-storage-file-datalake/samples/SampleSource.txt b/sdk/storage/azure-storage-file-datalake/samples/SampleSource.txt new file mode 100644 index 0000000000000..df46cce3a8c0d --- /dev/null +++ b/sdk/storage/azure-storage-file-datalake/samples/SampleSource.txt @@ -0,0 +1 @@ +Lorem ipsum dolor sit amet, consectetur adipiscing elit \ No newline at end of file diff --git a/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_access_control.py b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_access_control.py index 356229c73c303..5299c15c643f4 100644 --- a/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_access_control.py +++ b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_access_control.py @@ -1,3 +1,22 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +""" +FILE: datalake_samples_access_control.py +DESCRIPTION: + This sample demonstrates set/get access control on directories and files. +USAGE: + python datalake_samples_access_control.py + Set the environment variables with your own values before running the sample: + 1) STORAGE_ACCOUNT_NAME - the storage account name + 2) STORAGE_ACCOUNT_KEY - the storage account key +""" + import os import random import uuid diff --git a/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_access_control_async.py b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_access_control_async.py new file mode 100644 index 0000000000000..0e369386f24a0 --- /dev/null +++ b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_access_control_async.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +""" +FILE: datalake_samples_access_control_async.py +DESCRIPTION: + This sample demonstrates set/get access control on directories and files. 
+USAGE: + python datalake_samples_access_control_async.py + Set the environment variables with your own values before running the sample: + 1) STORAGE_ACCOUNT_NAME - the storage account name + 2) STORAGE_ACCOUNT_KEY - the storage account key +""" + +import asyncio +import os +import random +import uuid + +from azure.storage.filedatalake.aio import ( + DataLakeServiceClient, +) + + +async def access_control_sample(filesystem_client): + # create a parent directory + dir_name = "testdir" + print("Creating a directory named '{}'.".format(dir_name)) + directory_client = await filesystem_client.create_directory(dir_name) + + # populate the directory with some child files + await create_child_files(directory_client, 35) + + # get and display the permissions of the parent directory + acl_props = await directory_client.get_access_control() + print("Permissions of directory '{}' are {}.".format(dir_name, acl_props['permissions'])) + + # set the permissions of the parent directory + new_dir_permissions = 'rwx------' + await directory_client.set_access_control(permissions=new_dir_permissions) + + # get and display the permissions of the parent directory again + acl_props = await directory_client.get_access_control() + print("New permissions of directory '{}' are {}.".format(dir_name, acl_props['permissions'])) + + # iterate through every file and set their permissions to match the directory + async for file in filesystem_client.get_paths(dir_name): + file_client = filesystem_client.get_file_client(file.name) + + # get the access control properties of the file + acl_props = await file_client.get_access_control() + + if acl_props['permissions'] != new_dir_permissions: + await file_client.set_access_control(permissions=new_dir_permissions) + print("Set the permissions of file '{}' to {}.".format(file.name, new_dir_permissions)) + else: + print("Permission for file '{}' already matches the parent.".format(file.name)) + + +async def create_child_files(directory_client, 
num_child_files): + import itertools + + async def create_file(): + # generate a random name + file_name = str(uuid.uuid4()).replace('-', '') + file_client = directory_client.get_file_client(file_name) + await file_client.create_file() + + futures = [asyncio.ensure_future(create_file()) for _ in itertools.repeat(None, num_child_files)] + await asyncio.wait(futures) + print("Created {} files under the directory '{}'.".format(num_child_files, directory_client.path_name)) + + +async def run(): + account_name = os.getenv('STORAGE_ACCOUNT_NAME', "") + account_key = os.getenv('STORAGE_ACCOUNT_KEY', "") + + # set up the service client with the credentials from the environment variables + service_client = DataLakeServiceClient(account_url="{}://{}.dfs.core.windows.net".format( + "https", + account_name + ), credential=account_key) + + async with service_client: + # generate a random name for testing purpose + fs_name = "testfs{}".format(random.randint(1, 1000)) + print("Generating a test filesystem named '{}'.".format(fs_name)) + + # create the filesystem + filesystem_client = await service_client.create_file_system(file_system=fs_name) + + # invoke the sample code + try: + await access_control_sample(filesystem_client) + finally: + # clean up the demo filesystem + await filesystem_client.delete_file_system() + + +if __name__ == '__main__': + loop = asyncio.get_event_loop() + loop.run_until_complete(run()) diff --git a/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_directory.py b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_directory.py index c05be0ca35ed5..536141b108dbd 100644 --- a/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_directory.py +++ b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_directory.py @@ -1,7 +1,28 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +""" +FILE: datalake_samples_directory.py +DESCRIPTION: + This sample demonstrates create directory, rename directory, get directory properties, delete directory etc. +USAGE: + python datalake_samples_directory.py + Set the environment variables with your own values before running the sample: + 1) STORAGE_ACCOUNT_NAME - the storage account name + 2) STORAGE_ACCOUNT_KEY - the storage account key +""" + import os import random import uuid +from azure.core.exceptions import ResourceExistsError + from azure.storage.filedatalake import ( DataLakeServiceClient, ) @@ -11,24 +32,41 @@ def directory_sample(filesystem_client): # create a parent directory dir_name = "testdir" print("Creating a directory named '{}'.".format(dir_name)) - directory_client = filesystem_client.create_directory(dir_name) + + # Create directory from file system client + filesystem_client.create_directory(dir_name) + + directory_client = filesystem_client.get_directory_client(dir_name) + try: + # Create the existing directory again will throw exception + # [START create_directory] + directory_client.create_directory() + # [END create_directory] + except ResourceExistsError: + pass # populate the directory with some child files create_child_files(directory_client, 35) # rename the directory + # [START rename_directory] new_dir_name = "testdir2" print("Renaming the directory named '{}' to '{}'.".format(dir_name, new_dir_name)) new_directory = directory_client\ .rename_directory(new_name=directory_client.file_system_name + '/' + new_dir_name) + # [END rename_directory] # display the properties of the new directory to make sure it was renamed successfully + # [START get_directory_properties] props = new_directory.get_directory_properties() + # [END get_directory_properties] print("Properties of the new directory named '{}' are: 
{}.".format(new_dir_name, props)) # remove the newly renamed directory print("Removing the directory named '{}'.".format(new_dir_name)) + # [START delete_directory] new_directory.delete_directory() + # [END delete_directory] def create_child_files(directory_client, num_child_files): diff --git a/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_directory_async.py b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_directory_async.py new file mode 100644 index 0000000000000..b2df75ddc1d4b --- /dev/null +++ b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_directory_async.py @@ -0,0 +1,115 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +""" +FILE: datalake_samples_directory_async.py +DESCRIPTION: + This sample demonstrates create directory, rename directory, get directory properties, delete directory etc. 
+USAGE: + python datalake_samples_directory_async.py + Set the environment variables with your own values before running the sample: + 1) STORAGE_ACCOUNT_NAME - the storage account name + 2) STORAGE_ACCOUNT_KEY - the storage account key +""" + +import asyncio +import os +import random +import uuid + +from azure.core.exceptions import ResourceExistsError + +from azure.storage.filedatalake.aio import ( + DataLakeServiceClient, +) + + +async def directory_sample(filesystem_client): + # create a parent directory + dir_name = "testdir" + print("Creating a directory named '{}'.".format(dir_name)) + + # Create directory from file system client + await filesystem_client.create_directory(dir_name) + + directory_client = filesystem_client.get_directory_client(dir_name) + try: + # Create the existing directory again will throw exception + # [START create_directory] + await directory_client.create_directory() + # [END create_directory] + except ResourceExistsError: + pass + + # populate the directory with some child files + await create_child_files(directory_client, 35) + + # rename the directory + # [START rename_directory] + new_dir_name = "testdir2" + print("Renaming the directory named '{}' to '{}'.".format(dir_name, new_dir_name)) + new_directory = await directory_client\ + .rename_directory(new_name=directory_client.file_system_name + '/' + new_dir_name) + # [END rename_directory] + + # display the properties of the new directory to make sure it was renamed successfully + # [START get_directory_properties] + props = await new_directory.get_directory_properties() + # [END get_directory_properties] + print("Properties of the new directory named '{}' are: {}.".format(new_dir_name, props)) + + # remove the newly renamed directory + print("Removing the directory named '{}'.".format(new_dir_name)) + # [START delete_directory] + await new_directory.delete_directory() + # [END delete_directory] + + +async def create_child_files(directory_client, num_child_files): + import 
itertools + # Create the files concurrently as asyncio tasks because creating them sequentially is too slow + + async def create_file(): + # generate a random name + file_name = str(uuid.uuid4()).replace('-', '') + file_client = directory_client.get_file_client(file_name) + await file_client.create_file() + + futures = [asyncio.ensure_future(create_file()) for _ in itertools.repeat(None, num_child_files)] + await asyncio.wait(futures) + print("Created {} files under the directory '{}'.".format(num_child_files, directory_client.path_name)) + + + async def run(): + account_name = os.getenv('STORAGE_ACCOUNT_NAME', "") + account_key = os.getenv('STORAGE_ACCOUNT_KEY', "") + + # set up the service client with the credentials from the environment variables + service_client = DataLakeServiceClient(account_url="{}://{}.dfs.core.windows.net".format( + "https", + account_name + ), credential=account_key) + + async with service_client: + # generate a random name for testing purpose + fs_name = "testfs{}".format(random.randint(1, 1000)) + print("Generating a test filesystem named '{}'.".format(fs_name)) + + # create the filesystem + filesystem_client = await service_client.create_file_system(file_system=fs_name) + + # invoke the sample code + try: + await directory_sample(filesystem_client) + finally: + # clean up the demo filesystem + await filesystem_client.delete_file_system() + +if __name__ == '__main__': + loop = asyncio.get_event_loop() + loop.run_until_complete(run()) diff --git a/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_file_system.py b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_file_system.py new file mode 100644 index 0000000000000..0bd0cf256dd9b --- /dev/null +++ b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_file_system.py @@ -0,0 +1,219 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +""" +FILE: datalake_samples_file_system.py +DESCRIPTION: + This sample demonstrates common file system operations including list paths, create a file system, + set metadata etc. +USAGE: + python datalake_samples_file_system.py + Set the environment variables with your own values before running the sample: + 1) AZURE_STORAGE_CONNECTION_STRING - the connection string to your storage account +""" + +import os + +from azure.core.exceptions import ResourceExistsError + +SOURCE_FILE = 'SampleSource.txt' + + +class FileSystemSamples(object): + + connection_string = os.environ['AZURE_STORAGE_CONNECTION_STRING'] + + #--Begin File System Samples----------------------------------------------------------------- + + def file_system_sample(self): + + # [START create_file_system_client_from_service] + # Instantiate a DataLakeServiceClient using a connection string + from azure.storage.filedatalake import DataLakeServiceClient + datalake_service_client = DataLakeServiceClient.from_connection_string(self.connection_string) + + # Instantiate a FileSystemClient + file_system_client = datalake_service_client.get_file_system_client("mynewfilesystem") + # [END create_file_system_client_from_service] + + try: + # [START create_file_system] + file_system_client.create_file_system() + # [END create_file_system] + + # [START get_file_system_properties] + properties = file_system_client.get_file_system_properties() + # [END get_file_system_properties] + + finally: + # [START delete_file_system] + file_system_client.delete_file_system() + # [END delete_file_system] + + def acquire_lease_on_file_system(self): + + # Instantiate a DataLakeServiceClient using a connection string + # [START create_data_lake_service_client_from_conn_str] + from azure.storage.filedatalake import DataLakeServiceClient + datalake_service_client = 
DataLakeServiceClient.from_connection_string(self.connection_string) + # [END create_data_lake_service_client_from_conn_str] + + # Instantiate a FileSystemClient + file_system_client = datalake_service_client.get_file_system_client("myleasefilesystem") + + # Create new File System + try: + file_system_client.create_file_system() + except ResourceExistsError: + pass + + # [START acquire_lease_on_file_system] + # Acquire a lease on the file system + lease = file_system_client.acquire_lease() + + # Delete file system by passing in the lease + file_system_client.delete_file_system(lease=lease) + # [END acquire_lease_on_file_system] + + def set_metadata_on_file_system(self): + + # Instantiate a DataLakeServiceClient using a connection string + from azure.storage.filedatalake import DataLakeServiceClient + datalake_service_client = DataLakeServiceClient.from_connection_string(self.connection_string) + + # Instantiate a FileSystemClient + file_system_client = datalake_service_client.get_file_system_client("mymetadatafilesystemsync") + + try: + # Create new File System + file_system_client.create_file_system() + + # [START set_file_system_metadata] + # Create key, value pairs for metadata + metadata = {'type': 'test'} + + # Set metadata on the file system + file_system_client.set_file_system_metadata(metadata=metadata) + # [END set_file_system_metadata] + + # Get file system properties + properties = file_system_client.get_file_system_properties() + + finally: + # Delete file system + file_system_client.delete_file_system() + + def list_paths_in_file_system(self): + + # Instantiate a DataLakeServiceClient using a connection string + from azure.storage.filedatalake import DataLakeServiceClient + datalake_service_client = DataLakeServiceClient.from_connection_string(self.connection_string) + + # Instantiate a FileSystemClient + file_system_client = datalake_service_client.get_file_system_client("myfilesystem") + + # Create new File System + 
file_system_client.create_file_system() + + # [START upload_file_to_file_system] + with open(SOURCE_FILE, "rb") as data: + file_client = file_system_client.get_file_client("myfile") + file_client.create_file() + file_client.append_data(data, 0) + file_client.flush_data(data.tell()) + # [END upload_file_to_file_system] + + # [START get_paths_in_file_system] + path_list = file_system_client.get_paths() + for path in path_list: + print(path.name + '\n') + # [END get_paths_in_file_system] + + # Delete file system + file_system_client.delete_file_system() + + def get_file_client_from_file_system(self): + + # Instantiate a DataLakeServiceClient using a connection string + from azure.storage.filedatalake import DataLakeServiceClient + datalake_service_client = DataLakeServiceClient.from_connection_string(self.connection_string) + + # Instantiate a FileSystemClient + file_system_client = datalake_service_client.get_file_system_client("myfilesystem") + + # Create new File System + try: + file_system_client.create_file_system() + except ResourceExistsError: + pass + + # [START get_file_client_from_file_system] + # Get the FileClient from the FileSystemClient to interact with a specific file + file_client = file_system_client.get_file_client("mynewfile") + # [END get_file_client_from_file_system] + + # Delete file system + file_system_client.delete_file_system() + + def get_directory_client_from_file_system(self): + + # Instantiate a DataLakeServiceClient using a connection string + from azure.storage.filedatalake import DataLakeServiceClient + datalake_service_client = DataLakeServiceClient.from_connection_string(self.connection_string) + + # Instantiate a FileSystemClient + file_system_client = datalake_service_client.get_file_system_client("myfilesystem") + + # Create new File System + try: + file_system_client.create_file_system() + except ResourceExistsError: + pass + + # [START get_directory_client_from_file_system] + # Get the DataLakeDirectoryClient from the 
FileSystemClient to interact with a specific directory + directory_client = file_system_client.get_directory_client("mynewdirectory") + # [END get_directory_client_from_file_system] + + # Delete file system + file_system_client.delete_file_system() + + def create_file_from_file_system(self): + # [START create_file_system_client_from_connection_string] + from azure.storage.filedatalake import FileSystemClient + file_system_client = FileSystemClient.from_connection_string(self.connection_string, "filesystem") + # [END create_file_system_client_from_connection_string] + + file_system_client.create_file_system() + + # [START create_directory_from_file_system] + directory_client = file_system_client.create_directory("mydirectory") + # [END create_directory_from_file_system] + + # [START create_file_from_file_system] + file_client = file_system_client.create_file("myfile") + # [END create_file_from_file_system] + + # [START delete_file_from_file_system] + file_system_client.delete_file("myfile") + # [END delete_file_from_file_system] + + # [START delete_directory_from_file_system] + file_system_client.delete_directory("mydirectory") + # [END delete_directory_from_file_system] + + file_system_client.delete_file_system() + +if __name__ == '__main__': + sample = FileSystemSamples() + sample.file_system_sample() + sample.acquire_lease_on_file_system() + sample.set_metadata_on_file_system() + sample.list_paths_in_file_system() + sample.get_file_client_from_file_system() + sample.create_file_from_file_system() diff --git a/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_file_system_async.py b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_file_system_async.py new file mode 100644 index 0000000000000..286ce724ba0a5 --- /dev/null +++ b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_file_system_async.py @@ -0,0 +1,233 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) 
Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +""" +FILE: datalake_samples_file_system_async.py +DESCRIPTION: + This sample demonstrates common file system operations including list paths, create a file system, + set metadata etc. +USAGE: + python datalake_samples_file_system_async.py + Set the environment variables with your own values before running the sample: + 1) AZURE_STORAGE_CONNECTION_STRING - the connection string to your storage account +""" +import asyncio +import os + +from azure.core.exceptions import ResourceExistsError + +SOURCE_FILE = 'SampleSource.txt' + + +class FileSystemSamplesAsync(object): + + connection_string = os.environ['AZURE_STORAGE_CONNECTION_STRING'] + + #--Begin File System Samples----------------------------------------------------------------- + + async def file_system_sample(self): + + # [START create_file_system_client_from_service] + # Instantiate a DataLakeServiceClient using a connection string + from azure.storage.filedatalake.aio import DataLakeServiceClient + datalake_service_client = DataLakeServiceClient.from_connection_string(self.connection_string) + + async with datalake_service_client: + # Instantiate a FileSystemClient + file_system_client = datalake_service_client.get_file_system_client("mynewfilesystems") + # [END create_file_system_client_from_service] + + try: + # [START create_file_system] + await file_system_client.create_file_system() + # [END create_file_system] + + # [START get_file_system_properties] + properties = await file_system_client.get_file_system_properties() + # [END get_file_system_properties] + + finally: + # [START delete_file_system] + await file_system_client.delete_file_system() + # [END delete_file_system] + + async def acquire_lease_on_file_system(self): + + # Instantiate a DataLakeServiceClient using a connection 
string + # [START create_data_lake_service_client_from_conn_str] + from azure.storage.filedatalake.aio import DataLakeServiceClient + datalake_service_client = DataLakeServiceClient.from_connection_string(self.connection_string) + # [END create_data_lake_service_client_from_conn_str] + + async with datalake_service_client: + # Instantiate a FileSystemClient + file_system_client = datalake_service_client.get_file_system_client("myleasefilesystem") + + # Create new File System + try: + await file_system_client.create_file_system() + except ResourceExistsError: + pass + + # [START acquire_lease_on_file_system] + # Acquire a lease on the file system + lease = await file_system_client.acquire_lease() + + # Delete file system by passing in the lease + await file_system_client.delete_file_system(lease=lease) + # [END acquire_lease_on_file_system] + + async def set_metadata_on_file_system(self): + + # Instantiate a DataLakeServiceClient using a connection string + from azure.storage.filedatalake.aio import DataLakeServiceClient + datalake_service_client = DataLakeServiceClient.from_connection_string(self.connection_string) + + async with datalake_service_client: + # Instantiate a FileSystemClient + file_system_client = datalake_service_client.get_file_system_client("mymetadatafilesystemsync") + + try: + # Create new File System + await file_system_client.create_file_system() + + # [START set_file_system_metadata] + # Create key, value pairs for metadata + metadata = {'type': 'test'} + + # Set metadata on the file system + await file_system_client.set_file_system_metadata(metadata=metadata) + # [END set_file_system_metadata] + + # Get file system properties + properties = await file_system_client.get_file_system_properties() + + finally: + # Delete file system + await file_system_client.delete_file_system() + + async def list_paths_in_file_system(self): + + # Instantiate a DataLakeServiceClient using a connection string + from azure.storage.filedatalake.aio import 
DataLakeServiceClient + datalake_service_client = DataLakeServiceClient.from_connection_string(self.connection_string) + + async with datalake_service_client: + # Instantiate a FileSystemClient + file_system_client = datalake_service_client.get_file_system_client("mypathfilesystem") + + # Create new File System + await file_system_client.create_file_system() + + # [START upload_file_to_file_system] + file_client = file_system_client.get_file_client("myfile") + await file_client.create_file() + with open(SOURCE_FILE, "rb") as data: + length = os.path.getsize(SOURCE_FILE) + await file_client.append_data(data, 0) + await file_client.flush_data(length) + # [END upload_file_to_file_system] + + # [START get_paths_in_file_system] + path_list = file_system_client.get_paths() + async for path in path_list: + print(path.name + '\n') + # [END get_paths_in_file_system] + + # Delete file system + await file_system_client.delete_file_system() + + async def get_file_client_from_file_system(self): + + # Instantiate a DataLakeServiceClient using a connection string + from azure.storage.filedatalake.aio import DataLakeServiceClient + datalake_service_client = DataLakeServiceClient.from_connection_string(self.connection_string) + + async with datalake_service_client: + # Instantiate a FileSystemClient + file_system_client = datalake_service_client.get_file_system_client("myclientfilesystem") + + # Create new File System + try: + await file_system_client.create_file_system() + except ResourceExistsError: + pass + + # [START get_file_client_from_file_system] + # Get the FileClient from the FileSystemClient to interact with a specific file + file_client = file_system_client.get_file_client("mynewfile") + # [END get_file_client_from_file_system] + + # Delete file system + await file_system_client.delete_file_system() + + async def get_directory_client_from_file_system(self): + + # Instantiate a DataLakeServiceClient using a connection string + from azure.storage.filedatalake.aio import
DataLakeServiceClient + datalake_service_client = DataLakeServiceClient.from_connection_string(self.connection_string) + + async with datalake_service_client: + # Instantiate a FileSystemClient + file_system_client = datalake_service_client.get_file_system_client("mydirectoryfilesystem") + + # Create new File System + try: + await file_system_client.create_file_system() + except ResourceExistsError: + pass + + # [START get_directory_client_from_file_system] + # Get the DataLakeDirectoryClient from the FileSystemClient to interact with a specific file + directory_client = file_system_client.get_directory_client("mynewdirectory") + # [END get_directory_client_from_file_system] + + # Delete file system + await file_system_client.delete_file_system() + + async def create_file_from_file_system(self): + # [START create_file_system_client_from_connection_string] + from azure.storage.filedatalake.aio import FileSystemClient + file_system_client = FileSystemClient.from_connection_string(self.connection_string, "filesystemforcreate") + # [END create_file_system_client_from_connection_string] + + async with file_system_client: + await file_system_client.create_file_system() + + # [START create_directory_from_file_system] + directory_client = await file_system_client.create_directory("mydirectory") + # [END create_directory_from_file_system] + + # [START create_file_from_file_system] + file_client = await file_system_client.create_file("myfile") + # [END create_file_from_file_system] + + # [START delete_file_from_file_system] + await file_system_client.delete_file("myfile") + # [END delete_file_from_file_system] + + # [START delete_directory_from_file_system] + await file_system_client.delete_directory("mydirectory") + # [END delete_directory_from_file_system] + + await file_system_client.delete_file_system() + + +async def run(): + sample = FileSystemSamplesAsync() + await sample.file_system_sample() + await sample.acquire_lease_on_file_system() + await 
sample.set_metadata_on_file_system() + await sample.list_paths_in_file_system() + await sample.get_file_client_from_file_system() + await sample.create_file_from_file_system() + +if __name__ == '__main__': + loop = asyncio.get_event_loop() + loop.run_until_complete(run()) + loop.close() diff --git a/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_instantiate_client.py b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_instantiate_client.py new file mode 100644 index 0000000000000..a55396e2bb229 --- /dev/null +++ b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_instantiate_client.py @@ -0,0 +1,39 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +""" +FILE: datalake_samples_instantiate_client.py +DESCRIPTION: + This sample demonstrates how to instantiate directory/file client +USAGE: + python datalake_samples_instantiate_client.py + Set the environment variables with your own values before running the sample: + 1) AZURE_STORAGE_CONNECTION_STRING - the connection string to your storage account + connection str could be obtained from portal.azure.com your storage account. 
+""" + +import os +connection_string = os.environ['AZURE_STORAGE_CONNECTION_STRING'] + + +def instantiate_directory_client_from_conn_str(): + # [START instantiate_directory_client_from_conn_str] + from azure.storage.filedatalake import DataLakeDirectoryClient + DataLakeDirectoryClient.from_connection_string(connection_string, "myfilesystem", "mydirectory") + # [END instantiate_directory_client_from_conn_str] + + +def instantiate_file_client_from_conn_str(): + # [START instantiate_file_client_from_conn_str] + from azure.storage.filedatalake import DataLakeFileClient + DataLakeFileClient.from_connection_string(connection_string, "myfilesystem", "mydirectory", "myfile") + # [END instantiate_file_client_from_conn_str] + + +if __name__ == '__main__': + instantiate_directory_client_from_conn_str() + instantiate_file_client_from_conn_str() diff --git a/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_instantiate_client_async.py b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_instantiate_client_async.py new file mode 100644 index 0000000000000..d0fa3f920e49d --- /dev/null +++ b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_instantiate_client_async.py @@ -0,0 +1,39 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +""" +FILE: datalake_samples_instantiate_client_async.py +DESCRIPTION: + This sample demonstrates how to instantiate directory/file client +USAGE: + python datalake_samples_instantiate_client_async.py + Set the environment variables with your own values before running the sample: + 1) AZURE_STORAGE_CONNECTION_STRING - the connection string to your storage account + connection str could be obtained from portal.azure.com your storage account. +""" + +import os +connection_string = os.environ['AZURE_STORAGE_CONNECTION_STRING'] + + +def instantiate_directory_client_from_conn_str(): + # [START instantiate_directory_client_from_conn_str] + from azure.storage.filedatalake.aio import DataLakeDirectoryClient + DataLakeDirectoryClient.from_connection_string(connection_string, "myfilesystem", "mydirectory") + # [END instantiate_directory_client_from_conn_str] + + +def instantiate_file_client_from_conn_str(): + # [START instantiate_file_client_from_conn_str] + from azure.storage.filedatalake.aio import DataLakeFileClient + DataLakeFileClient.from_connection_string(connection_string, "myfilesystem", "mydirectory", "myfile") + # [END instantiate_file_client_from_conn_str] + + +if __name__ == '__main__': + instantiate_directory_client_from_conn_str() + instantiate_file_client_from_conn_str() diff --git a/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_service.py b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_service.py new file mode 100644 index 0000000000000..58860ab9523ab --- /dev/null +++ b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_service.py @@ -0,0 +1,116 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +""" +FILE: datalake_samples_service.py +DESCRIPTION: + This sample demonstrates: + * Instantiate DataLakeServiceClient using connection str + * Instantiate DataLakeServiceClient using AAD Credential + * Get user delegation key + * Create all kinds of clients from DataLakeServiceClient and operate on those clients + * List file systems +USAGE: + python datalake_samples_service.py + Set the environment variables with your own values before running the sample: + 1) AZURE_STORAGE_CONNECTION_STRING + 2) STORAGE_ACCOUNT_NAME + 3) ACTIVE_DIRECTORY_APPLICATION_ID + 4) ACTIVE_DIRECTORY_APPLICATION_SECRET + 5) ACTIVE_DIRECTORY_TENANT_ID +""" + +import os + + +class DataLakeServiceSamples(object): + + connection_string = os.environ['AZURE_STORAGE_CONNECTION_STRING'] + account_name = os.getenv('STORAGE_ACCOUNT_NAME', "") + active_directory_application_id = os.getenv("ACTIVE_DIRECTORY_APPLICATION_ID") + active_directory_application_secret = os.getenv("ACTIVE_DIRECTORY_APPLICATION_SECRET") + active_directory_tenant_id = os.getenv("ACTIVE_DIRECTORY_TENANT_ID") + + #--Begin DataLake Service Samples----------------------------------------------------------------- + + def data_lake_service_sample(self): + + # Instantiate a DataLakeServiceClient using a connection string + # [START create_datalake_service_client] + from azure.storage.filedatalake import DataLakeServiceClient + datalake_service_client = DataLakeServiceClient.from_connection_string(self.connection_string) + # [END create_datalake_service_client] + + # Instantiate a DataLakeServiceClient Azure Identity credentials. 
+ # [START create_datalake_service_client_oauth] + from azure.identity import ClientSecretCredential + token_credential = ClientSecretCredential( + self.active_directory_tenant_id, + self.active_directory_application_id, + self.active_directory_application_secret, + ) + datalake_service_client = DataLakeServiceClient("https://{}.dfs.core.windows.net".format(self.account_name), + credential=token_credential) + # [END create_datalake_service_client_oauth] + + # get user delegation key + # [START get_user_delegation_key] + from datetime import datetime, timedelta + user_delegation_key = datalake_service_client.get_user_delegation_key(datetime.utcnow(), + datetime.utcnow() + timedelta(hours=1)) + # [END get_user_delegation_key] + + # Create file systems + # [START create_file_system_from_service_client] + datalake_service_client.create_file_system("filesystem") + # [END create_file_system_from_service_client] + file_system_client = datalake_service_client.create_file_system("anotherfilesystem") + + # List file systems + # [START list_file_systems] + file_systems = datalake_service_client.list_file_systems() + for file_system in file_systems: + print(file_system.name) + # [END list_file_systems] + + # Get Clients from DataLakeServiceClient + file_system_client = datalake_service_client.get_file_system_client(file_system_client.file_system_name) + # [START get_directory_client_from_service_client] + directory_client = datalake_service_client.get_directory_client(file_system_client.file_system_name, + "mydirectory") + # [END get_directory_client_from_service_client] + # [START get_file_client_from_service_client] + file_client = datalake_service_client.get_file_client(file_system_client.file_system_name, "myfile") + # [END get_file_client_from_service_client] + + # Create file and set properties + metadata = {'hello': 'world', 'number': '42'} + from azure.storage.filedatalake import ContentSettings + content_settings = ContentSettings( + content_language='spanish', + 
content_disposition='inline') + file_client.create_file(content_settings=content_settings) + file_client.set_metadata(metadata=metadata) + file_props = file_client.get_file_properties() + print(file_props.metadata) + + # Create file/directory and set properties + directory_client.create_directory(content_settings=content_settings, metadata=metadata) + dir_props = directory_client.get_directory_properties() + print(dir_props.metadata) + + # Delete File Systems + # [START delete_file_system_from_service_client] + datalake_service_client.delete_file_system("filesystem") + # [END delete_file_system_from_service_client] + file_system_client.delete_file_system() + + +if __name__ == '__main__': + sample = DataLakeServiceSamples() + sample.data_lake_service_sample() diff --git a/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_service_async.py b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_service_async.py new file mode 100644 index 0000000000000..99ba1cee4106a --- /dev/null +++ b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_service_async.py @@ -0,0 +1,118 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +""" +FILE: datalake_samples_service_async.py +DESCRIPTION: + This sample demonstrates: + * Instantiate DataLakeServiceClient using connection str + * Instantiate DataLakeServiceClient using AAD Credential + * Get user delegation key + * Create all kinds of clients from DataLakeServiceClient and operate on those clients + * List file systems +USAGE: + python datalake_samples_service_async.py + Set the environment variables with your own values before running the sample: + 1) AZURE_STORAGE_CONNECTION_STRING + 2) STORAGE_ACCOUNT_NAME + 3) ACTIVE_DIRECTORY_APPLICATION_ID + 4) ACTIVE_DIRECTORY_APPLICATION_SECRET + 5) ACTIVE_DIRECTORY_TENANT_ID +""" + +import asyncio +import os + + +connection_string = os.environ['AZURE_STORAGE_CONNECTION_STRING'] +account_name = os.getenv('STORAGE_ACCOUNT_NAME', "") +active_directory_application_id = os.getenv("ACTIVE_DIRECTORY_APPLICATION_ID") +active_directory_application_secret = os.getenv("ACTIVE_DIRECTORY_APPLICATION_SECRET") +active_directory_tenant_id = os.getenv("ACTIVE_DIRECTORY_TENANT_ID") + +#--Begin DataLake Service Samples----------------------------------------------------------------- + +async def data_lake_service_sample(): + + # Instantiate a DataLakeServiceClient using a connection string + # [START create_datalake_service_client] + from azure.storage.filedatalake.aio import DataLakeServiceClient + datalake_service_client = DataLakeServiceClient.from_connection_string(connection_string) + # [END create_datalake_service_client] + + # Instantiate a DataLakeServiceClient Azure Identity credentials. 
+ # [START create_datalake_service_client_oauth] + from azure.identity.aio import ClientSecretCredential + token_credential = ClientSecretCredential( + active_directory_tenant_id, + active_directory_application_id, + active_directory_application_secret, + ) + datalake_service_client = DataLakeServiceClient("https://{}.dfs.core.windows.net".format(account_name), + credential=token_credential) + # [END create_datalake_service_client_oauth] + + async with datalake_service_client: + # get user delegation key + # [START get_user_delegation_key] + from datetime import datetime, timedelta + user_delegation_key = await datalake_service_client.get_user_delegation_key(datetime.utcnow(), + datetime.utcnow() + timedelta(hours=1)) + # [END get_user_delegation_key] + + # Create file systems + # [START create_file_system_from_service_client] + await datalake_service_client.create_file_system("filesystem") + # [END create_file_system_from_service_client] + file_system_client = await datalake_service_client.create_file_system("anotherfilesystem") + + # List file systems + # [START list_file_systems] + file_systems = datalake_service_client.list_file_systems() + async for file_system in file_systems: + print(file_system.name) + # [END list_file_systems] + + # Get Clients from DataLakeServiceClient + file_system_client = datalake_service_client.get_file_system_client(file_system_client.file_system_name) + # [START get_directory_client_from_service_client] + directory_client = datalake_service_client.get_directory_client(file_system_client.file_system_name, + "mydirectory") + # [END get_directory_client_from_service_client] + # [START get_file_client_from_service_client] + file_client = datalake_service_client.get_file_client(file_system_client.file_system_name, "myfile") + # [END get_file_client_from_service_client] + + # Create file and set properties + metadata = {'hello': 'world', 'number': '42'} + from azure.storage.filedatalake import ContentSettings + content_settings = 
ContentSettings( + content_language='spanish', + content_disposition='inline') + await file_client.create_file(content_settings=content_settings) + await file_client.set_metadata(metadata=metadata) + file_props = await file_client.get_file_properties() + print(file_props.metadata) + + # Create file/directory and set properties + await directory_client.create_directory(content_settings=content_settings, metadata=metadata) + dir_props = await directory_client.get_directory_properties() + print(dir_props.metadata) + + # Delete File Systems + # [START delete_file_system_from_service_client] + await datalake_service_client.delete_file_system("filesystem") + # [END delete_file_system_from_service_client] + await file_system_client.delete_file_system() + + await token_credential.close() + +if __name__ == '__main__': + loop = asyncio.get_event_loop() + loop.run_until_complete(data_lake_service_sample()) + diff --git a/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_upload_download.py b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_upload_download.py index 73c427ea8b32d..7376b48049983 100644 --- a/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_upload_download.py +++ b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_upload_download.py @@ -1,16 +1,45 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +""" +FILE: datalake_samples_upload_download.py +DESCRIPTION: + This sample demonstrates: + * Set up a file system + * Create file + * Append data to the file + * Flush data to the file + * Get file properties + * Download the uploaded data + * Delete file system +USAGE: + python datalake_samples_upload_download.py + Set the environment variables with your own values before running the sample: + 1) STORAGE_ACCOUNT_NAME - the storage account name + 2) STORAGE_ACCOUNT_KEY - the storage account key +""" + import os import random from azure.storage.filedatalake import ( DataLakeServiceClient, ) - +SOURCE_FILE = 'SampleSource.txt' def upload_download_sample(filesystem_client): # create a file before writing content to it file_name = "testfile" print("Creating a file named '{}'.".format(file_name)) - file_client = filesystem_client.create_file(file_name) + # [START create_file] + file_client = filesystem_client.get_file_client(file_name) + file_client.create_file() + # [END create_file] # prepare the file content with 4KB of random data file_content = get_random_bytes(4*1024) @@ -20,15 +49,24 @@ def upload_download_sample(filesystem_client): print("Uploading data to '{}'.".format(file_name)) file_client.append_data(data=file_content[0:1024], offset=0, length=1024) file_client.append_data(data=file_content[1024:2048], offset=1024, length=1024) + # [START append_data] file_client.append_data(data=file_content[2048:3072], offset=2048, length=1024) + # [END append_data] file_client.append_data(data=file_content[3072:4096], offset=3072, length=1024) # data is only committed when flush is called file_client.flush_data(len(file_content)) + # Get file properties + # [START get_file_properties] + properties = file_client.get_file_properties() + # [END get_file_properties] + # read the data back print("Downloading data from '{}'.".format(file_name)) + # [START read_file] downloaded_bytes = 
file_client.read_file() + # [END read_file] # verify the downloaded content if file_content == downloaded_bytes: @@ -36,6 +74,18 @@ def upload_download_sample(filesystem_client): else: print("Something went wrong.") + # Rename the file + # [START rename_file] + new_client = file_client.rename_file(file_client.file_system_name + '/' + 'newname') + # [END rename_file] + + # download the renamed file in to local file + with open(SOURCE_FILE, 'wb') as stream: + new_client.read_file(stream=stream) + + # [START delete_file] + new_client.delete_file() + # [END delete_file] # help method to provide random bytes to serve as file content def get_random_bytes(size): diff --git a/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_upload_download_async.py b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_upload_download_async.py new file mode 100644 index 0000000000000..eeebe37dd45f8 --- /dev/null +++ b/sdk/storage/azure-storage-file-datalake/samples/datalake_samples_upload_download_async.py @@ -0,0 +1,127 @@ +# coding: utf-8 + +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +""" +FILE: datalake_samples_upload_download_async.py +DESCRIPTION: + This sample demonstrates: + * Set up a file system + * Create file + * Append data to the file + * Flush data to the file + * Get file properties + * Download the uploaded data + * Delete file system +USAGE: + python datalake_samples_upload_download_async.py + Set the environment variables with your own values before running the sample: + 1) STORAGE_ACCOUNT_NAME - the storage account name + 2) STORAGE_ACCOUNT_KEY - the storage account key +""" +import asyncio +import os +import random + +from azure.storage.filedatalake.aio import ( + DataLakeServiceClient, +) +SOURCE_FILE = 'SampleSource.txt' + +async def upload_download_sample(filesystem_client): + # create a file before writing content to it + file_name = "testfile" + print("Creating a file named '{}'.".format(file_name)) + # [START create_file] + file_client = filesystem_client.get_file_client(file_name) + await file_client.create_file() + # [END create_file] + + # prepare the file content with 4KB of random data + file_content = get_random_bytes(4*1024) + + # append data to the file + # the data remain uncommitted until flush is performed + print("Uploading data to '{}'.".format(file_name)) + await file_client.append_data(data=file_content[0:1024], offset=0, length=1024) + await file_client.append_data(data=file_content[1024:2048], offset=1024, length=1024) + # [START append_data] + await file_client.append_data(data=file_content[2048:3072], offset=2048, length=1024) + # [END append_data] + await file_client.append_data(data=file_content[3072:4096], offset=3072, length=1024) + + # data is only committed when flush is called + await file_client.flush_data(len(file_content)) + + # Get file properties + # [START get_file_properties] + properties = await file_client.get_file_properties() + # [END get_file_properties] + + # read the data back + print("Downloading data 
from '{}'.".format(file_name)) + # [START read_file] + downloaded_bytes = await file_client.read_file() + # [END read_file] + + # verify the downloaded content + if file_content == downloaded_bytes: + print("The downloaded data is equal to the data uploaded.") + else: + print("Something went wrong.") + + # Rename the file + # [START rename_file] + new_client = await file_client.rename_file(file_client.file_system_name + '/' + 'newname') + # [END rename_file] + + # download the renamed file in to local file + with open(SOURCE_FILE, 'wb') as stream: + await new_client.read_file(stream=stream) + + # [START delete_file] + await new_client.delete_file() + # [END delete_file] + +# help method to provide random bytes to serve as file content +def get_random_bytes(size): + rand = random.Random() + result = bytearray(size) + for i in range(size): + result[i] = int(rand.random()*255) # random() is consistent between python 2 and 3 + return bytes(result) + + +async def run(): + account_name = os.getenv('STORAGE_ACCOUNT_NAME', "") + account_key = os.getenv('STORAGE_ACCOUNT_KEY', "") + + # set up the service client with the credentials from the environment variables + service_client = DataLakeServiceClient(account_url="{}://{}.dfs.core.windows.net".format( + "https", + account_name + ), credential=account_key) + + async with service_client: + # generate a random name for testing purpose + fs_name = "testfs{}".format(random.randint(1, 1000)) + print("Generating a test filesystem named '{}'.".format(fs_name)) + + # create the filesystem + filesystem_client = await service_client.create_file_system(file_system=fs_name) + + # invoke the sample code + try: + await upload_download_sample(filesystem_client) + finally: + # clean up the demo filesystem + await filesystem_client.delete_file_system() + + +if __name__ == '__main__': + loop = asyncio.get_event_loop() + loop.run_until_complete(run())