diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore/__init__.py index 3dc2e0655a38..bd032607fbd8 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore/__init__.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore/__init__.py @@ -20,43 +20,81 @@ from google.cloud.memorystore_v1.services.memorystore.client import MemorystoreClient from google.cloud.memorystore_v1.types.memorystore import ( + AutomatedBackupConfig, + Backup, + BackupCollection, + BackupFile, + BackupInstanceRequest, CertificateAuthority, ConnectionType, CreateInstanceRequest, + CrossInstanceReplicationConfig, + DeleteBackupRequest, DeleteInstanceRequest, DiscoveryEndpoint, + ExportBackupRequest, + GetBackupCollectionRequest, + GetBackupRequest, GetCertificateAuthorityRequest, GetInstanceRequest, Instance, + ListBackupCollectionsRequest, + ListBackupCollectionsResponse, + ListBackupsRequest, + ListBackupsResponse, ListInstancesRequest, ListInstancesResponse, + MaintenancePolicy, + MaintenanceSchedule, NodeConfig, OperationMetadata, PersistenceConfig, + PscAttachmentDetail, PscAutoConnection, PscConnection, PscConnectionStatus, + RescheduleMaintenanceRequest, UpdateInstanceRequest, + WeeklyMaintenanceWindow, ZoneDistributionConfig, ) __all__ = ( "MemorystoreClient", + "AutomatedBackupConfig", + "Backup", + "BackupCollection", + "BackupFile", + "BackupInstanceRequest", "CertificateAuthority", "CreateInstanceRequest", + "CrossInstanceReplicationConfig", + "DeleteBackupRequest", "DeleteInstanceRequest", "DiscoveryEndpoint", + "ExportBackupRequest", + "GetBackupCollectionRequest", + "GetBackupRequest", "GetCertificateAuthorityRequest", "GetInstanceRequest", "Instance", + "ListBackupCollectionsRequest", + "ListBackupCollectionsResponse", + "ListBackupsRequest", + "ListBackupsResponse", "ListInstancesRequest", "ListInstancesResponse", + "MaintenancePolicy", + "MaintenanceSchedule", 
"NodeConfig", "OperationMetadata", "PersistenceConfig", + "PscAttachmentDetail", "PscAutoConnection", "PscConnection", + "RescheduleMaintenanceRequest", "UpdateInstanceRequest", + "WeeklyMaintenanceWindow", "ZoneDistributionConfig", "ConnectionType", "PscConnectionStatus", diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/__init__.py index 4b4c5e3b9e0f..3982460b273d 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/__init__.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/__init__.py @@ -20,44 +20,82 @@ from .services.memorystore import MemorystoreClient from .types.memorystore import ( + AutomatedBackupConfig, + Backup, + BackupCollection, + BackupFile, + BackupInstanceRequest, CertificateAuthority, ConnectionType, CreateInstanceRequest, + CrossInstanceReplicationConfig, + DeleteBackupRequest, DeleteInstanceRequest, DiscoveryEndpoint, + ExportBackupRequest, + GetBackupCollectionRequest, + GetBackupRequest, GetCertificateAuthorityRequest, GetInstanceRequest, Instance, + ListBackupCollectionsRequest, + ListBackupCollectionsResponse, + ListBackupsRequest, + ListBackupsResponse, ListInstancesRequest, ListInstancesResponse, + MaintenancePolicy, + MaintenanceSchedule, NodeConfig, OperationMetadata, PersistenceConfig, + PscAttachmentDetail, PscAutoConnection, PscConnection, PscConnectionStatus, + RescheduleMaintenanceRequest, UpdateInstanceRequest, + WeeklyMaintenanceWindow, ZoneDistributionConfig, ) __all__ = ( + "AutomatedBackupConfig", + "Backup", + "BackupCollection", + "BackupFile", + "BackupInstanceRequest", "CertificateAuthority", "ConnectionType", "CreateInstanceRequest", + "CrossInstanceReplicationConfig", + "DeleteBackupRequest", "DeleteInstanceRequest", "DiscoveryEndpoint", + "ExportBackupRequest", + "GetBackupCollectionRequest", + "GetBackupRequest", "GetCertificateAuthorityRequest", "GetInstanceRequest", 
"Instance", + "ListBackupCollectionsRequest", + "ListBackupCollectionsResponse", + "ListBackupsRequest", + "ListBackupsResponse", "ListInstancesRequest", "ListInstancesResponse", + "MaintenancePolicy", + "MaintenanceSchedule", "MemorystoreClient", "NodeConfig", "OperationMetadata", "PersistenceConfig", + "PscAttachmentDetail", "PscAutoConnection", "PscConnection", "PscConnectionStatus", + "RescheduleMaintenanceRequest", "UpdateInstanceRequest", + "WeeklyMaintenanceWindow", "ZoneDistributionConfig", ) diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/gapic_metadata.json b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/gapic_metadata.json index b33f1e105163..c92c7a886e81 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/gapic_metadata.json +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/gapic_metadata.json @@ -10,16 +10,41 @@ "rest": { "libraryClient": "MemorystoreClient", "rpcs": { + "BackupInstance": { + "methods": [ + "backup_instance" + ] + }, "CreateInstance": { "methods": [ "create_instance" ] }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, "DeleteInstance": { "methods": [ "delete_instance" ] }, + "ExportBackup": { + "methods": [ + "export_backup" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetBackupCollection": { + "methods": [ + "get_backup_collection" + ] + }, "GetCertificateAuthority": { "methods": [ "get_certificate_authority" @@ -30,11 +55,26 @@ "get_instance" ] }, + "ListBackupCollections": { + "methods": [ + "list_backup_collections" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, "ListInstances": { "methods": [ "list_instances" ] }, + "RescheduleMaintenance": { + "methods": [ + "reschedule_maintenance" + ] + }, "UpdateInstance": { "methods": [ "update_instance" diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/client.py 
b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/client.py index a775f9ae0a81..6b282e84ebec 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/client.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/client.py @@ -197,6 +197,52 @@ def transport(self) -> MemorystoreTransport: """ return self._transport + @staticmethod + def backup_path( + project: str, + location: str, + backup_collection: str, + backup: str, + ) -> str: + """Returns a fully-qualified backup string.""" + return "projects/{project}/locations/{location}/backupCollections/{backup_collection}/backups/{backup}".format( + project=project, + location=location, + backup_collection=backup_collection, + backup=backup, + ) + + @staticmethod + def parse_backup_path(path: str) -> Dict[str, str]: + """Parses a backup path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/backupCollections/(?P<backup_collection>.+?)/backups/(?P<backup>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def backup_collection_path( + project: str, + location: str, + backup_collection: str, + ) -> str: + """Returns a fully-qualified backup_collection string.""" + return "projects/{project}/locations/{location}/backupCollections/{backup_collection}".format( + project=project, + location=location, + backup_collection=backup_collection, + ) + + @staticmethod + def parse_backup_collection_path(path: str) -> Dict[str, str]: + """Parses a backup_collection path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/backupCollections/(?P<backup_collection>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod + def certificate_authority_path( + project: str, @@ -219,6 +265,30 @@ def parse_certificate_authority_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def crypto_key_path( + project: str, + location: str, +
key_ring: str, + crypto_key: str, + ) -> str: + """Returns a fully-qualified crypto_key string.""" + return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + + @staticmethod + def parse_crypto_key_path(path: str) -> Dict[str, str]: + """Parses a crypto_key path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/keyRings/(?P<key_ring>.+?)/cryptoKeys/(?P<crypto_key>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod + def forwarding_rule_path( + project: str, @@ -1051,15 +1121,9 @@ def sample_create_instance(): client = memorystore_v1.MemorystoreClient() # Initialize request argument(s) - instance = memorystore_v1.Instance() - instance.psc_auto_connections.port = 453 - instance.psc_auto_connections.project_id = "project_id_value" - instance.psc_auto_connections.network = "network_value" - request = memorystore_v1.CreateInstanceRequest( parent="parent_value", instance_id="instance_id_value", - instance=instance, ) # Make the request @@ -1208,13 +1272,7 @@ def sample_update_instance(): client = memorystore_v1.MemorystoreClient() # Initialize request argument(s) - instance = memorystore_v1.Instance() - instance.psc_auto_connections.port = 453 - instance.psc_auto_connections.project_id = "project_id_value" - instance.psc_auto_connections.network = "network_value" - request = memorystore_v1.UpdateInstanceRequest( - instance=instance, ) # Make the request @@ -1562,6 +1620,983 @@ def sample_get_certificate_authority(): # Done; return the response.
return response + def reschedule_maintenance( + self, + request: Optional[Union[memorystore.RescheduleMaintenanceRequest, dict]] = None, + *, + name: Optional[str] = None, + reschedule_type: Optional[ + memorystore.RescheduleMaintenanceRequest.RescheduleType + ] = None, + schedule_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Reschedules upcoming maintenance event. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memorystore_v1 + + def sample_reschedule_maintenance(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1.RescheduleMaintenanceRequest( + name="name_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.memorystore_v1.types.RescheduleMaintenanceRequest, dict]): + The request object. Request for rescheduling instance + maintenance. + name (str): + Required. Name of the instance to reschedule maintenance + for: + ``projects/{project}/locations/{location_id}/instances/{instance}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ reschedule_type (google.cloud.memorystore_v1.types.RescheduleMaintenanceRequest.RescheduleType): + Required. If reschedule type is SPECIFIC_TIME, + schedule_time must be set. + + This corresponds to the ``reschedule_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. Timestamp when the maintenance shall be + rescheduled to if reschedule_type=SPECIFIC_TIME, in RFC + 3339 format. Example: ``2012-11-15T16:19:00.094Z``. + + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memorystore_v1.types.Instance` A + Memorystore instance. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, reschedule_type, schedule_time] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, memorystore.RescheduleMaintenanceRequest): + request = memorystore.RescheduleMaintenanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if reschedule_type is not None: + request.reschedule_type = reschedule_type + if schedule_time is not None: + request.schedule_time = schedule_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.reschedule_maintenance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + memorystore.Instance, + metadata_type=memorystore.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_backup_collections( + self, + request: Optional[Union[memorystore.ListBackupCollectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBackupCollectionsPager: + r"""Lists all backup collections owned by a consumer project in + either the specified location (region) or all locations. + + If ``location_id`` is specified as ``-`` (wildcard), then all + regions available to the project are queried, and the results + are aggregated. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memorystore_v1 + + def sample_list_backup_collections(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1.ListBackupCollectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_collections(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.memorystore_v1.types.ListBackupCollectionsRequest, dict]): + The request object. Request for [ListBackupCollections] + parent (str): + Required. The resource name of the backupCollection + location using the form: + ``projects/{project_id}/locations/{location_id}`` where + ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.memorystore_v1.services.memorystore.pagers.ListBackupCollectionsPager: + Response for [ListBackupCollections]. 
+ + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, memorystore.ListBackupCollectionsRequest): + request = memorystore.ListBackupCollectionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backup_collections] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBackupCollectionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_backup_collection( + self, + request: Optional[Union[memorystore.GetBackupCollectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> memorystore.BackupCollection: + r"""Get a backup collection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memorystore_v1 + + def sample_get_backup_collection(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1.GetBackupCollectionRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_collection(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.memorystore_v1.types.GetBackupCollectionRequest, dict]): + The request object. Request for [GetBackupCollection]. + name (str): + Required. Instance backupCollection resource name using + the form: + ``projects/{project_id}/locations/{location_id}/backupCollections/{backup_collection_id}`` + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.memorystore_v1.types.BackupCollection: + BackupCollection of an instance. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, memorystore.GetBackupCollectionRequest): + request = memorystore.GetBackupCollectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_backup_collection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_backups( + self, + request: Optional[Union[memorystore.ListBackupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBackupsPager: + r"""Lists all backups owned by a backup collection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memorystore_v1 + + def sample_list_backups(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.memorystore_v1.types.ListBackupsRequest, dict]): + The request object. Request for [ListBackups]. + parent (str): + Required. The resource name of the backupCollection + using the form: + ``projects/{project_id}/locations/{location_id}/backupCollections/{backup_collection_id}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.memorystore_v1.services.memorystore.pagers.ListBackupsPager: + Response for [ListBackups]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, memorystore.ListBackupsRequest): + request = memorystore.ListBackupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListBackupsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_backup( + self, + request: Optional[Union[memorystore.GetBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> memorystore.Backup: + r"""Gets the details of a specific backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memorystore_v1 + + def sample_get_backup(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.memorystore_v1.types.GetBackupRequest, dict]): + The request object. Request for [GetBackup]. + name (str): + Required. Instance backup resource name using the form: + ``projects/{project_id}/locations/{location_id}/backupCollections/{backup_collection_id}/backups/{backup_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.memorystore_v1.types.Backup: + Backup of an instance. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, memorystore.GetBackupRequest): + request = memorystore.GetBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_backup( + self, + request: Optional[Union[memorystore.DeleteBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a specific backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memorystore_v1 + + def sample_delete_backup(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.memorystore_v1.types.DeleteBackupRequest, dict]): + The request object. Request for [DeleteBackup]. + name (str): + Required. Instance backup resource name using the form: + ``projects/{project_id}/locations/{location_id}/backupCollections/{backup_collection_id}/backups/{backup_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, memorystore.DeleteBackupRequest): + request = memorystore.DeleteBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=memorystore.OperationMetadata, + ) + + # Done; return the response. + return response + + def export_backup( + self, + request: Optional[Union[memorystore.ExportBackupRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Exports a specific backup to a customer target Cloud + Storage URI. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memorystore_v1 + + def sample_export_backup(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1.ExportBackupRequest( + gcs_bucket="gcs_bucket_value", + name="name_value", + ) + + # Make the request + operation = client.export_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.memorystore_v1.types.ExportBackupRequest, dict]): + The request object. Request for [ExportBackup]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memorystore_v1.types.Backup` Backup + of an instance. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, memorystore.ExportBackupRequest): + request = memorystore.ExportBackupRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + memorystore.Backup, + metadata_type=memorystore.OperationMetadata, + ) + + # Done; return the response. + return response + + def backup_instance( + self, + request: Optional[Union[memorystore.BackupInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Backup Instance. 
+ If this is the first time a backup is being created, a + backup collection will be created at the backend, and + this backup belongs to this collection. Both collection + and backup will have a resource name. Backup will be + executed for each shard. A replica (primary if nonHA) + will be selected to perform the execution. Backup call + will be rejected if there is an ongoing backup or update + operation. Be aware that during preview, if the + instance's internal software version is too old, + critical update will be performed before actual backup. + Once the internal software version is updated to the + minimum version required by the backup feature, + subsequent backups will not require critical update. + After preview, there will be no critical update needed + for backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import memorystore_v1 + + def sample_backup_instance(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1.BackupInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.backup_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.memorystore_v1.types.BackupInstanceRequest, dict]): + The request object. Request for [BackupInstance]. + name (str): + Required. 
Instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.memorystore_v1.types.Instance` A + Memorystore instance. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, memorystore.BackupInstanceRequest): + request = memorystore.BackupInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.backup_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + memorystore.Instance, + metadata_type=memorystore.OperationMetadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "MemorystoreClient": return self diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/pagers.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/pagers.py index 091cc4dd16c2..57f957b57e5b 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/pagers.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/pagers.py @@ -115,3 +115,155 @@ def __iter__(self) -> Iterator[memorystore.Instance]: def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupCollectionsPager: + """A pager for iterating through ``list_backup_collections`` requests. + + This class thinly wraps an initial + :class:`google.cloud.memorystore_v1.types.ListBackupCollectionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backup_collections`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackupCollections`` requests and continue to iterate + through the ``backup_collections`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.memorystore_v1.types.ListBackupCollectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., memorystore.ListBackupCollectionsResponse], + request: memorystore.ListBackupCollectionsRequest, + response: memorystore.ListBackupCollectionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.memorystore_v1.types.ListBackupCollectionsRequest): + The initial request object. + response (google.cloud.memorystore_v1.types.ListBackupCollectionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = memorystore.ListBackupCollectionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[memorystore.ListBackupCollectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[memorystore.BackupCollection]: + for page in self.pages: + yield from page.backup_collections + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupsPager: + """A pager for iterating through ``list_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.memorystore_v1.types.ListBackupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackups`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.memorystore_v1.types.ListBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., memorystore.ListBackupsResponse], + request: memorystore.ListBackupsRequest, + response: memorystore.ListBackupsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.memorystore_v1.types.ListBackupsRequest): + The initial request object. + response (google.cloud.memorystore_v1.types.ListBackupsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = memorystore.ListBackupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[memorystore.ListBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[memorystore.Backup]: + for page in self.pages: + yield from page.backups + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/base.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/base.py index 14061fcec9bd..d1ca9f7fba28 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/base.py +++ 
b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/base.py @@ -187,6 +187,46 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.reschedule_maintenance: gapic_v1.method.wrap_method( + self.reschedule_maintenance, + default_timeout=None, + client_info=client_info, + ), + self.list_backup_collections: gapic_v1.method.wrap_method( + self.list_backup_collections, + default_timeout=None, + client_info=client_info, + ), + self.get_backup_collection: gapic_v1.method.wrap_method( + self.get_backup_collection, + default_timeout=None, + client_info=client_info, + ), + self.list_backups: gapic_v1.method.wrap_method( + self.list_backups, + default_timeout=None, + client_info=client_info, + ), + self.get_backup: gapic_v1.method.wrap_method( + self.get_backup, + default_timeout=None, + client_info=client_info, + ), + self.delete_backup: gapic_v1.method.wrap_method( + self.delete_backup, + default_timeout=None, + client_info=client_info, + ), + self.export_backup: gapic_v1.method.wrap_method( + self.export_backup, + default_timeout=None, + client_info=client_info, + ), + self.backup_instance: gapic_v1.method.wrap_method( + self.backup_instance, + default_timeout=None, + client_info=client_info, + ), self.get_location: gapic_v1.method.wrap_method( self.get_location, default_timeout=None, @@ -293,6 +333,83 @@ def get_certificate_authority( ]: raise NotImplementedError() + @property + def reschedule_maintenance( + self, + ) -> Callable[ + [memorystore.RescheduleMaintenanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_backup_collections( + self, + ) -> Callable[ + [memorystore.ListBackupCollectionsRequest], + Union[ + memorystore.ListBackupCollectionsResponse, + Awaitable[memorystore.ListBackupCollectionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def 
get_backup_collection( + self, + ) -> Callable[ + [memorystore.GetBackupCollectionRequest], + Union[memorystore.BackupCollection, Awaitable[memorystore.BackupCollection]], + ]: + raise NotImplementedError() + + @property + def list_backups( + self, + ) -> Callable[ + [memorystore.ListBackupsRequest], + Union[ + memorystore.ListBackupsResponse, Awaitable[memorystore.ListBackupsResponse] + ], + ]: + raise NotImplementedError() + + @property + def get_backup( + self, + ) -> Callable[ + [memorystore.GetBackupRequest], + Union[memorystore.Backup, Awaitable[memorystore.Backup]], + ]: + raise NotImplementedError() + + @property + def delete_backup( + self, + ) -> Callable[ + [memorystore.DeleteBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def export_backup( + self, + ) -> Callable[ + [memorystore.ExportBackupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def backup_instance( + self, + ) -> Callable[ + [memorystore.BackupInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/rest.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/rest.py index e96cbd702868..a5f41d57453d 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/rest.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/rest.py @@ -70,6 +70,14 @@ class MemorystoreRestInterceptor: .. 
code-block:: python class MyCustomMemorystoreInterceptor(MemorystoreRestInterceptor): + def pre_backup_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_backup_instance(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_instance(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -78,6 +86,14 @@ def post_create_instance(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_backup(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_instance(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -86,6 +102,30 @@ def post_delete_instance(self, response): logging.log(f"Received response: {response}") return response + def pre_export_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_export_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup_collection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup_collection(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_certificate_authority(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -102,6 +142,22 @@ def post_get_instance(self, response): logging.log(f"Received response: {response}") 
return response + def pre_list_backup_collections(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backup_collections(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_backups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backups(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_instances(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -110,6 +166,14 @@ def post_list_instances(self, response): logging.log(f"Received response: {response}") return response + def pre_reschedule_maintenance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_reschedule_maintenance(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_instance(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -124,6 +188,54 @@ def post_update_instance(self, response): """ + def pre_backup_instance( + self, + request: memorystore.BackupInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + memorystore.BackupInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for backup_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_backup_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for backup_instance + + DEPRECATED. Please use the `post_backup_instance_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. This `post_backup_instance` interceptor runs + before the `post_backup_instance_with_metadata` interceptor. + """ + return response + + def post_backup_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for backup_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Memorystore server but before it is returned to user code. + + We recommend only using this `post_backup_instance_with_metadata` + interceptor in new development instead of the `post_backup_instance` interceptor. + When both interceptors are used, this `post_backup_instance_with_metadata` interceptor runs after the + `post_backup_instance` interceptor. The (possibly modified) response returned by + `post_backup_instance` will be passed to + `post_backup_instance_with_metadata`. + """ + return response, metadata + def pre_create_instance( self, request: memorystore.CreateInstanceRequest, @@ -172,6 +284,54 @@ def post_create_instance_with_metadata( """ return response, metadata + def pre_delete_backup( + self, + request: memorystore.DeleteBackupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + memorystore.DeleteBackupRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_delete_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_backup + + DEPRECATED. Please use the `post_delete_backup_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. This `post_delete_backup` interceptor runs + before the `post_delete_backup_with_metadata` interceptor. + """ + return response + + def post_delete_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Memorystore server but before it is returned to user code. + + We recommend only using this `post_delete_backup_with_metadata` + interceptor in new development instead of the `post_delete_backup` interceptor. + When both interceptors are used, this `post_delete_backup_with_metadata` interceptor runs after the + `post_delete_backup` interceptor. The (possibly modified) response returned by + `post_delete_backup` will be passed to + `post_delete_backup_with_metadata`. + """ + return response, metadata + def pre_delete_instance( self, request: memorystore.DeleteInstanceRequest, @@ -220,6 +380,146 @@ def post_delete_instance_with_metadata( """ return response, metadata + def pre_export_backup( + self, + request: memorystore.ExportBackupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + memorystore.ExportBackupRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for export_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_export_backup( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for export_backup + + DEPRECATED. Please use the `post_export_backup_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. This `post_export_backup` interceptor runs + before the `post_export_backup_with_metadata` interceptor. + """ + return response + + def post_export_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Memorystore server but before it is returned to user code. + + We recommend only using this `post_export_backup_with_metadata` + interceptor in new development instead of the `post_export_backup` interceptor. + When both interceptors are used, this `post_export_backup_with_metadata` interceptor runs after the + `post_export_backup` interceptor. The (possibly modified) response returned by + `post_export_backup` will be passed to + `post_export_backup_with_metadata`. + """ + return response, metadata + + def pre_get_backup( + self, + request: memorystore.GetBackupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[memorystore.GetBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_get_backup(self, response: memorystore.Backup) -> memorystore.Backup: + """Post-rpc interceptor for get_backup + + DEPRECATED. Please use the `post_get_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. 
This `post_get_backup` interceptor runs + before the `post_get_backup_with_metadata` interceptor. + """ + return response + + def post_get_backup_with_metadata( + self, + response: memorystore.Backup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[memorystore.Backup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Memorystore server but before it is returned to user code. + + We recommend only using this `post_get_backup_with_metadata` + interceptor in new development instead of the `post_get_backup` interceptor. + When both interceptors are used, this `post_get_backup_with_metadata` interceptor runs after the + `post_get_backup` interceptor. The (possibly modified) response returned by + `post_get_backup` will be passed to + `post_get_backup_with_metadata`. + """ + return response, metadata + + def pre_get_backup_collection( + self, + request: memorystore.GetBackupCollectionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + memorystore.GetBackupCollectionRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_backup_collection + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_get_backup_collection( + self, response: memorystore.BackupCollection + ) -> memorystore.BackupCollection: + """Post-rpc interceptor for get_backup_collection + + DEPRECATED. Please use the `post_get_backup_collection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. This `post_get_backup_collection` interceptor runs + before the `post_get_backup_collection_with_metadata` interceptor. 
+ """ + return response + + def post_get_backup_collection_with_metadata( + self, + response: memorystore.BackupCollection, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[memorystore.BackupCollection, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_backup_collection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Memorystore server but before it is returned to user code. + + We recommend only using this `post_get_backup_collection_with_metadata` + interceptor in new development instead of the `post_get_backup_collection` interceptor. + When both interceptors are used, this `post_get_backup_collection_with_metadata` interceptor runs after the + `post_get_backup_collection` interceptor. The (possibly modified) response returned by + `post_get_backup_collection` will be passed to + `post_get_backup_collection_with_metadata`. + """ + return response, metadata + def pre_get_certificate_authority( self, request: memorystore.GetCertificateAuthorityRequest, @@ -315,125 +615,274 @@ def post_get_instance_with_metadata( """ return response, metadata - def pre_list_instances( + def pre_list_backup_collections( self, - request: memorystore.ListInstancesRequest, + request: memorystore.ListBackupCollectionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[ - memorystore.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]] + memorystore.ListBackupCollectionsRequest, + Sequence[Tuple[str, Union[str, bytes]]], ]: - """Pre-rpc interceptor for list_instances + """Pre-rpc interceptor for list_backup_collections Override in a subclass to manipulate the request or metadata before they are sent to the Memorystore server. 
""" return request, metadata - def post_list_instances( - self, response: memorystore.ListInstancesResponse - ) -> memorystore.ListInstancesResponse: - """Post-rpc interceptor for list_instances + def post_list_backup_collections( + self, response: memorystore.ListBackupCollectionsResponse + ) -> memorystore.ListBackupCollectionsResponse: + """Post-rpc interceptor for list_backup_collections - DEPRECATED. Please use the `post_list_instances_with_metadata` + DEPRECATED. Please use the `post_list_backup_collections_with_metadata` interceptor instead. Override in a subclass to read or manipulate the response after it is returned by the Memorystore server but before - it is returned to user code. This `post_list_instances` interceptor runs - before the `post_list_instances_with_metadata` interceptor. + it is returned to user code. This `post_list_backup_collections` interceptor runs + before the `post_list_backup_collections_with_metadata` interceptor. """ return response - def post_list_instances_with_metadata( + def post_list_backup_collections_with_metadata( self, - response: memorystore.ListInstancesResponse, + response: memorystore.ListBackupCollectionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[ - memorystore.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]] + memorystore.ListBackupCollectionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], ]: - """Post-rpc interceptor for list_instances + """Post-rpc interceptor for list_backup_collections Override in a subclass to read or manipulate the response or metadata after it is returned by the Memorystore server but before it is returned to user code. - We recommend only using this `post_list_instances_with_metadata` - interceptor in new development instead of the `post_list_instances` interceptor. - When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the - `post_list_instances` interceptor. 
The (possibly modified) response returned by - `post_list_instances` will be passed to - `post_list_instances_with_metadata`. + We recommend only using this `post_list_backup_collections_with_metadata` + interceptor in new development instead of the `post_list_backup_collections` interceptor. + When both interceptors are used, this `post_list_backup_collections_with_metadata` interceptor runs after the + `post_list_backup_collections` interceptor. The (possibly modified) response returned by + `post_list_backup_collections` will be passed to + `post_list_backup_collections_with_metadata`. """ return response, metadata - def pre_update_instance( + def pre_list_backups( self, - request: memorystore.UpdateInstanceRequest, + request: memorystore.ListBackupsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - memorystore.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]] - ]: - """Pre-rpc interceptor for update_instance + ) -> Tuple[memorystore.ListBackupsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_backups Override in a subclass to manipulate the request or metadata before they are sent to the Memorystore server. """ return request, metadata - def post_update_instance( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for update_instance + def post_list_backups( + self, response: memorystore.ListBackupsResponse + ) -> memorystore.ListBackupsResponse: + """Post-rpc interceptor for list_backups - DEPRECATED. Please use the `post_update_instance_with_metadata` + DEPRECATED. Please use the `post_list_backups_with_metadata` interceptor instead. Override in a subclass to read or manipulate the response after it is returned by the Memorystore server but before - it is returned to user code. This `post_update_instance` interceptor runs - before the `post_update_instance_with_metadata` interceptor. + it is returned to user code. 
This `post_list_backups` interceptor runs + before the `post_list_backups_with_metadata` interceptor. """ return response - def post_update_instance_with_metadata( + def post_list_backups_with_metadata( self, - response: operations_pb2.Operation, + response: memorystore.ListBackupsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_instance + ) -> Tuple[ + memorystore.ListBackupsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_backups Override in a subclass to read or manipulate the response or metadata after it is returned by the Memorystore server but before it is returned to user code. - We recommend only using this `post_update_instance_with_metadata` - interceptor in new development instead of the `post_update_instance` interceptor. - When both interceptors are used, this `post_update_instance_with_metadata` interceptor runs after the - `post_update_instance` interceptor. The (possibly modified) response returned by - `post_update_instance` will be passed to - `post_update_instance_with_metadata`. + We recommend only using this `post_list_backups_with_metadata` + interceptor in new development instead of the `post_list_backups` interceptor. + When both interceptors are used, this `post_list_backups_with_metadata` interceptor runs after the + `post_list_backups` interceptor. The (possibly modified) response returned by + `post_list_backups` will be passed to + `post_list_backups_with_metadata`. 
""" return response, metadata - def pre_get_location( + def pre_list_instances( self, - request: locations_pb2.GetLocationRequest, + request: memorystore.ListInstancesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[ - locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + memorystore.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]] ]: - """Pre-rpc interceptor for get_location + """Pre-rpc interceptor for list_instances Override in a subclass to manipulate the request or metadata before they are sent to the Memorystore server. """ return request, metadata - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location - - Override in a subclass to manipulate the response - after it is returned by the Memorystore server but before + def post_list_instances( + self, response: memorystore.ListInstancesResponse + ) -> memorystore.ListInstancesResponse: + """Post-rpc interceptor for list_instances + + DEPRECATED. Please use the `post_list_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. This `post_list_instances` interceptor runs + before the `post_list_instances_with_metadata` interceptor. + """ + return response + + def post_list_instances_with_metadata( + self, + response: memorystore.ListInstancesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + memorystore.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Memorystore server but before it is returned to user code. 
+ + We recommend only using this `post_list_instances_with_metadata` + interceptor in new development instead of the `post_list_instances` interceptor. + When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the + `post_list_instances` interceptor. The (possibly modified) response returned by + `post_list_instances` will be passed to + `post_list_instances_with_metadata`. + """ + return response, metadata + + def pre_reschedule_maintenance( + self, + request: memorystore.RescheduleMaintenanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + memorystore.RescheduleMaintenanceRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for reschedule_maintenance + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_reschedule_maintenance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for reschedule_maintenance + + DEPRECATED. Please use the `post_reschedule_maintenance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. This `post_reschedule_maintenance` interceptor runs + before the `post_reschedule_maintenance_with_metadata` interceptor. + """ + return response + + def post_reschedule_maintenance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for reschedule_maintenance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Memorystore server but before it is returned to user code. 
+ + We recommend only using this `post_reschedule_maintenance_with_metadata` + interceptor in new development instead of the `post_reschedule_maintenance` interceptor. + When both interceptors are used, this `post_reschedule_maintenance_with_metadata` interceptor runs after the + `post_reschedule_maintenance` interceptor. The (possibly modified) response returned by + `post_reschedule_maintenance` will be passed to + `post_reschedule_maintenance_with_metadata`. + """ + return response, metadata + + def pre_update_instance( + self, + request: memorystore.UpdateInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + memorystore.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for update_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_update_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_instance + + DEPRECATED. Please use the `post_update_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Memorystore server but before + it is returned to user code. This `post_update_instance` interceptor runs + before the `post_update_instance_with_metadata` interceptor. + """ + return response + + def post_update_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Memorystore server but before it is returned to user code. 
+ + We recommend only using this `post_update_instance_with_metadata` + interceptor in new development instead of the `post_update_instance` interceptor. + When both interceptors are used, this `post_update_instance_with_metadata` interceptor runs after the + `post_update_instance` interceptor. The (possibly modified) response returned by + `post_update_instance` will be passed to + `post_update_instance_with_metadata`. + """ + return response, metadata + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the Memorystore server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the Memorystore server but before it is returned to user code. """ return response @@ -699,11 +1148,11 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. 
return self._operations_client - class _CreateInstance( - _BaseMemorystoreRestTransport._BaseCreateInstance, MemorystoreRestStub + class _BackupInstance( + _BaseMemorystoreRestTransport._BaseBackupInstance, MemorystoreRestStub ): def __hash__(self): - return hash("MemorystoreRestTransport.CreateInstance") + return hash("MemorystoreRestTransport.BackupInstance") @staticmethod def _get_response( @@ -730,17 +1179,17 @@ def _get_response( def __call__( self, - request: memorystore.CreateInstanceRequest, + request: memorystore.BackupInstanceRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the create instance method over HTTP. + r"""Call the backup instance method over HTTP. Args: - request (~.memorystore.CreateInstanceRequest): - The request object. Request message for [CreateInstance][]. + request (~.memorystore.BackupInstanceRequest): + The request object. Request for [BackupInstance]. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -758,21 +1207,1212 @@ def __call__( """ http_options = ( - _BaseMemorystoreRestTransport._BaseCreateInstance._get_http_options() + _BaseMemorystoreRestTransport._BaseBackupInstance._get_http_options() ) - request, metadata = self._interceptor.pre_create_instance(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseCreateInstance._get_transcoded_request( + request, metadata = self._interceptor.pre_backup_instance(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseBackupInstance._get_transcoded_request( http_options, request ) - body = _BaseMemorystoreRestTransport._BaseCreateInstance._get_request_body_json( + body = _BaseMemorystoreRestTransport._BaseBackupInstance._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseBackupInstance._get_query_params_json( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.memorystore_v1.MemorystoreClient.BackupInstance", + extra={ + "serviceName": "google.cloud.memorystore.v1.Memorystore", + "rpcName": "BackupInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MemorystoreRestTransport._BackupInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_backup_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_backup_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.memorystore_v1.MemorystoreClient.backup_instance", + extra={ + "serviceName": "google.cloud.memorystore.v1.Memorystore", + "rpcName": "BackupInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateInstance( + _BaseMemorystoreRestTransport._BaseCreateInstance, MemorystoreRestStub + ): + def __hash__(self): + return hash("MemorystoreRestTransport.CreateInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: memorystore.CreateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> 
operations_pb2.Operation: + r"""Call the create instance method over HTTP. + + Args: + request (~.memorystore.CreateInstanceRequest): + The request object. Request message for [CreateInstance][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseMemorystoreRestTransport._BaseCreateInstance._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_instance(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseCreateInstance._get_transcoded_request( + http_options, request + ) + + body = _BaseMemorystoreRestTransport._BaseCreateInstance._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseCreateInstance._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.memorystore_v1.MemorystoreClient.CreateInstance", + extra={ + "serviceName": "google.cloud.memorystore.v1.Memorystore", + "rpcName": "CreateInstance", 
+ "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MemorystoreRestTransport._CreateInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.memorystore_v1.MemorystoreClient.create_instance", + extra={ + "serviceName": "google.cloud.memorystore.v1.Memorystore", + "rpcName": "CreateInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteBackup( + _BaseMemorystoreRestTransport._BaseDeleteBackup, MemorystoreRestStub + ): + def __hash__(self): + return hash("MemorystoreRestTransport.DeleteBackup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + 
timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: memorystore.DeleteBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete backup method over HTTP. + + Args: + request (~.memorystore.DeleteBackupRequest): + The request object. Request for [DeleteBackup]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseMemorystoreRestTransport._BaseDeleteBackup._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_backup(request, metadata) + transcoded_request = ( + _BaseMemorystoreRestTransport._BaseDeleteBackup._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseMemorystoreRestTransport._BaseDeleteBackup._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.memorystore_v1.MemorystoreClient.DeleteBackup", + extra={ + "serviceName": "google.cloud.memorystore.v1.Memorystore", + "rpcName": "DeleteBackup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MemorystoreRestTransport._DeleteBackup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_backup_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.memorystore_v1.MemorystoreClient.delete_backup", + extra={ + "serviceName": "google.cloud.memorystore.v1.Memorystore", + "rpcName": "DeleteBackup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteInstance( + _BaseMemorystoreRestTransport._BaseDeleteInstance, MemorystoreRestStub + ): + def __hash__(self): + return hash("MemorystoreRestTransport.DeleteInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: memorystore.DeleteInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + 
r"""Call the delete instance method over HTTP. + + Args: + request (~.memorystore.DeleteInstanceRequest): + The request object. Request message for [DeleteInstance][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseMemorystoreRestTransport._BaseDeleteInstance._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_instance(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.memorystore_v1.MemorystoreClient.DeleteInstance", + extra={ + "serviceName": "google.cloud.memorystore.v1.Memorystore", + "rpcName": "DeleteInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = 
MemorystoreRestTransport._DeleteInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.memorystore_v1.MemorystoreClient.delete_instance", + extra={ + "serviceName": "google.cloud.memorystore.v1.Memorystore", + "rpcName": "DeleteInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ExportBackup( + _BaseMemorystoreRestTransport._BaseExportBackup, MemorystoreRestStub + ): + def __hash__(self): + return hash("MemorystoreRestTransport.ExportBackup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + 
return response + + def __call__( + self, + request: memorystore.ExportBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the export backup method over HTTP. + + Args: + request (~.memorystore.ExportBackupRequest): + The request object. Request for [ExportBackup]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseMemorystoreRestTransport._BaseExportBackup._get_http_options() + ) + + request, metadata = self._interceptor.pre_export_backup(request, metadata) + transcoded_request = ( + _BaseMemorystoreRestTransport._BaseExportBackup._get_transcoded_request( + http_options, request + ) + ) + + body = ( + _BaseMemorystoreRestTransport._BaseExportBackup._get_request_body_json( + transcoded_request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseMemorystoreRestTransport._BaseExportBackup._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": 
request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.memorystore_v1.MemorystoreClient.ExportBackup", + extra={ + "serviceName": "google.cloud.memorystore.v1.Memorystore", + "rpcName": "ExportBackup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MemorystoreRestTransport._ExportBackup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_export_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_backup_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.memorystore_v1.MemorystoreClient.export_backup", + extra={ + "serviceName": "google.cloud.memorystore.v1.Memorystore", + "rpcName": "ExportBackup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetBackup(_BaseMemorystoreRestTransport._BaseGetBackup, MemorystoreRestStub): + def __hash__(self): + return hash("MemorystoreRestTransport.GetBackup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = 
transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: memorystore.GetBackupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> memorystore.Backup: + r"""Call the get backup method over HTTP. + + Args: + request (~.memorystore.GetBackupRequest): + The request object. Request for [GetBackup]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.memorystore.Backup: + Backup of an instance. 
+ """ + + http_options = ( + _BaseMemorystoreRestTransport._BaseGetBackup._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_backup(request, metadata) + transcoded_request = ( + _BaseMemorystoreRestTransport._BaseGetBackup._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseMemorystoreRestTransport._BaseGetBackup._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.memorystore_v1.MemorystoreClient.GetBackup", + extra={ + "serviceName": "google.cloud.memorystore.v1.Memorystore", + "rpcName": "GetBackup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MemorystoreRestTransport._GetBackup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = memorystore.Backup() + pb_resp = memorystore.Backup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_backup_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = memorystore.Backup.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.memorystore_v1.MemorystoreClient.get_backup", + extra={ + "serviceName": "google.cloud.memorystore.v1.Memorystore", + "rpcName": "GetBackup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetBackupCollection( + _BaseMemorystoreRestTransport._BaseGetBackupCollection, MemorystoreRestStub + ): + def __hash__(self): + return hash("MemorystoreRestTransport.GetBackupCollection") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: memorystore.GetBackupCollectionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, 
bytes]]] = (), + ) -> memorystore.BackupCollection: + r"""Call the get backup collection method over HTTP. + + Args: + request (~.memorystore.GetBackupCollectionRequest): + The request object. Request for [GetBackupCollection]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.memorystore.BackupCollection: + BackupCollection of an instance. + """ + + http_options = ( + _BaseMemorystoreRestTransport._BaseGetBackupCollection._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_backup_collection( + request, metadata + ) + transcoded_request = _BaseMemorystoreRestTransport._BaseGetBackupCollection._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseGetBackupCollection._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.memorystore_v1.MemorystoreClient.GetBackupCollection", + extra={ + "serviceName": "google.cloud.memorystore.v1.Memorystore", + "rpcName": "GetBackupCollection", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + 
response = MemorystoreRestTransport._GetBackupCollection._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = memorystore.BackupCollection() + pb_resp = memorystore.BackupCollection.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_backup_collection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_backup_collection_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = memorystore.BackupCollection.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.memorystore_v1.MemorystoreClient.get_backup_collection", + extra={ + "serviceName": "google.cloud.memorystore.v1.Memorystore", + "rpcName": "GetBackupCollection", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetCertificateAuthority( + _BaseMemorystoreRestTransport._BaseGetCertificateAuthority, MemorystoreRestStub + ): + def __hash__(self): + return hash("MemorystoreRestTransport.GetCertificateAuthority") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + 
"{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: memorystore.GetCertificateAuthorityRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> memorystore.CertificateAuthority: + r"""Call the get certificate authority method over HTTP. + + Args: + request (~.memorystore.GetCertificateAuthorityRequest): + The request object. Request message for [GetCertificateAuthority][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.memorystore.CertificateAuthority: + A certificate authority for an + instance. 
+ + """ + + http_options = ( + _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_certificate_authority( + request, metadata + ) + transcoded_request = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.memorystore_v1.MemorystoreClient.GetCertificateAuthority", + extra={ + "serviceName": "google.cloud.memorystore.v1.Memorystore", + "rpcName": "GetCertificateAuthority", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MemorystoreRestTransport._GetCertificateAuthority._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = memorystore.CertificateAuthority() + pb_resp = memorystore.CertificateAuthority.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_certificate_authority(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_certificate_authority_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = memorystore.CertificateAuthority.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.memorystore_v1.MemorystoreClient.get_certificate_authority", + extra={ + "serviceName": "google.cloud.memorystore.v1.Memorystore", + "rpcName": "GetCertificateAuthority", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetInstance( + _BaseMemorystoreRestTransport._BaseGetInstance, MemorystoreRestStub + ): + def __hash__(self): + return hash("MemorystoreRestTransport.GetInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: memorystore.GetInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + 
timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> memorystore.Instance: + r"""Call the get instance method over HTTP. + + Args: + request (~.memorystore.GetInstanceRequest): + The request object. Request message for [GetInstance][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.memorystore.Instance: + A Memorystore instance. + """ + + http_options = ( + _BaseMemorystoreRestTransport._BaseGetInstance._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_instance(request, metadata) + transcoded_request = ( + _BaseMemorystoreRestTransport._BaseGetInstance._get_transcoded_request( + http_options, request + ) + ) + # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseCreateInstance._get_query_params_json( - transcoded_request + query_params = ( + _BaseMemorystoreRestTransport._BaseGetInstance._get_query_params_json( + transcoded_request + ) ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -783,7 +2423,7 @@ def __call__( ) method = transcoded_request["method"] try: - request_payload = json_format.MessageToJson(request) + request_payload = type(request).to_json(request) except: request_payload = None http_request = { @@ -793,24 +2433,23 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.memorystore_v1.MemorystoreClient.CreateInstance", + f"Sending request for google.cloud.memorystore_v1.MemorystoreClient.GetInstance", extra={ "serviceName": "google.cloud.memorystore.v1.Memorystore", - "rpcName": "CreateInstance", + "rpcName": 
"GetInstance", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = MemorystoreRestTransport._CreateInstance._get_response( + response = MemorystoreRestTransport._GetInstance._get_response( self._host, metadata, query_params, self._session, timeout, transcoded_request, - body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -819,19 +2458,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = memorystore.Instance() + pb_resp = memorystore.Instance.pb(resp) - resp = self._interceptor.post_create_instance(resp) + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_instance_with_metadata( + resp, _ = self._interceptor.post_get_instance_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = json_format.MessageToJson(resp) + response_payload = memorystore.Instance.to_json(response) except: response_payload = None http_response = { @@ -840,21 +2481,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.memorystore_v1.MemorystoreClient.create_instance", + "Received response for google.cloud.memorystore_v1.MemorystoreClient.get_instance", extra={ "serviceName": "google.cloud.memorystore.v1.Memorystore", - "rpcName": "CreateInstance", + "rpcName": "GetInstance", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _DeleteInstance( - _BaseMemorystoreRestTransport._BaseDeleteInstance, MemorystoreRestStub + class _ListBackupCollections( + 
_BaseMemorystoreRestTransport._BaseListBackupCollections, MemorystoreRestStub ): def __hash__(self): - return hash("MemorystoreRestTransport.DeleteInstance") + return hash("MemorystoreRestTransport.ListBackupCollections") @staticmethod def _get_response( @@ -880,17 +2521,17 @@ def _get_response( def __call__( self, - request: memorystore.DeleteInstanceRequest, + request: memorystore.ListBackupCollectionsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Call the delete instance method over HTTP. + ) -> memorystore.ListBackupCollectionsResponse: + r"""Call the list backup collections method over HTTP. Args: - request (~.memorystore.DeleteInstanceRequest): - The request object. Request message for [DeleteInstance][]. + request (~.memorystore.ListBackupCollectionsRequest): + The request object. Request for [ListBackupCollections] retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -900,24 +2541,23 @@ def __call__( be of type `bytes`. Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - + ~.memorystore.ListBackupCollectionsResponse: + Response for [ListBackupCollections]. 
""" http_options = ( - _BaseMemorystoreRestTransport._BaseDeleteInstance._get_http_options() + _BaseMemorystoreRestTransport._BaseListBackupCollections._get_http_options() ) - request, metadata = self._interceptor.pre_delete_instance(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_transcoded_request( + request, metadata = self._interceptor.pre_list_backup_collections( + request, metadata + ) + transcoded_request = _BaseMemorystoreRestTransport._BaseListBackupCollections._get_transcoded_request( http_options, request ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseDeleteInstance._get_query_params_json( + query_params = _BaseMemorystoreRestTransport._BaseListBackupCollections._get_query_params_json( transcoded_request ) @@ -929,7 +2569,7 @@ def __call__( ) method = transcoded_request["method"] try: - request_payload = json_format.MessageToJson(request) + request_payload = type(request).to_json(request) except: request_payload = None http_request = { @@ -939,17 +2579,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.memorystore_v1.MemorystoreClient.DeleteInstance", + f"Sending request for google.cloud.memorystore_v1.MemorystoreClient.ListBackupCollections", extra={ "serviceName": "google.cloud.memorystore.v1.Memorystore", - "rpcName": "DeleteInstance", + "rpcName": "ListBackupCollections", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = MemorystoreRestTransport._DeleteInstance._get_response( + response = MemorystoreRestTransport._ListBackupCollections._get_response( self._host, metadata, query_params, @@ -964,19 +2604,23 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = memorystore.ListBackupCollectionsResponse() + pb_resp = 
memorystore.ListBackupCollectionsResponse.pb(resp) - resp = self._interceptor.post_delete_instance(resp) + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_backup_collections(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_instance_with_metadata( + resp, _ = self._interceptor.post_list_backup_collections_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = json_format.MessageToJson(resp) + response_payload = ( + memorystore.ListBackupCollectionsResponse.to_json(response) + ) except: response_payload = None http_response = { @@ -985,21 +2629,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.memorystore_v1.MemorystoreClient.delete_instance", + "Received response for google.cloud.memorystore_v1.MemorystoreClient.list_backup_collections", extra={ "serviceName": "google.cloud.memorystore.v1.Memorystore", - "rpcName": "DeleteInstance", + "rpcName": "ListBackupCollections", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetCertificateAuthority( - _BaseMemorystoreRestTransport._BaseGetCertificateAuthority, MemorystoreRestStub + class _ListBackups( + _BaseMemorystoreRestTransport._BaseListBackups, MemorystoreRestStub ): def __hash__(self): - return hash("MemorystoreRestTransport.GetCertificateAuthority") + return hash("MemorystoreRestTransport.ListBackups") @staticmethod def _get_response( @@ -1025,17 +2669,17 @@ def _get_response( def __call__( self, - request: memorystore.GetCertificateAuthorityRequest, + request: memorystore.ListBackupsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> memorystore.CertificateAuthority: - r"""Call 
the get certificate authority method over HTTP. + ) -> memorystore.ListBackupsResponse: + r"""Call the list backups method over HTTP. Args: - request (~.memorystore.GetCertificateAuthorityRequest): - The request object. Request message for [GetCertificateAuthority][]. + request (~.memorystore.ListBackupsRequest): + The request object. Request for [ListBackups]. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1045,26 +2689,26 @@ def __call__( be of type `bytes`. Returns: - ~.memorystore.CertificateAuthority: - A certificate authority for an - instance. - + ~.memorystore.ListBackupsResponse: + Response for [ListBackups]. """ http_options = ( - _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_http_options() + _BaseMemorystoreRestTransport._BaseListBackups._get_http_options() ) - request, metadata = self._interceptor.pre_get_certificate_authority( - request, metadata - ) - transcoded_request = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_transcoded_request( - http_options, request + request, metadata = self._interceptor.pre_list_backups(request, metadata) + transcoded_request = ( + _BaseMemorystoreRestTransport._BaseListBackups._get_transcoded_request( + http_options, request + ) ) # Jsonify the query params - query_params = _BaseMemorystoreRestTransport._BaseGetCertificateAuthority._get_query_params_json( - transcoded_request + query_params = ( + _BaseMemorystoreRestTransport._BaseListBackups._get_query_params_json( + transcoded_request + ) ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -1085,17 +2729,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.memorystore_v1.MemorystoreClient.GetCertificateAuthority", + f"Sending request for google.cloud.memorystore_v1.MemorystoreClient.ListBackups", extra={ "serviceName": "google.cloud.memorystore.v1.Memorystore", - "rpcName": 
"GetCertificateAuthority", + "rpcName": "ListBackups", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = MemorystoreRestTransport._GetCertificateAuthority._get_response( + response = MemorystoreRestTransport._ListBackups._get_response( self._host, metadata, query_params, @@ -1110,23 +2754,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = memorystore.CertificateAuthority() - pb_resp = memorystore.CertificateAuthority.pb(resp) + resp = memorystore.ListBackupsResponse() + pb_resp = memorystore.ListBackupsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_certificate_authority(resp) + resp = self._interceptor.post_list_backups(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_certificate_authority_with_metadata( + resp, _ = self._interceptor.post_list_backups_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = memorystore.CertificateAuthority.to_json( - response - ) + response_payload = memorystore.ListBackupsResponse.to_json(response) except: response_payload = None http_response = { @@ -1135,21 +2777,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.memorystore_v1.MemorystoreClient.get_certificate_authority", + "Received response for google.cloud.memorystore_v1.MemorystoreClient.list_backups", extra={ "serviceName": "google.cloud.memorystore.v1.Memorystore", - "rpcName": "GetCertificateAuthority", + "rpcName": "ListBackups", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetInstance( - _BaseMemorystoreRestTransport._BaseGetInstance, MemorystoreRestStub + class _ListInstances( + 
_BaseMemorystoreRestTransport._BaseListInstances, MemorystoreRestStub ): def __hash__(self): - return hash("MemorystoreRestTransport.GetInstance") + return hash("MemorystoreRestTransport.ListInstances") @staticmethod def _get_response( @@ -1175,17 +2817,17 @@ def _get_response( def __call__( self, - request: memorystore.GetInstanceRequest, + request: memorystore.ListInstancesRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> memorystore.Instance: - r"""Call the get instance method over HTTP. + ) -> memorystore.ListInstancesResponse: + r"""Call the list instances method over HTTP. Args: - request (~.memorystore.GetInstanceRequest): - The request object. Request message for [GetInstance][]. + request (~.memorystore.ListInstancesRequest): + The request object. Request message for [ListInstances][]. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1195,24 +2837,22 @@ def __call__( be of type `bytes`. Returns: - ~.memorystore.Instance: - A Memorystore instance. + ~.memorystore.ListInstancesResponse: + Response message for [ListInstances][]. 
""" http_options = ( - _BaseMemorystoreRestTransport._BaseGetInstance._get_http_options() + _BaseMemorystoreRestTransport._BaseListInstances._get_http_options() ) - request, metadata = self._interceptor.pre_get_instance(request, metadata) - transcoded_request = ( - _BaseMemorystoreRestTransport._BaseGetInstance._get_transcoded_request( - http_options, request - ) + request, metadata = self._interceptor.pre_list_instances(request, metadata) + transcoded_request = _BaseMemorystoreRestTransport._BaseListInstances._get_transcoded_request( + http_options, request ) # Jsonify the query params query_params = ( - _BaseMemorystoreRestTransport._BaseGetInstance._get_query_params_json( + _BaseMemorystoreRestTransport._BaseListInstances._get_query_params_json( transcoded_request ) ) @@ -1235,17 +2875,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.memorystore_v1.MemorystoreClient.GetInstance", + f"Sending request for google.cloud.memorystore_v1.MemorystoreClient.ListInstances", extra={ "serviceName": "google.cloud.memorystore.v1.Memorystore", - "rpcName": "GetInstance", + "rpcName": "ListInstances", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = MemorystoreRestTransport._GetInstance._get_response( + response = MemorystoreRestTransport._ListInstances._get_response( self._host, metadata, query_params, @@ -1260,21 +2900,23 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = memorystore.Instance() - pb_resp = memorystore.Instance.pb(resp) + resp = memorystore.ListInstancesResponse() + pb_resp = memorystore.ListInstancesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_instance(resp) + resp = self._interceptor.post_list_instances(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = 
self._interceptor.post_get_instance_with_metadata( + resp, _ = self._interceptor.post_list_instances_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = memorystore.Instance.to_json(response) + response_payload = memorystore.ListInstancesResponse.to_json( + response + ) except: response_payload = None http_response = { @@ -1283,21 +2925,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.memorystore_v1.MemorystoreClient.get_instance", + "Received response for google.cloud.memorystore_v1.MemorystoreClient.list_instances", extra={ "serviceName": "google.cloud.memorystore.v1.Memorystore", - "rpcName": "GetInstance", + "rpcName": "ListInstances", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _ListInstances( - _BaseMemorystoreRestTransport._BaseListInstances, MemorystoreRestStub + class _RescheduleMaintenance( + _BaseMemorystoreRestTransport._BaseRescheduleMaintenance, MemorystoreRestStub ): def __hash__(self): - return hash("MemorystoreRestTransport.ListInstances") + return hash("MemorystoreRestTransport.RescheduleMaintenance") @staticmethod def _get_response( @@ -1318,22 +2960,24 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) return response def __call__( self, - request: memorystore.ListInstancesRequest, + request: memorystore.RescheduleMaintenanceRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> memorystore.ListInstancesResponse: - r"""Call the list instances method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the reschedule maintenance method over HTTP. Args: - request (~.memorystore.ListInstancesRequest): - The request object. 
Request message for [ListInstances][]. + request (~.memorystore.RescheduleMaintenanceRequest): + The request object. Request for rescheduling instance + maintenance. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1343,24 +2987,31 @@ def __call__( be of type `bytes`. Returns: - ~.memorystore.ListInstancesResponse: - Response message for [ListInstances][]. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + """ http_options = ( - _BaseMemorystoreRestTransport._BaseListInstances._get_http_options() + _BaseMemorystoreRestTransport._BaseRescheduleMaintenance._get_http_options() ) - request, metadata = self._interceptor.pre_list_instances(request, metadata) - transcoded_request = _BaseMemorystoreRestTransport._BaseListInstances._get_transcoded_request( + request, metadata = self._interceptor.pre_reschedule_maintenance( + request, metadata + ) + transcoded_request = _BaseMemorystoreRestTransport._BaseRescheduleMaintenance._get_transcoded_request( http_options, request ) + body = _BaseMemorystoreRestTransport._BaseRescheduleMaintenance._get_request_body_json( + transcoded_request + ) + # Jsonify the query params - query_params = ( - _BaseMemorystoreRestTransport._BaseListInstances._get_query_params_json( - transcoded_request - ) + query_params = _BaseMemorystoreRestTransport._BaseRescheduleMaintenance._get_query_params_json( + transcoded_request ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -1371,7 +3022,7 @@ def __call__( ) method = transcoded_request["method"] try: - request_payload = type(request).to_json(request) + request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { @@ -1381,23 +3032,24 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.memorystore_v1.MemorystoreClient.ListInstances", + 
f"Sending request for google.cloud.memorystore_v1.MemorystoreClient.RescheduleMaintenance", extra={ "serviceName": "google.cloud.memorystore.v1.Memorystore", - "rpcName": "ListInstances", + "rpcName": "RescheduleMaintenance", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = MemorystoreRestTransport._ListInstances._get_response( + response = MemorystoreRestTransport._RescheduleMaintenance._get_response( self._host, metadata, query_params, self._session, timeout, transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1406,23 +3058,19 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = memorystore.ListInstancesResponse() - pb_resp = memorystore.ListInstancesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_instances(resp) + resp = self._interceptor.post_reschedule_maintenance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_instances_with_metadata( + resp, _ = self._interceptor.post_reschedule_maintenance_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = memorystore.ListInstancesResponse.to_json( - response - ) + response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { @@ -1431,10 +3079,10 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.memorystore_v1.MemorystoreClient.list_instances", + "Received response for google.cloud.memorystore_v1.MemorystoreClient.reschedule_maintenance", extra={ "serviceName": "google.cloud.memorystore.v1.Memorystore", - 
"rpcName": "ListInstances", + "rpcName": "RescheduleMaintenance", "metadata": http_response["headers"], "httpResponse": http_response, }, @@ -1592,6 +3240,14 @@ def __call__( ) return resp + @property + def backup_instance( + self, + ) -> Callable[[memorystore.BackupInstanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BackupInstance(self._session, self._host, self._interceptor) # type: ignore + @property def create_instance( self, @@ -1600,6 +3256,14 @@ def create_instance( # In C++ this would require a dynamic_cast return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_backup( + self, + ) -> Callable[[memorystore.DeleteBackupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteBackup(self._session, self._host, self._interceptor) # type: ignore + @property def delete_instance( self, @@ -1608,6 +3272,32 @@ def delete_instance( # In C++ this would require a dynamic_cast return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + @property + def export_backup( + self, + ) -> Callable[[memorystore.ExportBackupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExportBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup( + self, + ) -> Callable[[memorystore.GetBackupRequest], memorystore.Backup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup_collection( + self, + ) -> Callable[ + [memorystore.GetBackupCollectionRequest], memorystore.BackupCollection + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackupCollection(self._session, self._host, self._interceptor) # type: ignore + @property def get_certificate_authority( self, @@ -1626,6 +3316,25 @@ def get_instance( # In C++ this would require a dynamic_cast return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + @property + def list_backup_collections( + self, + ) -> Callable[ + [memorystore.ListBackupCollectionsRequest], + memorystore.ListBackupCollectionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackupCollections(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backups( + self, + ) -> Callable[[memorystore.ListBackupsRequest], memorystore.ListBackupsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackups(self._session, self._host, self._interceptor) # type: ignore + @property def list_instances( self, @@ -1636,6 +3345,14 @@ def list_instances( # In C++ this would require a dynamic_cast return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + @property + def reschedule_maintenance( + self, + ) -> Callable[[memorystore.RescheduleMaintenanceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._RescheduleMaintenance(self._session, self._host, self._interceptor) # type: ignore + @property def update_instance( self, diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/rest_base.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/rest_base.py index 7435f1957cbe..ae91ce07e3ea 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/rest_base.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/rest_base.py @@ -89,6 +89,63 @@ def __init__( api_audience=api_audience, ) + class _BaseBackupInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/instances/*}:backup", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = memorystore.BackupInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( 
+ _BaseMemorystoreRestTransport._BaseBackupInstance._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseCreateInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -148,6 +205,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseDeleteBackup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/backupCollections/*/backups/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = memorystore.DeleteBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseDeleteBackup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseDeleteInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -195,6 +299,157 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseExportBackup: + def __hash__(self): # pragma: NO COVER + 
return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/backupCollections/*/backups/*}:export", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = memorystore.ExportBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseExportBackup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetBackup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/backupCollections/*/backups/*}", + }, + ] + return http_options + + @staticmethod + def 
_get_transcoded_request(http_options, request): + pb_request = memorystore.GetBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseGetBackup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetBackupCollection: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/backupCollections/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = memorystore.GetBackupCollectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseGetBackupCollection._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetCertificateAuthority: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -289,6 +544,100 @@ def 
_get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseListBackupCollections: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/backupCollections", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = memorystore.ListBackupCollectionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseListBackupCollections._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListBackups: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/backupCollections/*}/backups", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): 
+ pb_request = memorystore.ListBackupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseListBackups._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseListInstances: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -336,6 +685,63 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseRescheduleMaintenance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/instances/*}:rescheduleMaintenance", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = memorystore.RescheduleMaintenanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + 
json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMemorystoreRestTransport._BaseRescheduleMaintenance._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseUpdateInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/__init__.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/__init__.py index 412ec4452e77..fd36d368bd79 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/__init__.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/__init__.py @@ -14,42 +14,80 @@ # limitations under the License. # from .memorystore import ( + AutomatedBackupConfig, + Backup, + BackupCollection, + BackupFile, + BackupInstanceRequest, CertificateAuthority, ConnectionType, CreateInstanceRequest, + CrossInstanceReplicationConfig, + DeleteBackupRequest, DeleteInstanceRequest, DiscoveryEndpoint, + ExportBackupRequest, + GetBackupCollectionRequest, + GetBackupRequest, GetCertificateAuthorityRequest, GetInstanceRequest, Instance, + ListBackupCollectionsRequest, + ListBackupCollectionsResponse, + ListBackupsRequest, + ListBackupsResponse, ListInstancesRequest, ListInstancesResponse, + MaintenancePolicy, + MaintenanceSchedule, NodeConfig, OperationMetadata, PersistenceConfig, + PscAttachmentDetail, PscAutoConnection, PscConnection, PscConnectionStatus, + RescheduleMaintenanceRequest, UpdateInstanceRequest, + WeeklyMaintenanceWindow, ZoneDistributionConfig, ) __all__ = ( + "AutomatedBackupConfig", + "Backup", + "BackupCollection", + "BackupFile", + "BackupInstanceRequest", "CertificateAuthority", "CreateInstanceRequest", + "CrossInstanceReplicationConfig", + "DeleteBackupRequest", "DeleteInstanceRequest", 
"DiscoveryEndpoint", + "ExportBackupRequest", + "GetBackupCollectionRequest", + "GetBackupRequest", "GetCertificateAuthorityRequest", "GetInstanceRequest", "Instance", + "ListBackupCollectionsRequest", + "ListBackupCollectionsResponse", + "ListBackupsRequest", + "ListBackupsResponse", "ListInstancesRequest", "ListInstancesResponse", + "MaintenancePolicy", + "MaintenanceSchedule", "NodeConfig", "OperationMetadata", "PersistenceConfig", + "PscAttachmentDetail", "PscAutoConnection", "PscConnection", + "RescheduleMaintenanceRequest", "UpdateInstanceRequest", + "WeeklyMaintenanceWindow", "ZoneDistributionConfig", "ConnectionType", "PscConnectionStatus", diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/memorystore.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/memorystore.py index 05876b2e2fa8..716de0148973 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/memorystore.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/memorystore.py @@ -17,8 +17,11 @@ from typing import MutableMapping, MutableSequence +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( @@ -27,18 +30,37 @@ "PscConnectionStatus", "ConnectionType", "Instance", + "AutomatedBackupConfig", + "BackupCollection", + "Backup", + "BackupFile", + "CrossInstanceReplicationConfig", + "MaintenancePolicy", + "WeeklyMaintenanceWindow", + "MaintenanceSchedule", + "PscAttachmentDetail", "PscAutoConnection", "PscConnection", "DiscoveryEndpoint", "PersistenceConfig", "NodeConfig", "ZoneDistributionConfig", + "RescheduleMaintenanceRequest", "ListInstancesRequest", "ListInstancesResponse", "GetInstanceRequest", 
"CreateInstanceRequest", "UpdateInstanceRequest", "DeleteInstanceRequest", + "ListBackupCollectionsRequest", + "ListBackupCollectionsResponse", + "GetBackupCollectionRequest", + "ListBackupsRequest", + "ListBackupsResponse", + "GetBackupRequest", + "DeleteBackupRequest", + "ExportBackupRequest", + "BackupInstanceRequest", "GetCertificateAuthorityRequest", "CertificateAuthority", "OperationMetadata", @@ -87,9 +109,27 @@ class ConnectionType(proto.Enum): class Instance(proto.Message): r"""A Memorystore instance. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + gcs_source (google.cloud.memorystore_v1.types.Instance.GcsBackupSource): + Optional. Immutable. Backups that stored in + Cloud Storage buckets. The Cloud Storage buckets + need to be the same region as the instances. + Read permission is required to import from the + provided Cloud Storage Objects. + + This field is a member of `oneof`_ ``import_sources``. + managed_backup_source (google.cloud.memorystore_v1.types.Instance.ManagedBackupSource): + Optional. Immutable. Backups that generated + and managed by memorystore service. + + This field is a member of `oneof`_ ``import_sources``. name (str): Identifier. Unique name of the instance. Format: @@ -125,18 +165,17 @@ class Instance(proto.Message): shard_count (int): Optional. Number of shards for the instance. discovery_endpoints (MutableSequence[google.cloud.memorystore_v1.types.DiscoveryEndpoint]): - Output only. Endpoints clients can connect to - the instance through. Currently only one - discovery endpoint is supported. + Output only. Deprecated: Use the + endpoints.connections.psc_auto_connection or + endpoints.connections.psc_connection values instead. 
node_type (google.cloud.memorystore_v1.types.Instance.NodeType): - Optional. Immutable. Machine type for - individual nodes of the instance. + Optional. Machine type for individual nodes + of the instance. persistence_config (google.cloud.memorystore_v1.types.PersistenceConfig): Optional. Persistence configuration of the instance. engine_version (str): - Optional. Immutable. Engine version of the - instance. + Optional. Engine version of the instance. engine_configs (MutableMapping[str, str]): Optional. User-provided engine configurations for the instance. @@ -153,12 +192,48 @@ class Instance(proto.Message): This field is a member of `oneof`_ ``_deletion_protection_enabled``. psc_auto_connections (MutableSequence[google.cloud.memorystore_v1.types.PscAutoConnection]): - Required. Immutable. User inputs and resource - details of the auto-created PSC connections. + Optional. Immutable. Deprecated: Use the + endpoints.connections.psc_auto_connection value instead. + psc_attachment_details (MutableSequence[google.cloud.memorystore_v1.types.PscAttachmentDetail]): + Output only. Service attachment details to + configure PSC connections. endpoints (MutableSequence[google.cloud.memorystore_v1.types.Instance.InstanceEndpoint]): Optional. Endpoints for the instance. mode (google.cloud.memorystore_v1.types.Instance.Mode): Optional. The mode config for the instance. + ondemand_maintenance (bool): + Optional. Input only. Ondemand maintenance + for the instance. + + This field is a member of `oneof`_ ``_ondemand_maintenance``. + maintenance_policy (google.cloud.memorystore_v1.types.MaintenancePolicy): + Optional. The maintenance policy for the + instance. If not provided, the maintenance event + will be performed based on Memorystore internal + rollout schedule. + maintenance_schedule (google.cloud.memorystore_v1.types.MaintenanceSchedule): + Output only. Published maintenance schedule. 
+ cross_instance_replication_config (google.cloud.memorystore_v1.types.CrossInstanceReplicationConfig): + Optional. The config for cross instance + replication. + async_instance_endpoints_deletion_enabled (bool): + Optional. If true, instance endpoints that + are created and registered by customers can be + deleted asynchronously. That is, such an + instance endpoint can be de-registered before + the forwarding rules in the instance endpoint + are deleted. + + This field is a member of `oneof`_ ``_async_instance_endpoints_deletion_enabled``. + backup_collection (str): + Output only. The backup collection full + resource name. Example: + projects/{project}/locations/{location}/backupCollections/{collection} + + This field is a member of `oneof`_ ``_backup_collection``. + automated_backup_config (google.cloud.memorystore_v1.types.AutomatedBackupConfig): + Optional. The automated backup config for the + instance. """ class State(proto.Enum): @@ -284,6 +359,16 @@ class UpdateInfo(proto.Message): per shard for the instance. This field is a member of `oneof`_ ``_target_replica_count``. + target_engine_version (str): + Output only. Target engine version for the + instance. + + This field is a member of `oneof`_ ``_target_engine_version``. + target_node_type (google.cloud.memorystore_v1.types.Instance.NodeType): + Output only. Target node type for the + instance. + + This field is a member of `oneof`_ ``_target_node_type``. 
""" target_shard_count: int = proto.Field( @@ -296,6 +381,17 @@ class UpdateInfo(proto.Message): number=2, optional=True, ) + target_engine_version: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + target_node_type: "Instance.NodeType" = proto.Field( + proto.ENUM, + number=4, + optional=True, + enum="Instance.NodeType", + ) update_info: "Instance.StateInfo.UpdateInfo" = proto.Field( proto.MESSAGE, @@ -304,6 +400,41 @@ class UpdateInfo(proto.Message): message="Instance.StateInfo.UpdateInfo", ) + class GcsBackupSource(proto.Message): + r"""Backups that stored in Cloud Storage buckets. + The Cloud Storage buckets need to be the same region as the + instances. + + Attributes: + uris (MutableSequence[str]): + Optional. Example: gs://bucket1/object1, + gs://bucket2/folder2/object2 + """ + + uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + class ManagedBackupSource(proto.Message): + r"""Backups that generated and managed by memorystore. + + Attributes: + backup (str): + Optional. Example: + //memorystore.googleapis.com/projects/{project}/locations/{location}/backupCollections/{collection}/backups/{backup} + A shorter version (without the prefix) of the backup name is + also supported, like + projects/{project}/locations/{location}/backupCollections/{collection}/backups/{backup_id} + In this case, it assumes the backup is under + memorystore.googleapis.com. + """ + + backup: str = proto.Field( + proto.STRING, + number=1, + ) + class InstanceEndpoint(proto.Message): r"""InstanceEndpoint consists of PSC connections that are created as a group in each VPC network for accessing the instance. In @@ -335,9 +466,9 @@ class ConnectionDetail(proto.Message): Attributes: psc_auto_connection (google.cloud.memorystore_v1.types.PscAutoConnection): - Detailed information of a PSC connection that - is created through service connectivity - automation. + Immutable. 
Detailed information of a PSC + connection that is created through service + connectivity automation. This field is a member of `oneof`_ ``connection``. psc_connection (google.cloud.memorystore_v1.types.PscConnection): @@ -360,6 +491,18 @@ class ConnectionDetail(proto.Message): message="PscConnection", ) + gcs_source: GcsBackupSource = proto.Field( + proto.MESSAGE, + number=23, + oneof="import_sources", + message=GcsBackupSource, + ) + managed_backup_source: ManagedBackupSource = proto.Field( + proto.MESSAGE, + number=24, + oneof="import_sources", + message=ManagedBackupSource, + ) name: str = proto.Field( proto.STRING, number=1, @@ -436,35 +579,629 @@ class ConnectionDetail(proto.Message): proto.STRING, number=16, ) - node_config: "NodeConfig" = proto.Field( + node_config: "NodeConfig" = proto.Field( + proto.MESSAGE, + number=17, + message="NodeConfig", + ) + zone_distribution_config: "ZoneDistributionConfig" = proto.Field( + proto.MESSAGE, + number=18, + message="ZoneDistributionConfig", + ) + deletion_protection_enabled: bool = proto.Field( + proto.BOOL, + number=19, + optional=True, + ) + psc_auto_connections: MutableSequence["PscAutoConnection"] = proto.RepeatedField( + proto.MESSAGE, + number=20, + message="PscAutoConnection", + ) + psc_attachment_details: MutableSequence[ + "PscAttachmentDetail" + ] = proto.RepeatedField( + proto.MESSAGE, + number=21, + message="PscAttachmentDetail", + ) + endpoints: MutableSequence[InstanceEndpoint] = proto.RepeatedField( + proto.MESSAGE, + number=25, + message=InstanceEndpoint, + ) + mode: Mode = proto.Field( + proto.ENUM, + number=26, + enum=Mode, + ) + ondemand_maintenance: bool = proto.Field( + proto.BOOL, + number=28, + optional=True, + ) + maintenance_policy: "MaintenancePolicy" = proto.Field( + proto.MESSAGE, + number=31, + message="MaintenancePolicy", + ) + maintenance_schedule: "MaintenanceSchedule" = proto.Field( + proto.MESSAGE, + number=32, + message="MaintenanceSchedule", + ) + 
cross_instance_replication_config: "CrossInstanceReplicationConfig" = proto.Field( + proto.MESSAGE, + number=33, + message="CrossInstanceReplicationConfig", + ) + async_instance_endpoints_deletion_enabled: bool = proto.Field( + proto.BOOL, + number=44, + optional=True, + ) + backup_collection: str = proto.Field( + proto.STRING, + number=47, + optional=True, + ) + automated_backup_config: "AutomatedBackupConfig" = proto.Field( + proto.MESSAGE, + number=48, + message="AutomatedBackupConfig", + ) + + +class AutomatedBackupConfig(proto.Message): + r"""The automated backup config for an instance. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fixed_frequency_schedule (google.cloud.memorystore_v1.types.AutomatedBackupConfig.FixedFrequencySchedule): + Optional. Trigger automated backups at a + fixed frequency. + + This field is a member of `oneof`_ ``schedule``. + automated_backup_mode (google.cloud.memorystore_v1.types.AutomatedBackupConfig.AutomatedBackupMode): + Optional. The automated backup mode. If the + mode is disabled, the other fields will be + ignored. + retention (google.protobuf.duration_pb2.Duration): + Optional. How long to keep automated backups + before the backups are deleted. The value should + be between 1 day and 365 days. If not specified, + the default value is 35 days. + """ + + class AutomatedBackupMode(proto.Enum): + r"""The automated backup mode. + + Values: + AUTOMATED_BACKUP_MODE_UNSPECIFIED (0): + Default value. Automated backup config is not + specified. + DISABLED (1): + Automated backup config disabled. + ENABLED (2): + Automated backup config enabled. + """ + AUTOMATED_BACKUP_MODE_UNSPECIFIED = 0 + DISABLED = 1 + ENABLED = 2 + + class FixedFrequencySchedule(proto.Message): + r"""This schedule allows the backup to be triggered at a fixed + frequency (currently only daily is supported). 
+ + Attributes: + start_time (google.type.timeofday_pb2.TimeOfDay): + Required. The start time of every automated + backup in UTC. It must be set to the start of an + hour. This field is required. + """ + + start_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=2, + message=timeofday_pb2.TimeOfDay, + ) + + fixed_frequency_schedule: FixedFrequencySchedule = proto.Field( + proto.MESSAGE, + number=2, + oneof="schedule", + message=FixedFrequencySchedule, + ) + automated_backup_mode: AutomatedBackupMode = proto.Field( + proto.ENUM, + number=1, + enum=AutomatedBackupMode, + ) + retention: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + + +class BackupCollection(proto.Message): + r"""BackupCollection of an instance. + + Attributes: + name (str): + Identifier. Full resource path of the backup + collection. + instance_uid (str): + Output only. The instance uid of the backup + collection. + instance (str): + Output only. The full resource path of the + instance the backup collection belongs to. + Example: + + projects/{project}/locations/{location}/instances/{instance} + kms_key (str): + Output only. The KMS key used to encrypt the + backups under this backup collection. + uid (str): + Output only. System assigned unique + identifier of the backup collection. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the backup + collection was created. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + instance_uid: str = proto.Field( + proto.STRING, + number=3, + ) + instance: str = proto.Field( + proto.STRING, + number=4, + ) + kms_key: str = proto.Field( + proto.STRING, + number=5, + ) + uid: str = proto.Field( + proto.STRING, + number=6, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + + +class Backup(proto.Message): + r"""Backup of an instance. 
+ + Attributes: + name (str): + Identifier. Full resource path of the backup. the last part + of the name is the backup id with the following format: + [YYYYMMDDHHMMSS]_[Shorted Instance UID] OR customer + specified while backup instance. Example: + 20240515123000_1234 + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the backup was + created. + instance (str): + Output only. Instance resource path of this + backup. + instance_uid (str): + Output only. Instance uid of this backup. + total_size_bytes (int): + Output only. Total size of the backup in + bytes. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the backup will + expire. + engine_version (str): + Output only. valkey-7.5/valkey-8.0, etc. + backup_files (MutableSequence[google.cloud.memorystore_v1.types.BackupFile]): + Output only. List of backup files of the + backup. + node_type (google.cloud.memorystore_v1.types.Instance.NodeType): + Output only. Node type of the instance. + replica_count (int): + Output only. Number of replicas for the + instance. + shard_count (int): + Output only. Number of shards for the + instance. + backup_type (google.cloud.memorystore_v1.types.Backup.BackupType): + Output only. Type of the backup. + state (google.cloud.memorystore_v1.types.Backup.State): + Output only. State of the backup. + uid (str): + Output only. System assigned unique + identifier of the backup. + """ + + class BackupType(proto.Enum): + r"""Type of the backup. + + Values: + BACKUP_TYPE_UNSPECIFIED (0): + The default value, not set. + ON_DEMAND (1): + On-demand backup. + AUTOMATED (2): + Automated backup. + """ + BACKUP_TYPE_UNSPECIFIED = 0 + ON_DEMAND = 1 + AUTOMATED = 2 + + class State(proto.Enum): + r"""State of the backup. + + Values: + STATE_UNSPECIFIED (0): + The default value, not set. + CREATING (1): + The backup is being created. + ACTIVE (2): + The backup is active to be used. + DELETING (3): + The backup is being deleted. 
+ SUSPENDED (4): + The backup is currently suspended due to + reasons like project deletion, billing account + closure, etc. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + DELETING = 3 + SUSPENDED = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + instance: str = proto.Field( + proto.STRING, + number=3, + ) + instance_uid: str = proto.Field( + proto.STRING, + number=4, + ) + total_size_bytes: int = proto.Field( + proto.INT64, + number=5, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + engine_version: str = proto.Field( + proto.STRING, + number=7, + ) + backup_files: MutableSequence["BackupFile"] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message="BackupFile", + ) + node_type: "Instance.NodeType" = proto.Field( + proto.ENUM, + number=9, + enum="Instance.NodeType", + ) + replica_count: int = proto.Field( + proto.INT32, + number=10, + ) + shard_count: int = proto.Field( + proto.INT32, + number=11, + ) + backup_type: BackupType = proto.Field( + proto.ENUM, + number=12, + enum=BackupType, + ) + state: State = proto.Field( + proto.ENUM, + number=13, + enum=State, + ) + uid: str = proto.Field( + proto.STRING, + number=15, + ) + + +class BackupFile(proto.Message): + r"""Backup is consisted of multiple backup files. + + Attributes: + file_name (str): + Output only. e.g: .rdb + size_bytes (int): + Output only. Size of the backup file in + bytes. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the backup file + was created. 
+ """ + + file_name: str = proto.Field( + proto.STRING, + number=1, + ) + size_bytes: int = proto.Field( + proto.INT64, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class CrossInstanceReplicationConfig(proto.Message): + r"""Cross instance replication config. + + Attributes: + instance_role (google.cloud.memorystore_v1.types.CrossInstanceReplicationConfig.InstanceRole): + Required. The role of the instance in cross + instance replication. + primary_instance (google.cloud.memorystore_v1.types.CrossInstanceReplicationConfig.RemoteInstance): + Optional. Details of the primary instance + that is used as the replication source for this + secondary instance. + + This field is only set for a secondary instance. + secondary_instances (MutableSequence[google.cloud.memorystore_v1.types.CrossInstanceReplicationConfig.RemoteInstance]): + Optional. List of secondary instances that + are replicating from this primary instance. + + This field is only set for a primary instance. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last time cross instance + replication config was updated. + membership (google.cloud.memorystore_v1.types.CrossInstanceReplicationConfig.Membership): + Output only. An output only view of all the + member instances participating in the cross + instance replication. This view will be provided + by every member instance irrespective of its + instance role(primary or secondary). + + A primary instance can provide information about + all the secondary instances replicating from it. + However, a secondary instance only knows about + the primary instance from which it is + replicating. However, for scenarios, where the + primary instance is unavailable(e.g. regional + outage), a Getinstance request can be sent to + any other member instance and this field will + list all the member instances participating in + cross instance replication. 
+ """ + + class InstanceRole(proto.Enum): + r"""The role of the instance in cross instance replication. + + Values: + INSTANCE_ROLE_UNSPECIFIED (0): + instance role is not set. + The behavior is equivalent to NONE. + NONE (1): + This instance does not participate in cross + instance replication. It is an independent + instance and does not replicate to or from any + other instances. + PRIMARY (2): + A instance that allows both reads and writes. + Any data written to this instance is also + replicated to the attached secondary instances. + SECONDARY (3): + A instance that allows only reads and + replicates data from a primary instance. + """ + INSTANCE_ROLE_UNSPECIFIED = 0 + NONE = 1 + PRIMARY = 2 + SECONDARY = 3 + + class RemoteInstance(proto.Message): + r"""Details of the remote instance associated with this instance + in a cross instance replication setup. + + Attributes: + instance (str): + Optional. The full resource path of the + remote instance in the format: + projects//locations//instances/ + uid (str): + Output only. The unique identifier of the + remote instance. + """ + + instance: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + + class Membership(proto.Message): + r"""An output only view of all the member instances participating + in the cross instance replication. + + Attributes: + primary_instance (google.cloud.memorystore_v1.types.CrossInstanceReplicationConfig.RemoteInstance): + Output only. The primary instance that acts + as the source of replication for the secondary + instances. + secondary_instances (MutableSequence[google.cloud.memorystore_v1.types.CrossInstanceReplicationConfig.RemoteInstance]): + Output only. The list of secondary instances + replicating from the primary instance. 
+ """ + + primary_instance: "CrossInstanceReplicationConfig.RemoteInstance" = proto.Field( + proto.MESSAGE, + number=1, + message="CrossInstanceReplicationConfig.RemoteInstance", + ) + secondary_instances: MutableSequence[ + "CrossInstanceReplicationConfig.RemoteInstance" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="CrossInstanceReplicationConfig.RemoteInstance", + ) + + instance_role: InstanceRole = proto.Field( + proto.ENUM, + number=1, + enum=InstanceRole, + ) + primary_instance: RemoteInstance = proto.Field( + proto.MESSAGE, + number=2, + message=RemoteInstance, + ) + secondary_instances: MutableSequence[RemoteInstance] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=RemoteInstance, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + membership: Membership = proto.Field( + proto.MESSAGE, + number=5, + message=Membership, + ) + + +class MaintenancePolicy(proto.Message): + r"""Maintenance policy per instance. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the policy was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the policy was + updated. + weekly_maintenance_window (MutableSequence[google.cloud.memorystore_v1.types.WeeklyMaintenanceWindow]): + Optional. Maintenance window that is applied to resources + covered by this policy. Minimum 1. For the current version, + the maximum number of weekly_window is expected to be one. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + weekly_maintenance_window: MutableSequence[ + "WeeklyMaintenanceWindow" + ] = proto.RepeatedField( proto.MESSAGE, - number=17, - message="NodeConfig", + number=3, + message="WeeklyMaintenanceWindow", ) - zone_distribution_config: "ZoneDistributionConfig" = proto.Field( - proto.MESSAGE, - number=18, - message="ZoneDistributionConfig", + + +class WeeklyMaintenanceWindow(proto.Message): + r"""Time window specified for weekly operations. + + Attributes: + day (google.type.dayofweek_pb2.DayOfWeek): + Optional. Allows to define schedule that runs + specified day of the week. + start_time (google.type.timeofday_pb2.TimeOfDay): + Optional. Start time of the window in UTC. + """ + + day: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=1, + enum=dayofweek_pb2.DayOfWeek, ) - deletion_protection_enabled: bool = proto.Field( - proto.BOOL, - number=19, - optional=True, + start_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=2, + message=timeofday_pb2.TimeOfDay, ) - psc_auto_connections: MutableSequence["PscAutoConnection"] = proto.RepeatedField( + + +class MaintenanceSchedule(proto.Message): + r"""Upcoming maintenance schedule. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The start time of any upcoming + scheduled maintenance for this instance. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The end time of any upcoming + scheduled maintenance for this instance. 
+ """ + + start_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, - number=20, - message="PscAutoConnection", + number=1, + message=timestamp_pb2.Timestamp, ) - endpoints: MutableSequence[InstanceEndpoint] = proto.RepeatedField( + end_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, - number=25, - message=InstanceEndpoint, + number=2, + message=timestamp_pb2.Timestamp, ) - mode: Mode = proto.Field( + + +class PscAttachmentDetail(proto.Message): + r"""Configuration of a service attachment of the cluster, for + creating PSC connections. + + Attributes: + service_attachment (str): + Output only. Service attachment URI which + your self-created PscConnection should use as + target. + connection_type (google.cloud.memorystore_v1.types.ConnectionType): + Output only. Type of Psc endpoint. + """ + + service_attachment: str = proto.Field( + proto.STRING, + number=1, + ) + connection_type: "ConnectionType" = proto.Field( proto.ENUM, - number=26, - enum=Mode, + number=4, + enum="ConnectionType", ) @@ -475,8 +1212,8 @@ class PscAutoConnection(proto.Message): Attributes: port (int): - Optional. Output only. port will only be set - for Primary/Reader or Discovery endpoint. + Optional. port will only be set for + Primary/Reader or Discovery endpoint. This field is a member of `oneof`_ ``ports``. psc_connection_id (str): @@ -557,9 +1294,16 @@ class PscAutoConnection(proto.Message): class PscConnection(proto.Message): r"""User created Psc connection configuration. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: + port (int): + Optional. port will only be set for + Primary/Reader or Discovery endpoint. + + This field is a member of `oneof`_ ``ports``. psc_connection_id (str): - Output only. The PSC connection id of the + Required. The PSC connection id of the forwarding rule connected to the service attachment. 
ip_address (str): @@ -591,6 +1335,11 @@ class PscConnection(proto.Message): Output only. Type of the PSC connection. """ + port: int = proto.Field( + proto.INT32, + number=9, + oneof="ports", + ) psc_connection_id: str = proto.Field( proto.STRING, number=1, @@ -845,6 +1594,56 @@ class ZoneDistributionMode(proto.Enum): ) +class RescheduleMaintenanceRequest(proto.Message): + r"""Request for rescheduling instance maintenance. + + Attributes: + name (str): + Required. Name of the instance to reschedule maintenance + for: + ``projects/{project}/locations/{location_id}/instances/{instance}`` + reschedule_type (google.cloud.memorystore_v1.types.RescheduleMaintenanceRequest.RescheduleType): + Required. If reschedule type is SPECIFIC_TIME, schedule_time + must be set. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. Timestamp when the maintenance shall be + rescheduled to if reschedule_type=SPECIFIC_TIME, in RFC 3339 + format. Example: ``2012-11-15T16:19:00.094Z``. + """ + + class RescheduleType(proto.Enum): + r"""Reschedule options. + + Values: + RESCHEDULE_TYPE_UNSPECIFIED (0): + Not set. + IMMEDIATE (1): + If the user wants to schedule the maintenance + to happen now. + SPECIFIC_TIME (3): + If the user wants to reschedule the + maintenance to a specific time. + """ + RESCHEDULE_TYPE_UNSPECIFIED = 0 + IMMEDIATE = 1 + SPECIFIC_TIME = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + reschedule_type: RescheduleType = proto.Field( + proto.ENUM, + number=2, + enum=RescheduleType, + ) + schedule_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + class ListInstancesRequest(proto.Message): r"""Request message for [ListInstances][]. @@ -1091,6 +1890,274 @@ class DeleteInstanceRequest(proto.Message): ) +class ListBackupCollectionsRequest(proto.Message): + r"""Request for [ListBackupCollections] + + Attributes: + parent (str): + Required. 
The resource name of the backupCollection location + using the form: + ``projects/{project_id}/locations/{location_id}`` where + ``location_id`` refers to a Google Cloud region. + page_size (int): + Optional. The maximum number of items to return. + + If not specified, a default value of 1000 will be used by + the service. Regardless of the page_size value, the response + may include a partial list and a caller should only rely on + response's + [``next_page_token``][google.cloud.memorystore.v1.ListBackupCollectionsResponse.next_page_token] + to determine if there are more clusters left to be queried. + page_token (str): + Optional. The ``next_page_token`` value returned from a + previous [ListBackupCollections] request, if any. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListBackupCollectionsResponse(proto.Message): + r"""Response for [ListBackupCollections]. + + Attributes: + backup_collections (MutableSequence[google.cloud.memorystore_v1.types.BackupCollection]): + A list of backupCollections in the project. + + If the ``location_id`` in the parent field of the request is + "-", all regions available to the project are queried, and + the results aggregated. If in such an aggregated query a + location is unavailable, a placeholder backupCollection + entry is included in the response with the ``name`` field + set to a value of the form + ``projects/{project_id}/locations/{location_id}/backupCollections/``- + and the ``status`` field set to ERROR and ``status_message`` + field set to "location not available for + ListBackupCollections". + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable (MutableSequence[str]): + Locations that could not be reached. 
+ """ + + @property + def raw_page(self): + return self + + backup_collections: MutableSequence["BackupCollection"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="BackupCollection", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetBackupCollectionRequest(proto.Message): + r"""Request for [GetBackupCollection]. + + Attributes: + name (str): + Required. Instance backupCollection resource name using the + form: + ``projects/{project_id}/locations/{location_id}/backupCollections/{backup_collection_id}`` + where ``location_id`` refers to a Google Cloud region. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListBackupsRequest(proto.Message): + r"""Request for [ListBackups]. + + Attributes: + parent (str): + Required. The resource name of the backupCollection using + the form: + ``projects/{project_id}/locations/{location_id}/backupCollections/{backup_collection_id}`` + page_size (int): + Optional. The maximum number of items to return. + + If not specified, a default value of 1000 will be used by + the service. Regardless of the page_size value, the response + may include a partial list and a caller should only rely on + response's + [``next_page_token``][google.cloud.memorystore.v1.ListBackupsResponse.next_page_token] + to determine if there are more clusters left to be queried. + page_token (str): + Optional. The ``next_page_token`` value returned from a + previous [ListBackupCollections] request, if any. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListBackupsResponse(proto.Message): + r"""Response for [ListBackups]. 
+ + Attributes: + backups (MutableSequence[google.cloud.memorystore_v1.types.Backup]): + A list of backups in the project. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable (MutableSequence[str]): + Backups that could not be reached. + """ + + @property + def raw_page(self): + return self + + backups: MutableSequence["Backup"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Backup", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetBackupRequest(proto.Message): + r"""Request for [GetBackup]. + + Attributes: + name (str): + Required. Instance backup resource name using the form: + ``projects/{project_id}/locations/{location_id}/backupCollections/{backup_collection_id}/backups/{backup_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteBackupRequest(proto.Message): + r"""Request for [DeleteBackup]. + + Attributes: + name (str): + Required. Instance backup resource name using the form: + ``projects/{project_id}/locations/{location_id}/backupCollections/{backup_collection_id}/backups/{backup_id}`` + request_id (str): + Optional. Idempotent request UUID. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ExportBackupRequest(proto.Message): + r"""Request for [ExportBackup]. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcs_bucket (str): + Google Cloud Storage bucket, like + "my-bucket". + + This field is a member of `oneof`_ ``destination``. + name (str): + Required. 
Instance backup resource name using the form: + ``projects/{project_id}/locations/{location_id}/backupCollections/{backup_collection_id}/backups/{backup_id}`` + """ + + gcs_bucket: str = proto.Field( + proto.STRING, + number=2, + oneof="destination", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class BackupInstanceRequest(proto.Message): + r"""Request for [BackupInstance]. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. Instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a Google Cloud region. + ttl (google.protobuf.duration_pb2.Duration): + Optional. TTL for the backup to expire. Value + range is 1 day to 100 years. If not specified, + the default value is 100 years. + backup_id (str): + Optional. The id of the backup to be created. If not + specified, the default value ([YYYYMMDDHHMMSS]_[Shortened + Instance UID] is used. + + This field is a member of `oneof`_ ``_backup_id``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + ttl: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + backup_id: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + + class GetCertificateAuthorityRequest(proto.Message): r"""Request message for [GetCertificateAuthority][]. 
diff --git a/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_backup_instance_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_backup_instance_sync.py new file mode 100644 index 000000000000..6909b4e2951f --- /dev/null +++ b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_backup_instance_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BackupInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memorystore + + +# [START memorystore_v1_generated_Memorystore_BackupInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memorystore_v1 + + +def sample_backup_instance(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1.BackupInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.backup_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END memorystore_v1_generated_Memorystore_BackupInstance_sync] diff --git a/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_create_instance_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_create_instance_sync.py index c3299b3eb25c..c2638c9c097d 100644 --- a/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_create_instance_sync.py +++ b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_create_instance_sync.py @@ -39,15 +39,9 @@ def sample_create_instance(): client = memorystore_v1.MemorystoreClient() # Initialize request argument(s) - instance = memorystore_v1.Instance() - instance.psc_auto_connections.port = 453 - instance.psc_auto_connections.project_id = "project_id_value" - instance.psc_auto_connections.network = "network_value" - request = memorystore_v1.CreateInstanceRequest( parent="parent_value", instance_id="instance_id_value", - instance=instance, ) # Make the request diff --git a/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_delete_backup_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_delete_backup_sync.py new file mode 
100644 index 000000000000..17b59489bf2a --- /dev/null +++ b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_delete_backup_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memorystore + + +# [START memorystore_v1_generated_Memorystore_DeleteBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memorystore_v1 + + +def sample_delete_backup(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END memorystore_v1_generated_Memorystore_DeleteBackup_sync] diff --git a/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_export_backup_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_export_backup_sync.py new file mode 100644 index 000000000000..baeeaacae104 --- /dev/null +++ b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_export_backup_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memorystore + + +# [START memorystore_v1_generated_Memorystore_ExportBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memorystore_v1 + + +def sample_export_backup(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1.ExportBackupRequest( + gcs_bucket="gcs_bucket_value", + name="name_value", + ) + + # Make the request + operation = client.export_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END memorystore_v1_generated_Memorystore_ExportBackup_sync] diff --git a/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_get_backup_collection_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_get_backup_collection_sync.py new file mode 100644 index 000000000000..ded07519ad02 --- /dev/null +++ b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_get_backup_collection_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupCollection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memorystore + + +# [START memorystore_v1_generated_Memorystore_GetBackupCollection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memorystore_v1 + + +def sample_get_backup_collection(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1.GetBackupCollectionRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_collection(request=request) + + # Handle the response + print(response) + +# [END memorystore_v1_generated_Memorystore_GetBackupCollection_sync] diff --git a/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_get_backup_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_get_backup_sync.py new file mode 100644 index 000000000000..90a42c591008 --- /dev/null +++ b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_get_backup_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memorystore + + +# [START memorystore_v1_generated_Memorystore_GetBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memorystore_v1 + + +def sample_get_backup(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup(request=request) + + # Handle the response + print(response) + +# [END memorystore_v1_generated_Memorystore_GetBackup_sync] diff --git a/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_list_backup_collections_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_list_backup_collections_sync.py new file mode 100644 index 000000000000..cb1b1a2bb934 --- /dev/null +++ b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_list_backup_collections_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupCollections +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memorystore + + +# [START memorystore_v1_generated_Memorystore_ListBackupCollections_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memorystore_v1 + + +def sample_list_backup_collections(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1.ListBackupCollectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_collections(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END memorystore_v1_generated_Memorystore_ListBackupCollections_sync] diff --git a/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_list_backups_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_list_backups_sync.py new file mode 100644 index 000000000000..ec315f1a1d3e --- /dev/null +++ b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_list_backups_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memorystore + + +# [START memorystore_v1_generated_Memorystore_ListBackups_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memorystore_v1 + + +def sample_list_backups(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END memorystore_v1_generated_Memorystore_ListBackups_sync] diff --git a/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_reschedule_maintenance_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_reschedule_maintenance_sync.py new file mode 100644 index 000000000000..98f5332bbf05 --- /dev/null +++ b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_reschedule_maintenance_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RescheduleMaintenance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-memorystore + + +# [START memorystore_v1_generated_Memorystore_RescheduleMaintenance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import memorystore_v1 + + +def sample_reschedule_maintenance(): + # Create a client + client = memorystore_v1.MemorystoreClient() + + # Initialize request argument(s) + request = memorystore_v1.RescheduleMaintenanceRequest( + name="name_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END memorystore_v1_generated_Memorystore_RescheduleMaintenance_sync] diff --git a/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_update_instance_sync.py b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_update_instance_sync.py index b1fce73bc93e..ca9ab96bf5cd 100644 --- a/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_update_instance_sync.py +++ b/packages/google-cloud-memorystore/samples/generated_samples/memorystore_v1_generated_memorystore_update_instance_sync.py @@ -39,13 +39,7 @@ def sample_update_instance(): client = memorystore_v1.MemorystoreClient() # Initialize request argument(s) - instance = memorystore_v1.Instance() - instance.psc_auto_connections.port = 453 - instance.psc_auto_connections.project_id = "project_id_value" - instance.psc_auto_connections.network = "network_value" - request = memorystore_v1.UpdateInstanceRequest( - instance=instance, ) # Make the request diff --git a/packages/google-cloud-memorystore/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1.json b/packages/google-cloud-memorystore/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1.json index 
efdd1702a8b0..006f6291ab39 100644 --- a/packages/google-cloud-memorystore/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1.json +++ b/packages/google-cloud-memorystore/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1.json @@ -11,6 +11,86 @@ "version": "0.1.0" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.memorystore_v1.MemorystoreClient", + "shortName": "MemorystoreClient" + }, + "fullName": "google.cloud.memorystore_v1.MemorystoreClient.backup_instance", + "method": { + "fullName": "google.cloud.memorystore.v1.Memorystore.BackupInstance", + "service": { + "fullName": "google.cloud.memorystore.v1.Memorystore", + "shortName": "Memorystore" + }, + "shortName": "BackupInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memorystore_v1.types.BackupInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "backup_instance" + }, + "description": "Sample for BackupInstance", + "file": "memorystore_v1_generated_memorystore_backup_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memorystore_v1_generated_Memorystore_BackupInstance_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memorystore_v1_generated_memorystore_backup_instance_sync.py" + }, { "canonical": 
true, "clientMethod": { @@ -37,11 +117,327 @@ "type": "str" }, { - "name": "instance", - "type": "google.cloud.memorystore_v1.types.Instance" + "name": "instance", + "type": "google.cloud.memorystore_v1.types.Instance" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_instance" + }, + "description": "Sample for CreateInstance", + "file": "memorystore_v1_generated_memorystore_create_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memorystore_v1_generated_Memorystore_CreateInstance_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memorystore_v1_generated_memorystore_create_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.memorystore_v1.MemorystoreClient", + "shortName": "MemorystoreClient" + }, + "fullName": "google.cloud.memorystore_v1.MemorystoreClient.delete_backup", + "method": { + "fullName": "google.cloud.memorystore.v1.Memorystore.DeleteBackup", + "service": { + "fullName": "google.cloud.memorystore.v1.Memorystore", + "shortName": "Memorystore" + }, + "shortName": "DeleteBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memorystore_v1.types.DeleteBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + 
"name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_backup" + }, + "description": "Sample for DeleteBackup", + "file": "memorystore_v1_generated_memorystore_delete_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memorystore_v1_generated_Memorystore_DeleteBackup_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memorystore_v1_generated_memorystore_delete_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.memorystore_v1.MemorystoreClient", + "shortName": "MemorystoreClient" + }, + "fullName": "google.cloud.memorystore_v1.MemorystoreClient.delete_instance", + "method": { + "fullName": "google.cloud.memorystore.v1.Memorystore.DeleteInstance", + "service": { + "fullName": "google.cloud.memorystore.v1.Memorystore", + "shortName": "Memorystore" + }, + "shortName": "DeleteInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memorystore_v1.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_instance" + }, + "description": "Sample for DeleteInstance", + "file": "memorystore_v1_generated_memorystore_delete_instance_sync.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memorystore_v1_generated_Memorystore_DeleteInstance_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memorystore_v1_generated_memorystore_delete_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.memorystore_v1.MemorystoreClient", + "shortName": "MemorystoreClient" + }, + "fullName": "google.cloud.memorystore_v1.MemorystoreClient.export_backup", + "method": { + "fullName": "google.cloud.memorystore.v1.Memorystore.ExportBackup", + "service": { + "fullName": "google.cloud.memorystore.v1.Memorystore", + "shortName": "Memorystore" + }, + "shortName": "ExportBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memorystore_v1.types.ExportBackupRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "export_backup" + }, + "description": "Sample for ExportBackup", + "file": "memorystore_v1_generated_memorystore_export_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memorystore_v1_generated_Memorystore_ExportBackup_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { 
+ "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memorystore_v1_generated_memorystore_export_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.memorystore_v1.MemorystoreClient", + "shortName": "MemorystoreClient" + }, + "fullName": "google.cloud.memorystore_v1.MemorystoreClient.get_backup_collection", + "method": { + "fullName": "google.cloud.memorystore.v1.Memorystore.GetBackupCollection", + "service": { + "fullName": "google.cloud.memorystore.v1.Memorystore", + "shortName": "Memorystore" + }, + "shortName": "GetBackupCollection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memorystore_v1.types.GetBackupCollectionRequest" }, { - "name": "instance_id", + "name": "name", "type": "str" }, { @@ -57,22 +453,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_instance" + "resultType": "google.cloud.memorystore_v1.types.BackupCollection", + "shortName": "get_backup_collection" }, - "description": "Sample for CreateInstance", - "file": "memorystore_v1_generated_memorystore_create_instance_sync.py", + "description": "Sample for GetBackupCollection", + "file": "memorystore_v1_generated_memorystore_get_backup_collection_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "memorystore_v1_generated_Memorystore_CreateInstance_sync", + "regionTag": "memorystore_v1_generated_Memorystore_GetBackupCollection_sync", "segments": [ { - "end": 62, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 62, + "end": 51, "start": 27, "type": "SHORT" }, @@ -82,22 +478,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 52, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 59, - "start": 53, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 63, - "start": 60, + 
"end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "memorystore_v1_generated_memorystore_create_instance_sync.py" + "title": "memorystore_v1_generated_memorystore_get_backup_collection_sync.py" }, { "canonical": true, @@ -106,19 +502,19 @@ "fullName": "google.cloud.memorystore_v1.MemorystoreClient", "shortName": "MemorystoreClient" }, - "fullName": "google.cloud.memorystore_v1.MemorystoreClient.delete_instance", + "fullName": "google.cloud.memorystore_v1.MemorystoreClient.get_backup", "method": { - "fullName": "google.cloud.memorystore.v1.Memorystore.DeleteInstance", + "fullName": "google.cloud.memorystore.v1.Memorystore.GetBackup", "service": { "fullName": "google.cloud.memorystore.v1.Memorystore", "shortName": "Memorystore" }, - "shortName": "DeleteInstance" + "shortName": "GetBackup" }, "parameters": [ { "name": "request", - "type": "google.cloud.memorystore_v1.types.DeleteInstanceRequest" + "type": "google.cloud.memorystore_v1.types.GetBackupRequest" }, { "name": "name", @@ -137,22 +533,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_instance" + "resultType": "google.cloud.memorystore_v1.types.Backup", + "shortName": "get_backup" }, - "description": "Sample for DeleteInstance", - "file": "memorystore_v1_generated_memorystore_delete_instance_sync.py", + "description": "Sample for GetBackup", + "file": "memorystore_v1_generated_memorystore_get_backup_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "memorystore_v1_generated_Memorystore_DeleteInstance_sync", + "regionTag": "memorystore_v1_generated_Memorystore_GetBackup_sync", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -167,17 +563,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + 
"start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "memorystore_v1_generated_memorystore_delete_instance_sync.py" + "title": "memorystore_v1_generated_memorystore_get_backup_sync.py" }, { "canonical": true, @@ -339,6 +735,166 @@ ], "title": "memorystore_v1_generated_memorystore_get_instance_sync.py" }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.memorystore_v1.MemorystoreClient", + "shortName": "MemorystoreClient" + }, + "fullName": "google.cloud.memorystore_v1.MemorystoreClient.list_backup_collections", + "method": { + "fullName": "google.cloud.memorystore.v1.Memorystore.ListBackupCollections", + "service": { + "fullName": "google.cloud.memorystore.v1.Memorystore", + "shortName": "Memorystore" + }, + "shortName": "ListBackupCollections" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memorystore_v1.types.ListBackupCollectionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.memorystore_v1.services.memorystore.pagers.ListBackupCollectionsPager", + "shortName": "list_backup_collections" + }, + "description": "Sample for ListBackupCollections", + "file": "memorystore_v1_generated_memorystore_list_backup_collections_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memorystore_v1_generated_Memorystore_ListBackupCollections_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "memorystore_v1_generated_memorystore_list_backup_collections_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.memorystore_v1.MemorystoreClient", + "shortName": "MemorystoreClient" + }, + "fullName": "google.cloud.memorystore_v1.MemorystoreClient.list_backups", + "method": { + "fullName": "google.cloud.memorystore.v1.Memorystore.ListBackups", + "service": { + "fullName": "google.cloud.memorystore.v1.Memorystore", + "shortName": "Memorystore" + }, + "shortName": "ListBackups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memorystore_v1.types.ListBackupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.memorystore_v1.services.memorystore.pagers.ListBackupsPager", + "shortName": "list_backups" + }, + "description": "Sample for ListBackups", + "file": "memorystore_v1_generated_memorystore_list_backups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memorystore_v1_generated_Memorystore_ListBackups_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memorystore_v1_generated_memorystore_list_backups_sync.py" + }, { "canonical": true, "clientMethod": { @@ -419,6 +975,94 @@ ], "title": "memorystore_v1_generated_memorystore_list_instances_sync.py" }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.memorystore_v1.MemorystoreClient", + "shortName": "MemorystoreClient" + }, + "fullName": "google.cloud.memorystore_v1.MemorystoreClient.reschedule_maintenance", + "method": { + "fullName": "google.cloud.memorystore.v1.Memorystore.RescheduleMaintenance", + "service": { + "fullName": "google.cloud.memorystore.v1.Memorystore", + "shortName": "Memorystore" + }, + "shortName": "RescheduleMaintenance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.memorystore_v1.types.RescheduleMaintenanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "reschedule_type", + "type": "google.cloud.memorystore_v1.types.RescheduleMaintenanceRequest.RescheduleType" + }, + { + "name": "schedule_time", + "type": "google.protobuf.timestamp_pb2.Timestamp" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "reschedule_maintenance" + }, + "description": "Sample for RescheduleMaintenance", + "file": "memorystore_v1_generated_memorystore_reschedule_maintenance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "memorystore_v1_generated_Memorystore_RescheduleMaintenance_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "memorystore_v1_generated_memorystore_reschedule_maintenance_sync.py" + }, { "canonical": true, "clientMethod": { @@ -471,12 +1115,12 @@ "regionTag": "memorystore_v1_generated_Memorystore_UpdateInstance_sync", 
"segments": [ { - "end": 60, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 54, "start": 27, "type": "SHORT" }, @@ -486,18 +1130,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], diff --git a/packages/google-cloud-memorystore/scripts/fixup_memorystore_v1_keywords.py b/packages/google-cloud-memorystore/scripts/fixup_memorystore_v1_keywords.py index 0077127dc399..1cb59a700493 100644 --- a/packages/google-cloud-memorystore/scripts/fixup_memorystore_v1_keywords.py +++ b/packages/google-cloud-memorystore/scripts/fixup_memorystore_v1_keywords.py @@ -39,11 +39,19 @@ def partition( class memorystoreCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'backup_instance': ('name', 'ttl', 'backup_id', ), 'create_instance': ('parent', 'instance_id', 'instance', 'request_id', ), + 'delete_backup': ('name', 'request_id', ), 'delete_instance': ('name', 'request_id', ), + 'export_backup': ('name', 'gcs_bucket', ), + 'get_backup': ('name', ), + 'get_backup_collection': ('name', ), 'get_certificate_authority': ('name', ), 'get_instance': ('name', ), + 'list_backup_collections': ('parent', 'page_size', 'page_token', ), + 'list_backups': ('parent', 'page_size', 'page_token', ), 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'reschedule_maintenance': ('name', 'reschedule_type', 'schedule_time', ), 'update_instance': ('instance', 'update_mask', 'request_id', ), } diff --git a/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1/test_memorystore.py b/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1/test_memorystore.py index 7e8d8bc44554..46c0963127dc 100644 --- 
a/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1/test_memorystore.py +++ b/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1/test_memorystore.py @@ -62,9 +62,12 @@ from google.cloud.location import locations_pb2 from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore from google.cloud.memorystore_v1.services.memorystore import ( MemorystoreClient, @@ -1545,7 +1548,9 @@ def test_create_instance_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - instance=memorystore.Instance(name="name_value"), + instance=memorystore.Instance( + gcs_source=memorystore.Instance.GcsBackupSource(uris=["uris_value"]) + ), instance_id="instance_id_value", ) mock_args.update(sample_request) @@ -1582,7 +1587,9 @@ def test_create_instance_rest_flattened_error(transport: str = "rest"): client.create_instance( memorystore.CreateInstanceRequest(), parent="parent_value", - instance=memorystore.Instance(name="name_value"), + instance=memorystore.Instance( + gcs_source=memorystore.Instance.GcsBackupSource(uris=["uris_value"]) + ), instance_id="instance_id_value", ) @@ -1737,7 +1744,9 @@ def test_update_instance_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - instance=memorystore.Instance(name="name_value"), + instance=memorystore.Instance( + gcs_source=memorystore.Instance.GcsBackupSource(uris=["uris_value"]) + ), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -1774,7 +1783,9 @@ def test_update_instance_rest_flattened_error(transport: str = "rest"): 
with pytest.raises(ValueError): client.update_instance( memorystore.UpdateInstanceRequest(), - instance=memorystore.Instance(name="name_value"), + instance=memorystore.Instance( + gcs_source=memorystore.Instance.GcsBackupSource(uris=["uris_value"]) + ), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -2144,181 +2155,2997 @@ def test_get_certificate_authority_rest_flattened_error(transport: str = "rest") ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.MemorystoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): +def test_reschedule_maintenance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.MemorystoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MemorystoreClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # It is an error to provide an api_key and a transport instance. 
- transport = transports.MemorystoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = MemorystoreClient( - client_options=options, - transport=transport, + # Ensure method has been cached + assert ( + client._transport.reschedule_maintenance + in client._transport._wrapped_methods ) - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = MemorystoreClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client._transport._wrapped_methods[ + client._transport.reschedule_maintenance + ] = mock_rpc - # It is an error to provide scopes and a transport instance. - transport = transports.MemorystoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MemorystoreClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) + request = {} + client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.MemorystoreRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + client.reschedule_maintenance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_reschedule_maintenance_rest_required_fields( + request_type=memorystore.RescheduleMaintenanceRequest, +): + transport_class = transports.MemorystoreRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - client = MemorystoreClient(transport=transport) - assert client.transport is transport + # verify fields with default values are dropped -@pytest.mark.parametrize( - "transport_class", - [ - transports.MemorystoreRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).reschedule_maintenance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with default values are now present -def test_transport_kind_rest(): - transport = MemorystoreClient.get_transport_class("rest")( + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" + ).reschedule_maintenance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_list_instances_rest_bad_request(request_type=memorystore.ListInstancesRequest): client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_instances(request) + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) -@pytest.mark.parametrize( - "request_type", - [ - memorystore.ListInstancesRequest, - dict, - ], -) -def test_list_instances_rest_call_success(request_type): - client = MemorystoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.reschedule_maintenance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_reschedule_maintenance_rest_unset_required_fields(): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + unset_fields = transport.reschedule_maintenance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( 
+ ( + "name", + "rescheduleType", + ) + ) + ) + + +def test_reschedule_maintenance_rest_flattened(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = memorystore.ListInstancesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + reschedule_type=memorystore.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), ) + mock_args.update(sample_request) # Wrap the value into a proper Response obj - response_value = mock.Mock() + response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = memorystore.ListInstancesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_instances(request) - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + client.reschedule_maintenance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}:rescheduleMaintenance" + % client.transport._host, + args[1], + ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_instances_rest_interceptors(null_interceptor): - transport = transports.MemorystoreRestTransport( +def test_reschedule_maintenance_rest_flattened_error(transport: str = "rest"): + client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.MemorystoreRestInterceptor(), + transport=transport, ) - client = MemorystoreClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.MemorystoreRestInterceptor, "post_list_instances" - ) as post, mock.patch.object( - transports.MemorystoreRestInterceptor, "post_list_instances_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.MemorystoreRestInterceptor, "pre_list_instances" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = memorystore.ListInstancesRequest.pb( - memorystore.ListInstancesRequest() - ) - transcode.return_value = { + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.reschedule_maintenance( + memorystore.RescheduleMaintenanceRequest(), + name="name_value", + reschedule_type=memorystore.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +def test_list_backup_collections_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_backup_collections + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_backup_collections + ] = mock_rpc + + request = {} + client.list_backup_collections(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_backup_collections(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_backup_collections_rest_required_fields( + request_type=memorystore.ListBackupCollectionsRequest, +): + transport_class = transports.MemorystoreRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backup_collections._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backup_collections._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = memorystore.ListBackupCollectionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = memorystore.ListBackupCollectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_backup_collections(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_backup_collections_rest_unset_required_fields(): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_backup_collections._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_backup_collections_rest_flattened(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = memorystore.ListBackupCollectionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = memorystore.ListBackupCollectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_backup_collections(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupCollections" + % client.transport._host, + args[1], + ) + + +def test_list_backup_collections_rest_flattened_error(transport: str = "rest"): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backup_collections( + memorystore.ListBackupCollectionsRequest(), + parent="parent_value", + ) + + +def test_list_backup_collections_rest_pager(transport: str = "rest"): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + memorystore.ListBackupCollectionsResponse( + backup_collections=[ + memorystore.BackupCollection(), + memorystore.BackupCollection(), + memorystore.BackupCollection(), + ], + next_page_token="abc", + ), + memorystore.ListBackupCollectionsResponse( + backup_collections=[], + next_page_token="def", + ), + memorystore.ListBackupCollectionsResponse( + backup_collections=[ + memorystore.BackupCollection(), + ], + next_page_token="ghi", + ), + memorystore.ListBackupCollectionsResponse( + backup_collections=[ + memorystore.BackupCollection(), + memorystore.BackupCollection(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + memorystore.ListBackupCollectionsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_backup_collections(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, memorystore.BackupCollection) for i in results) + + pages = list(client.list_backup_collections(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_backup_collection_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + 
# Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_backup_collection + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_backup_collection + ] = mock_rpc + + request = {} + client.get_backup_collection(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_backup_collection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_backup_collection_rest_required_fields( + request_type=memorystore.GetBackupCollectionRequest, +): + transport_class = transports.MemorystoreRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_collection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_collection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = 
MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = memorystore.BackupCollection() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = memorystore.BackupCollection.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_backup_collection(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_backup_collection_rest_unset_required_fields(): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_backup_collection._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_backup_collection_rest_flattened(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = memorystore.BackupCollection() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupCollections/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = memorystore.BackupCollection.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_backup_collection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupCollections/*}" + % client.transport._host, + args[1], + ) + + +def test_get_backup_collection_rest_flattened_error(transport: str = "rest"): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_backup_collection( + memorystore.GetBackupCollectionRequest(), + name="name_value", + ) + + +def test_list_backups_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backups in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc + + request = {} + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_backups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_backups_rest_required_fields(request_type=memorystore.ListBackupsRequest): + transport_class = transports.MemorystoreRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backups._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backups._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = memorystore.ListBackupsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = memorystore.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_backups(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_backups_rest_unset_required_fields(): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_backups._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_backups_rest_flattened(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = memorystore.ListBackupsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/backupCollections/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = memorystore.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_backups(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/backupCollections/*}/backups" + % client.transport._host, + args[1], + ) + + +def test_list_backups_rest_flattened_error(transport: str = "rest"): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backups( + memorystore.ListBackupsRequest(), + parent="parent_value", + ) + + +def test_list_backups_rest_pager(transport: str = "rest"): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + memorystore.ListBackupsResponse( + backups=[ + memorystore.Backup(), + memorystore.Backup(), + memorystore.Backup(), + ], + next_page_token="abc", + ), + memorystore.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + memorystore.ListBackupsResponse( + backups=[ + memorystore.Backup(), + ], + next_page_token="ghi", + ), + memorystore.ListBackupsResponse( + backups=[ + memorystore.Backup(), + memorystore.Backup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(memorystore.ListBackupsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/backupCollections/sample3" + } + + pager = client.list_backups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, memorystore.Backup) for i in results) + + pages = list(client.list_backups(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert 
client._transport.get_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc + + request = {} + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_backup_rest_required_fields(request_type=memorystore.GetBackupRequest): + transport_class = transports.MemorystoreRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = memorystore.Backup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = memorystore.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_backup_rest_unset_required_fields(): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_backup_rest_flattened(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = memorystore.Backup() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupCollections/sample3/backups/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = memorystore.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupCollections/*/backups/*}" + % client.transport._host, + args[1], + ) + + +def test_get_backup_rest_flattened_error(transport: str = "rest"): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_backup( + memorystore.GetBackupRequest(), + name="name_value", + ) + + +def test_delete_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc + + request = {} + client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_backup_rest_required_fields( + request_type=memorystore.DeleteBackupRequest, +): + transport_class = transports.MemorystoreRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_backup_rest_unset_required_fields(): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +def test_delete_backup_rest_flattened(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupCollections/sample3/backups/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupCollections/*/backups/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_backup_rest_flattened_error(transport: str = "rest"): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_backup( + memorystore.DeleteBackupRequest(), + name="name_value", + ) + + +def test_export_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.export_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.export_backup] = mock_rpc + + request = {} + client.export_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_export_backup_rest_required_fields( + request_type=memorystore.ExportBackupRequest, +): + transport_class = transports.MemorystoreRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.export_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_export_backup_rest_unset_required_fields(): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.export_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_backup_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.backup_instance in 
client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.backup_instance] = mock_rpc + + request = {} + client.backup_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.backup_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_backup_instance_rest_required_fields( + request_type=memorystore.BackupInstanceRequest, +): + transport_class = transports.MemorystoreRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).backup_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).backup_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = 
request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.backup_instance(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_backup_instance_rest_unset_required_fields(): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.backup_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_backup_instance_rest_flattened(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.backup_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}:backup" + % client.transport._host, + args[1], + ) + + +def test_backup_instance_rest_flattened_error(transport: str = "rest"): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.backup_instance( + memorystore.BackupInstanceRequest(), + name="name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MemorystoreClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MemorystoreClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MemorystoreClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MemorystoreClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = MemorystoreClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MemorystoreRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_rest(): + transport = MemorystoreClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_instances_rest_bad_request(request_type=memorystore.ListInstancesRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_instances(request) + + +@pytest.mark.parametrize( + "request_type", + [ + memorystore.ListInstancesRequest, + dict, + ], +) +def test_list_instances_rest_call_success(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = memorystore.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = memorystore.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_instances(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_instances_rest_interceptors(null_interceptor): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) + client = MemorystoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_list_instances" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_list_instances_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_list_instances" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = memorystore.ListInstancesRequest.pb( + memorystore.ListInstancesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = memorystore.ListInstancesResponse.to_json( + memorystore.ListInstancesResponse() + ) + req.return_value.content = return_value + + request = memorystore.ListInstancesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = memorystore.ListInstancesResponse() + post_with_metadata.return_value = memorystore.ListInstancesResponse(), metadata + + client.list_instances( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), 
+ ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_instance_rest_bad_request(request_type=memorystore.GetInstanceRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_instance(request) + + +@pytest.mark.parametrize( + "request_type", + [ + memorystore.GetInstanceRequest, + dict, + ], +) +def test_get_instance_rest_call_success(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = memorystore.Instance( + name="name_value", + state=memorystore.Instance.State.CREATING, + uid="uid_value", + replica_count=1384, + authorization_mode=memorystore.Instance.AuthorizationMode.AUTH_DISABLED, + transit_encryption_mode=memorystore.Instance.TransitEncryptionMode.TRANSIT_ENCRYPTION_DISABLED, + shard_count=1178, + node_type=memorystore.Instance.NodeType.SHARED_CORE_NANO, + engine_version="engine_version_value", + deletion_protection_enabled=True, + mode=memorystore.Instance.Mode.STANDALONE, + ondemand_maintenance=True, + async_instance_endpoints_deletion_enabled=True, + backup_collection="backup_collection_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = memorystore.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_instance(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, memorystore.Instance) + assert response.name == "name_value" + assert response.state == memorystore.Instance.State.CREATING + assert response.uid == "uid_value" + assert response.replica_count == 1384 + assert ( + response.authorization_mode + == memorystore.Instance.AuthorizationMode.AUTH_DISABLED + ) + assert ( + response.transit_encryption_mode + == memorystore.Instance.TransitEncryptionMode.TRANSIT_ENCRYPTION_DISABLED + ) + assert response.shard_count == 1178 + assert response.node_type == memorystore.Instance.NodeType.SHARED_CORE_NANO + assert response.engine_version == "engine_version_value" + assert response.deletion_protection_enabled is True + assert response.mode == memorystore.Instance.Mode.STANDALONE + assert response.ondemand_maintenance is True + assert response.async_instance_endpoints_deletion_enabled is True + assert response.backup_collection == "backup_collection_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_instance_rest_interceptors(null_interceptor): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) + client = MemorystoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_get_instance" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_get_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_get_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = memorystore.GetInstanceRequest.pb(memorystore.GetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": 
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = memorystore.Instance.to_json(memorystore.Instance()) + req.return_value.content = return_value + + request = memorystore.GetInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = memorystore.Instance() + post_with_metadata.return_value = memorystore.Instance(), metadata + + client.get_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_instance_rest_bad_request( + request_type=memorystore.CreateInstanceRequest, +): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_instance(request) + + +@pytest.mark.parametrize( + "request_type", + [ + memorystore.CreateInstanceRequest, + dict, + ], +) +def test_create_instance_rest_call_success(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["instance"] = { + "gcs_source": {"uris": ["uris_value1", "uris_value2"]}, + "managed_backup_source": {"backup": "backup_value"}, + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "state_info": { + "update_info": { + "target_shard_count": 1920, + "target_replica_count": 2126, + "target_engine_version": "target_engine_version_value", + "target_node_type": 1, + } + }, + "uid": "uid_value", + "replica_count": 1384, + "authorization_mode": 1, + "transit_encryption_mode": 1, + "shard_count": 1178, + "discovery_endpoints": [ + {"address": "address_value", "port": 453, "network": "network_value"} + ], + "node_type": 1, + "persistence_config": { + "mode": 1, + "rdb_config": {"rdb_snapshot_period": 1, "rdb_snapshot_start_time": {}}, + "aof_config": {"append_fsync": 1}, + }, + "engine_version": "engine_version_value", + "engine_configs": {}, + "node_config": {"size_gb": 0.739}, + "zone_distribution_config": {"zone": "zone_value", "mode": 1}, + "deletion_protection_enabled": True, + "psc_auto_connections": [ + { + "port": 453, + 
"psc_connection_id": "psc_connection_id_value", + "ip_address": "ip_address_value", + "forwarding_rule": "forwarding_rule_value", + "project_id": "project_id_value", + "network": "network_value", + "service_attachment": "service_attachment_value", + "psc_connection_status": 1, + "connection_type": 1, + } + ], + "psc_attachment_details": [ + {"service_attachment": "service_attachment_value", "connection_type": 1} + ], + "endpoints": [ + { + "connections": [ + { + "psc_auto_connection": {}, + "psc_connection": { + "port": 453, + "psc_connection_id": "psc_connection_id_value", + "ip_address": "ip_address_value", + "forwarding_rule": "forwarding_rule_value", + "project_id": "project_id_value", + "network": "network_value", + "service_attachment": "service_attachment_value", + "psc_connection_status": 1, + "connection_type": 1, + }, + } + ] + } + ], + "mode": 1, + "ondemand_maintenance": True, + "maintenance_policy": { + "create_time": {}, + "update_time": {}, + "weekly_maintenance_window": [ + { + "day": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + } + ], + }, + "maintenance_schedule": {"start_time": {}, "end_time": {}}, + "cross_instance_replication_config": { + "instance_role": 1, + "primary_instance": {"instance": "instance_value", "uid": "uid_value"}, + "secondary_instances": {}, + "update_time": {}, + "membership": {"primary_instance": {}, "secondary_instances": {}}, + }, + "async_instance_endpoints_deletion_enabled": True, + "backup_collection": "backup_collection_value", + "automated_backup_config": { + "fixed_frequency_schedule": {"start_time": {}}, + "automated_backup_mode": 1, + "retention": {"seconds": 751, "nanos": 543}, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = memorystore.CreateInstanceRequest.meta.fields["instance"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # 
Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] + else: + del request_init["instance"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_instance(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_instance_rest_interceptors(null_interceptor): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) + client = MemorystoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MemorystoreRestInterceptor, "post_create_instance" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_create_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_create_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = memorystore.CreateInstanceRequest.pb( + memorystore.CreateInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = memorystore.CreateInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + 
post_with_metadata.assert_called_once() + + +def test_update_instance_rest_bad_request( + request_type=memorystore.UpdateInstanceRequest, +): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_instance(request) + + +@pytest.mark.parametrize( + "request_type", + [ + memorystore.UpdateInstanceRequest, + dict, + ], +) +def test_update_instance_rest_call_success(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } + request_init["instance"] = { + "gcs_source": {"uris": ["uris_value1", "uris_value2"]}, + "managed_backup_source": {"backup": "backup_value"}, + "name": "projects/sample1/locations/sample2/instances/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "state_info": { + "update_info": { + "target_shard_count": 1920, + "target_replica_count": 2126, + "target_engine_version": "target_engine_version_value", + "target_node_type": 1, + } + }, + "uid": "uid_value", + "replica_count": 1384, + "authorization_mode": 1, + 
"transit_encryption_mode": 1, + "shard_count": 1178, + "discovery_endpoints": [ + {"address": "address_value", "port": 453, "network": "network_value"} + ], + "node_type": 1, + "persistence_config": { + "mode": 1, + "rdb_config": {"rdb_snapshot_period": 1, "rdb_snapshot_start_time": {}}, + "aof_config": {"append_fsync": 1}, + }, + "engine_version": "engine_version_value", + "engine_configs": {}, + "node_config": {"size_gb": 0.739}, + "zone_distribution_config": {"zone": "zone_value", "mode": 1}, + "deletion_protection_enabled": True, + "psc_auto_connections": [ + { + "port": 453, + "psc_connection_id": "psc_connection_id_value", + "ip_address": "ip_address_value", + "forwarding_rule": "forwarding_rule_value", + "project_id": "project_id_value", + "network": "network_value", + "service_attachment": "service_attachment_value", + "psc_connection_status": 1, + "connection_type": 1, + } + ], + "psc_attachment_details": [ + {"service_attachment": "service_attachment_value", "connection_type": 1} + ], + "endpoints": [ + { + "connections": [ + { + "psc_auto_connection": {}, + "psc_connection": { + "port": 453, + "psc_connection_id": "psc_connection_id_value", + "ip_address": "ip_address_value", + "forwarding_rule": "forwarding_rule_value", + "project_id": "project_id_value", + "network": "network_value", + "service_attachment": "service_attachment_value", + "psc_connection_status": 1, + "connection_type": 1, + }, + } + ] + } + ], + "mode": 1, + "ondemand_maintenance": True, + "maintenance_policy": { + "create_time": {}, + "update_time": {}, + "weekly_maintenance_window": [ + { + "day": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + } + ], + }, + "maintenance_schedule": {"start_time": {}, "end_time": {}}, + "cross_instance_replication_config": { + "instance_role": 1, + "primary_instance": {"instance": "instance_value", "uid": "uid_value"}, + "secondary_instances": {}, + "update_time": {}, + "membership": {"primary_instance": 
{}, "secondary_instances": {}}, + }, + "async_instance_endpoints_deletion_enabled": True, + "backup_collection": "backup_collection_value", + "automated_backup_config": { + "fixed_frequency_schedule": {"start_time": {}}, + "automated_backup_mode": 1, + "retention": {"seconds": 751, "nanos": 543}, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = memorystore.UpdateInstanceRequest.meta.fields["instance"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is 
another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] + else: + del request_init["instance"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_instance(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_instance_rest_interceptors(null_interceptor): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) + client = MemorystoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MemorystoreRestInterceptor, "post_update_instance" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_update_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_update_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = memorystore.UpdateInstanceRequest.pb( + memorystore.UpdateInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = memorystore.UpdateInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + 
post_with_metadata.assert_called_once() + + +def test_delete_instance_rest_bad_request( + request_type=memorystore.DeleteInstanceRequest, +): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_instance(request) + + +@pytest.mark.parametrize( + "request_type", + [ + memorystore.DeleteInstanceRequest, + dict, + ], +) +def test_delete_instance_rest_call_success(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_instance(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_instance_rest_interceptors(null_interceptor): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) + client = MemorystoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MemorystoreRestInterceptor, "post_delete_instance" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_delete_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_delete_instance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = memorystore.DeleteInstanceRequest.pb( + memorystore.DeleteInstanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = 
json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = memorystore.DeleteInstanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_certificate_authority_rest_bad_request( + request_type=memorystore.GetCertificateAuthorityRequest, +): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_certificate_authority(request) + + +@pytest.mark.parametrize( + "request_type", + [ + memorystore.GetCertificateAuthorityRequest, + dict, + ], +) +def test_get_certificate_authority_rest_call_success(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = memorystore.CertificateAuthority( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = memorystore.CertificateAuthority.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_certificate_authority(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, memorystore.CertificateAuthority) + assert response.name == "name_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_certificate_authority_rest_interceptors(null_interceptor): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) + client = MemorystoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_get_certificate_authority" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, + "post_get_certificate_authority_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_get_certificate_authority" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = memorystore.GetCertificateAuthorityRequest.pb( + memorystore.GetCertificateAuthorityRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = memorystore.CertificateAuthority.to_json( + memorystore.CertificateAuthority() + ) + req.return_value.content = return_value + + request = memorystore.GetCertificateAuthorityRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = memorystore.CertificateAuthority() + post_with_metadata.return_value = memorystore.CertificateAuthority(), metadata + + client.get_certificate_authority( + request, + metadata=[ + ("key", "val"), + 
("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_reschedule_maintenance_rest_bad_request( + request_type=memorystore.RescheduleMaintenanceRequest, +): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.reschedule_maintenance(request) + + +@pytest.mark.parametrize( + "request_type", + [ + memorystore.RescheduleMaintenanceRequest, + dict, + ], +) +def test_reschedule_maintenance_rest_call_success(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.reschedule_maintenance(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_reschedule_maintenance_rest_interceptors(null_interceptor): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) + client = MemorystoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MemorystoreRestInterceptor, "post_reschedule_maintenance" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, + "post_reschedule_maintenance_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_reschedule_maintenance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = memorystore.RescheduleMaintenanceRequest.pb( + memorystore.RescheduleMaintenanceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", 
"header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = memorystore.RescheduleMaintenanceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.reschedule_maintenance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_backup_collections_rest_bad_request( + request_type=memorystore.ListBackupCollectionsRequest, +): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_backup_collections(request) + + +@pytest.mark.parametrize( + "request_type", + [ + memorystore.ListBackupCollectionsRequest, + dict, + ], +) +def test_list_backup_collections_rest_call_success(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = memorystore.ListBackupCollectionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = memorystore.ListBackupCollectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_backup_collections(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBackupCollectionsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backup_collections_rest_interceptors(null_interceptor): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) + client = MemorystoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_list_backup_collections" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, + "post_list_backup_collections_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_list_backup_collections" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = memorystore.ListBackupCollectionsRequest.pb( + memorystore.ListBackupCollectionsRequest() + ) + transcode.return_value = { "method": "post", "uri": "my_uri", "body": pb_message, @@ -2328,21 +5155,24 @@ def test_list_instances_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = memorystore.ListInstancesResponse.to_json( - memorystore.ListInstancesResponse() + return_value = memorystore.ListBackupCollectionsResponse.to_json( + memorystore.ListBackupCollectionsResponse() ) req.return_value.content = return_value - request = memorystore.ListInstancesRequest() + request = memorystore.ListBackupCollectionsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = 
request, metadata - post.return_value = memorystore.ListInstancesResponse() - post_with_metadata.return_value = memorystore.ListInstancesResponse(), metadata + post.return_value = memorystore.ListBackupCollectionsResponse() + post_with_metadata.return_value = ( + memorystore.ListBackupCollectionsResponse(), + metadata, + ) - client.list_instances( + client.list_backup_collections( request, metadata=[ ("key", "val"), @@ -2355,12 +5185,16 @@ def test_list_instances_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_instance_rest_bad_request(request_type=memorystore.GetInstanceRequest): +def test_get_backup_collection_rest_bad_request( + request_type=memorystore.GetBackupCollectionRequest, +): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/backupCollections/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2375,40 +5209,36 @@ def test_get_instance_rest_bad_request(request_type=memorystore.GetInstanceReque response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_instance(request) + client.get_backup_collection(request) @pytest.mark.parametrize( "request_type", [ - memorystore.GetInstanceRequest, + memorystore.GetBackupCollectionRequest, dict, ], ) -def test_get_instance_rest_call_success(request_type): +def test_get_backup_collection_rest_call_success(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/backupCollections/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = memorystore.Instance( + return_value = memorystore.BackupCollection( name="name_value", - state=memorystore.Instance.State.CREATING, + instance_uid="instance_uid_value", + instance="instance_value", + kms_key="kms_key_value", uid="uid_value", - replica_count=1384, - authorization_mode=memorystore.Instance.AuthorizationMode.AUTH_DISABLED, - transit_encryption_mode=memorystore.Instance.TransitEncryptionMode.TRANSIT_ENCRYPTION_DISABLED, - shard_count=1178, - node_type=memorystore.Instance.NodeType.SHARED_CORE_NANO, - engine_version="engine_version_value", - deletion_protection_enabled=True, - mode=memorystore.Instance.Mode.STANDALONE, ) # Wrap the value into a proper Response obj @@ -2416,36 +5246,24 @@ def test_get_instance_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = memorystore.Instance.pb(return_value) + return_value = memorystore.BackupCollection.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_instance(request) + response = client.get_backup_collection(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, memorystore.Instance) + assert isinstance(response, memorystore.BackupCollection) assert response.name == "name_value" - assert response.state == memorystore.Instance.State.CREATING + assert response.instance_uid == "instance_uid_value" + assert response.instance == "instance_value" + assert response.kms_key == "kms_key_value" assert response.uid == "uid_value" - assert response.replica_count == 1384 - assert ( - response.authorization_mode - == memorystore.Instance.AuthorizationMode.AUTH_DISABLED - ) - assert ( - response.transit_encryption_mode - == memorystore.Instance.TransitEncryptionMode.TRANSIT_ENCRYPTION_DISABLED - ) - assert response.shard_count == 1178 - assert response.node_type == memorystore.Instance.NodeType.SHARED_CORE_NANO - assert response.engine_version == "engine_version_value" - assert response.deletion_protection_enabled is True - assert response.mode == memorystore.Instance.Mode.STANDALONE @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_instance_rest_interceptors(null_interceptor): +def test_get_backup_collection_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2459,16 +5277,19 @@ def test_get_instance_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.MemorystoreRestInterceptor, "post_get_instance" + transports.MemorystoreRestInterceptor, "post_get_backup_collection" ) as post, mock.patch.object( - transports.MemorystoreRestInterceptor, "post_get_instance_with_metadata" + transports.MemorystoreRestInterceptor, + "post_get_backup_collection_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.MemorystoreRestInterceptor, "pre_get_instance" + transports.MemorystoreRestInterceptor, "pre_get_backup_collection" ) as pre: pre.assert_not_called() post.assert_not_called() 
post_with_metadata.assert_not_called() - pb_message = memorystore.GetInstanceRequest.pb(memorystore.GetInstanceRequest()) + pb_message = memorystore.GetBackupCollectionRequest.pb( + memorystore.GetBackupCollectionRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2479,19 +5300,21 @@ def test_get_instance_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = memorystore.Instance.to_json(memorystore.Instance()) + return_value = memorystore.BackupCollection.to_json( + memorystore.BackupCollection() + ) req.return_value.content = return_value - request = memorystore.GetInstanceRequest() + request = memorystore.GetBackupCollectionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = memorystore.Instance() - post_with_metadata.return_value = memorystore.Instance(), metadata + post.return_value = memorystore.BackupCollection() + post_with_metadata.return_value = memorystore.BackupCollection(), metadata - client.get_instance( + client.get_backup_collection( request, metadata=[ ("key", "val"), @@ -2504,14 +5327,14 @@ def test_get_instance_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_instance_rest_bad_request( - request_type=memorystore.CreateInstanceRequest, -): +def test_list_backups_rest_bad_request(request_type=memorystore.ListBackupsRequest): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "parent": "projects/sample1/locations/sample2/backupCollections/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2526,174 +5349,204 @@ def test_create_instance_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_instance(request) + client.list_backups(request) @pytest.mark.parametrize( "request_type", [ - memorystore.CreateInstanceRequest, + memorystore.ListBackupsRequest, dict, ], ) -def test_create_instance_rest_call_success(request_type): +def test_list_backups_rest_call_success(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["instance"] = { - "name": "name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "labels": {}, - "state": 1, - "state_info": { - "update_info": {"target_shard_count": 1920, "target_replica_count": 2126} - }, - "uid": "uid_value", - "replica_count": 1384, - "authorization_mode": 1, - "transit_encryption_mode": 1, - "shard_count": 1178, - "discovery_endpoints": [ - {"address": "address_value", "port": 453, "network": "network_value"} - ], - "node_type": 1, - "persistence_config": { - "mode": 1, - "rdb_config": {"rdb_snapshot_period": 1, "rdb_snapshot_start_time": {}}, - "aof_config": {"append_fsync": 1}, - }, - "engine_version": "engine_version_value", - "engine_configs": {}, - "node_config": {"size_gb": 0.739}, - "zone_distribution_config": {"zone": "zone_value", "mode": 1}, - "deletion_protection_enabled": True, - "psc_auto_connections": [ - { - "port": 453, - "psc_connection_id": "psc_connection_id_value", - "ip_address": "ip_address_value", - "forwarding_rule": "forwarding_rule_value", - "project_id": "project_id_value", - "network": "network_value", - "service_attachment": "service_attachment_value", - "psc_connection_status": 1, - "connection_type": 1, - } - ], - "endpoints": [ - { - "connections": 
[ - { - "psc_auto_connection": {}, - "psc_connection": { - "psc_connection_id": "psc_connection_id_value", - "ip_address": "ip_address_value", - "forwarding_rule": "forwarding_rule_value", - "project_id": "project_id_value", - "network": "network_value", - "service_attachment": "service_attachment_value", - "psc_connection_status": 1, - "connection_type": 1, - }, - } - ] - } - ], - "mode": 1, + request_init = { + "parent": "projects/sample1/locations/sample2/backupCollections/sample3" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + request = request_type(**request_init) - # Determine if the message type is proto-plus or protobuf - test_field = memorystore.CreateInstanceRequest.meta.fields["instance"] + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = memorystore.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + # Convert return value to protobuf type + return_value = memorystore.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_backups(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backups_rest_interceptors(null_interceptor): + transport = transports.MemorystoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MemorystoreRestInterceptor(), + ) + client = MemorystoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_list_backups" + ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_list_backups_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.MemorystoreRestInterceptor, "pre_list_backups" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = memorystore.ListBackupsRequest.pb(memorystore.ListBackupsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = memorystore.ListBackupsResponse.to_json( + memorystore.ListBackupsResponse() + ) + req.return_value.content = return_value + + request = memorystore.ListBackupsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = memorystore.ListBackupsResponse() + post_with_metadata.return_value = memorystore.ListBackupsResponse(), metadata + + client.list_backups( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] +def test_get_backup_rest_bad_request(request_type=memorystore.GetBackupRequest): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupCollections/sample3/backups/sample4" + } + request = request_type(**request_init) - subfields_not_in_runtime = [] + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_backup(request) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +@pytest.mark.parametrize( + "request_type", + [ + memorystore.GetBackupRequest, + dict, + ], +) +def test_get_backup_rest_call_success(request_type): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, 
len(request_init["instance"][field])): - del request_init["instance"][field][i][subfield] - else: - del request_init["instance"][field][subfield] + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/backupCollections/sample3/backups/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = memorystore.Backup( + name="name_value", + instance="instance_value", + instance_uid="instance_uid_value", + total_size_bytes=1732, + engine_version="engine_version_value", + node_type=memorystore.Instance.NodeType.SHARED_CORE_NANO, + replica_count=1384, + shard_count=1178, + backup_type=memorystore.Backup.BackupType.ON_DEMAND, + state=memorystore.Backup.State.CREATING, + uid="uid_value", + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = memorystore.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_instance(request) + response = client.get_backup(request) # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, memorystore.Backup) + assert response.name == "name_value" + assert response.instance == "instance_value" + assert response.instance_uid == "instance_uid_value" + assert response.total_size_bytes == 1732 + assert response.engine_version == "engine_version_value" + assert response.node_type == memorystore.Instance.NodeType.SHARED_CORE_NANO + assert response.replica_count == 1384 + assert response.shard_count == 1178 + assert response.backup_type == memorystore.Backup.BackupType.ON_DEMAND + assert response.state == memorystore.Backup.State.CREATING + assert response.uid == "uid_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_instance_rest_interceptors(null_interceptor): +def test_get_backup_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2707,20 +5560,16 @@ def test_create_instance_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.MemorystoreRestInterceptor, "post_create_instance" + transports.MemorystoreRestInterceptor, "post_get_backup" ) as post, mock.patch.object( - transports.MemorystoreRestInterceptor, "post_create_instance_with_metadata" + transports.MemorystoreRestInterceptor, "post_get_backup_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.MemorystoreRestInterceptor, "pre_create_instance" + transports.MemorystoreRestInterceptor, "pre_get_backup" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = memorystore.CreateInstanceRequest.pb( - memorystore.CreateInstanceRequest() - ) + pb_message = memorystore.GetBackupRequest.pb(memorystore.GetBackupRequest()) transcode.return_value = 
{ "method": "post", "uri": "my_uri", @@ -2731,19 +5580,19 @@ def test_create_instance_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = memorystore.Backup.to_json(memorystore.Backup()) req.return_value.content = return_value - request = memorystore.CreateInstanceRequest() + request = memorystore.GetBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = memorystore.Backup() + post_with_metadata.return_value = memorystore.Backup(), metadata - client.create_instance( + client.get_backup( request, metadata=[ ("key", "val"), @@ -2756,15 +5605,13 @@ def test_create_instance_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_instance_rest_bad_request( - request_type=memorystore.UpdateInstanceRequest, -): +def test_delete_backup_rest_bad_request(request_type=memorystore.DeleteBackupRequest): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + "name": "projects/sample1/locations/sample2/backupCollections/sample3/backups/sample4" } request = request_type(**request_init) @@ -2780,154 +5627,25 @@ def test_update_instance_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_instance(request) + client.delete_backup(request) @pytest.mark.parametrize( "request_type", [ - memorystore.UpdateInstanceRequest, + 
memorystore.DeleteBackupRequest, dict, ], ) -def test_update_instance_rest_call_success(request_type): +def test_delete_backup_rest_call_success(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} - } - request_init["instance"] = { - "name": "projects/sample1/locations/sample2/instances/sample3", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "labels": {}, - "state": 1, - "state_info": { - "update_info": {"target_shard_count": 1920, "target_replica_count": 2126} - }, - "uid": "uid_value", - "replica_count": 1384, - "authorization_mode": 1, - "transit_encryption_mode": 1, - "shard_count": 1178, - "discovery_endpoints": [ - {"address": "address_value", "port": 453, "network": "network_value"} - ], - "node_type": 1, - "persistence_config": { - "mode": 1, - "rdb_config": {"rdb_snapshot_period": 1, "rdb_snapshot_start_time": {}}, - "aof_config": {"append_fsync": 1}, - }, - "engine_version": "engine_version_value", - "engine_configs": {}, - "node_config": {"size_gb": 0.739}, - "zone_distribution_config": {"zone": "zone_value", "mode": 1}, - "deletion_protection_enabled": True, - "psc_auto_connections": [ - { - "port": 453, - "psc_connection_id": "psc_connection_id_value", - "ip_address": "ip_address_value", - "forwarding_rule": "forwarding_rule_value", - "project_id": "project_id_value", - "network": "network_value", - "service_attachment": "service_attachment_value", - "psc_connection_status": 1, - "connection_type": 1, - } - ], - "endpoints": [ - { - "connections": [ - { - "psc_auto_connection": {}, - "psc_connection": { - "psc_connection_id": "psc_connection_id_value", - "ip_address": "ip_address_value", - "forwarding_rule": "forwarding_rule_value", - "project_id": "project_id_value", - "network": "network_value", - "service_attachment": 
"service_attachment_value", - "psc_connection_status": 1, - "connection_type": 1, - }, - } - ] - } - ], - "mode": 1, + "name": "projects/sample1/locations/sample2/backupCollections/sample3/backups/sample4" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = memorystore.UpdateInstanceRequest.meta.fields["instance"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): 
- for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["instance"][field])): - del request_init["instance"][field][i][subfield] - else: - del request_init["instance"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -2942,14 +5660,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_instance(request) + response = client.delete_backup(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_instance_rest_interceptors(null_interceptor): +def test_delete_backup_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2965,17 +5683,17 @@ def test_update_instance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.MemorystoreRestInterceptor, "post_update_instance" + transports.MemorystoreRestInterceptor, "post_delete_backup" ) as post, mock.patch.object( - transports.MemorystoreRestInterceptor, "post_update_instance_with_metadata" + transports.MemorystoreRestInterceptor, "post_delete_backup_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.MemorystoreRestInterceptor, "pre_update_instance" + transports.MemorystoreRestInterceptor, "pre_delete_backup" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = memorystore.UpdateInstanceRequest.pb( - memorystore.UpdateInstanceRequest() + pb_message = memorystore.DeleteBackupRequest.pb( + memorystore.DeleteBackupRequest() ) transcode.return_value = { "method": "post", @@ -2990,7 +5708,7 @@ def test_update_instance_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = memorystore.UpdateInstanceRequest() + request = memorystore.DeleteBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -2999,7 +5717,7 @@ def test_update_instance_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.update_instance( + client.delete_backup( request, metadata=[ ("key", "val"), @@ -3012,14 
+5730,14 @@ def test_update_instance_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_instance_rest_bad_request( - request_type=memorystore.DeleteInstanceRequest, -): +def test_export_backup_rest_bad_request(request_type=memorystore.ExportBackupRequest): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/backupCollections/sample3/backups/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3034,23 +5752,25 @@ def test_delete_instance_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_instance(request) + client.export_backup(request) @pytest.mark.parametrize( "request_type", [ - memorystore.DeleteInstanceRequest, + memorystore.ExportBackupRequest, dict, ], ) -def test_delete_instance_rest_call_success(request_type): +def test_export_backup_rest_call_success(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/backupCollections/sample3/backups/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3065,14 +5785,14 @@ def test_delete_instance_rest_call_success(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_instance(request) + response = client.export_backup(request) # Establish that the response is the type that we expect. json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_instance_rest_interceptors(null_interceptor): +def test_export_backup_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3088,17 +5808,17 @@ def test_delete_instance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.MemorystoreRestInterceptor, "post_delete_instance" + transports.MemorystoreRestInterceptor, "post_export_backup" ) as post, mock.patch.object( - transports.MemorystoreRestInterceptor, "post_delete_instance_with_metadata" + transports.MemorystoreRestInterceptor, "post_export_backup_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.MemorystoreRestInterceptor, "pre_delete_instance" + transports.MemorystoreRestInterceptor, "pre_export_backup" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = memorystore.DeleteInstanceRequest.pb( - memorystore.DeleteInstanceRequest() + pb_message = memorystore.ExportBackupRequest.pb( + memorystore.ExportBackupRequest() ) transcode.return_value = { "method": "post", @@ -3113,7 +5833,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = memorystore.DeleteInstanceRequest() + 
request = memorystore.ExportBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -3122,7 +5842,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.delete_instance( + client.export_backup( request, metadata=[ ("key", "val"), @@ -3135,8 +5855,8 @@ def test_delete_instance_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_certificate_authority_rest_bad_request( - request_type=memorystore.GetCertificateAuthorityRequest, +def test_backup_instance_rest_bad_request( + request_type=memorystore.BackupInstanceRequest, ): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -3157,17 +5877,17 @@ def test_get_certificate_authority_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_certificate_authority(request) + client.backup_instance(request) @pytest.mark.parametrize( "request_type", [ - memorystore.GetCertificateAuthorityRequest, + memorystore.BackupInstanceRequest, dict, ], ) -def test_get_certificate_authority_rest_call_success(request_type): +def test_backup_instance_rest_call_success(request_type): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -3179,29 +5899,23 @@ def test_get_certificate_authority_rest_call_success(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = memorystore.CertificateAuthority( - name="name_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = memorystore.CertificateAuthority.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_certificate_authority(request) + response = client.backup_instance(request) # Establish that the response is the type that we expect. - assert isinstance(response, memorystore.CertificateAuthority) - assert response.name == "name_value" + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_certificate_authority_rest_interceptors(null_interceptor): +def test_backup_instance_rest_interceptors(null_interceptor): transport = transports.MemorystoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3215,18 +5929,19 @@ def test_get_certificate_authority_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.MemorystoreRestInterceptor, "post_get_certificate_authority" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.MemorystoreRestInterceptor, "post_backup_instance" ) as post, mock.patch.object( - transports.MemorystoreRestInterceptor, - "post_get_certificate_authority_with_metadata", + transports.MemorystoreRestInterceptor, "post_backup_instance_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.MemorystoreRestInterceptor, "pre_get_certificate_authority" + transports.MemorystoreRestInterceptor, 
"pre_backup_instance" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = memorystore.GetCertificateAuthorityRequest.pb( - memorystore.GetCertificateAuthorityRequest() + pb_message = memorystore.BackupInstanceRequest.pb( + memorystore.BackupInstanceRequest() ) transcode.return_value = { "method": "post", @@ -3238,21 +5953,19 @@ def test_get_certificate_authority_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = memorystore.CertificateAuthority.to_json( - memorystore.CertificateAuthority() - ) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = memorystore.GetCertificateAuthorityRequest() + request = memorystore.BackupInstanceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = memorystore.CertificateAuthority() - post_with_metadata.return_value = memorystore.CertificateAuthority(), metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.get_certificate_authority( + client.backup_instance( request, metadata=[ ("key", "val"), @@ -3762,6 +6475,172 @@ def test_get_certificate_authority_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_reschedule_maintenance_empty_call_rest(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + client.reschedule_maintenance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = memorystore.RescheduleMaintenanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_backup_collections_empty_call_rest(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_collections), "__call__" + ) as call: + client.list_backup_collections(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = memorystore.ListBackupCollectionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_backup_collection_empty_call_rest(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_collection), "__call__" + ) as call: + client.get_backup_collection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = memorystore.GetBackupCollectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_backups_empty_call_rest(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + client.list_backups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = memorystore.ListBackupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_backup_empty_call_rest(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + client.get_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = memorystore.GetBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_backup_empty_call_rest(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + client.delete_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = memorystore.DeleteBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_export_backup_empty_call_rest(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.export_backup), "__call__") as call: + client.export_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = memorystore.ExportBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_backup_instance_empty_call_rest(): + client = MemorystoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.backup_instance), "__call__") as call: + client.backup_instance(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = memorystore.BackupInstanceRequest() + + assert args[0] == request_msg + + def test_memorystore_rest_lro_client(): client = MemorystoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3807,6 +6686,14 @@ def test_memorystore_base_transport(): "update_instance", "delete_instance", "get_certificate_authority", + "reschedule_maintenance", + "list_backup_collections", + "get_backup_collection", + "list_backups", + "get_backup", + "delete_backup", + "export_backup", + "backup_instance", "get_location", "list_locations", "get_operation", @@ -3967,12 +6854,93 @@ def test_memorystore_client_transport_session_collision(transport_name): session1 = client1.transport.get_certificate_authority._session session2 = client2.transport.get_certificate_authority._session assert session1 != session2 + session1 = client1.transport.reschedule_maintenance._session + session2 = client2.transport.reschedule_maintenance._session + assert session1 != session2 + session1 = client1.transport.list_backup_collections._session + session2 = client2.transport.list_backup_collections._session + assert session1 != session2 + session1 = client1.transport.get_backup_collection._session + session2 = client2.transport.get_backup_collection._session + assert session1 != session2 + session1 = client1.transport.list_backups._session + session2 = client2.transport.list_backups._session + assert session1 != session2 + session1 = client1.transport.get_backup._session + session2 = client2.transport.get_backup._session + assert session1 != session2 + session1 = client1.transport.delete_backup._session + session2 = client2.transport.delete_backup._session + assert session1 != session2 + session1 = client1.transport.export_backup._session + session2 = client2.transport.export_backup._session + assert session1 != session2 + session1 = client1.transport.backup_instance._session + session2 = client2.transport.backup_instance._session 
+ assert session1 != session2 -def test_certificate_authority_path(): +def test_backup_path(): project = "squid" location = "clam" - instance = "whelk" + backup_collection = "whelk" + backup = "octopus" + expected = "projects/{project}/locations/{location}/backupCollections/{backup_collection}/backups/{backup}".format( + project=project, + location=location, + backup_collection=backup_collection, + backup=backup, + ) + actual = MemorystoreClient.backup_path(project, location, backup_collection, backup) + assert expected == actual + + +def test_parse_backup_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "backup_collection": "cuttlefish", + "backup": "mussel", + } + path = MemorystoreClient.backup_path(**expected) + + # Check that the path construction is reversible. + actual = MemorystoreClient.parse_backup_path(path) + assert expected == actual + + +def test_backup_collection_path(): + project = "winkle" + location = "nautilus" + backup_collection = "scallop" + expected = "projects/{project}/locations/{location}/backupCollections/{backup_collection}".format( + project=project, + location=location, + backup_collection=backup_collection, + ) + actual = MemorystoreClient.backup_collection_path( + project, location, backup_collection + ) + assert expected == actual + + +def test_parse_backup_collection_path(): + expected = { + "project": "abalone", + "location": "squid", + "backup_collection": "clam", + } + path = MemorystoreClient.backup_collection_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MemorystoreClient.parse_backup_collection_path(path) + assert expected == actual + + +def test_certificate_authority_path(): + project = "whelk" + location = "octopus" + instance = "oyster" expected = "projects/{project}/locations/{location}/instances/{instance}/certificateAuthority".format( project=project, location=location, @@ -3984,9 +6952,9 @@ def test_certificate_authority_path(): def test_parse_certificate_authority_path(): expected = { - "project": "octopus", - "location": "oyster", - "instance": "nudibranch", + "project": "nudibranch", + "location": "cuttlefish", + "instance": "mussel", } path = MemorystoreClient.certificate_authority_path(**expected) @@ -3995,10 +6963,39 @@ def test_parse_certificate_authority_path(): assert expected == actual +def test_crypto_key_path(): + project = "winkle" + location = "nautilus" + key_ring = "scallop" + crypto_key = "abalone" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + actual = MemorystoreClient.crypto_key_path(project, location, key_ring, crypto_key) + assert expected == actual + + +def test_parse_crypto_key_path(): + expected = { + "project": "squid", + "location": "clam", + "key_ring": "whelk", + "crypto_key": "octopus", + } + path = MemorystoreClient.crypto_key_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MemorystoreClient.parse_crypto_key_path(path) + assert expected == actual + + def test_forwarding_rule_path(): - project = "cuttlefish" - region = "mussel" - forwarding_rule = "winkle" + project = "oyster" + region = "nudibranch" + forwarding_rule = "cuttlefish" expected = ( "projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}".format( project=project, @@ -4012,9 +7009,9 @@ def test_forwarding_rule_path(): def test_parse_forwarding_rule_path(): expected = { - "project": "nautilus", - "region": "scallop", - "forwarding_rule": "abalone", + "project": "mussel", + "region": "winkle", + "forwarding_rule": "nautilus", } path = MemorystoreClient.forwarding_rule_path(**expected) @@ -4024,9 +7021,9 @@ def test_parse_forwarding_rule_path(): def test_instance_path(): - project = "squid" - location = "clam" - instance = "whelk" + project = "scallop" + location = "abalone" + instance = "squid" expected = "projects/{project}/locations/{location}/instances/{instance}".format( project=project, location=location, @@ -4038,9 +7035,9 @@ def test_instance_path(): def test_parse_instance_path(): expected = { - "project": "octopus", - "location": "oyster", - "instance": "nudibranch", + "project": "clam", + "location": "whelk", + "instance": "octopus", } path = MemorystoreClient.instance_path(**expected) @@ -4050,8 +7047,8 @@ def test_parse_instance_path(): def test_network_path(): - project = "cuttlefish" - network = "mussel" + project = "oyster" + network = "nudibranch" expected = "projects/{project}/global/networks/{network}".format( project=project, network=network, @@ -4062,8 +7059,8 @@ def test_network_path(): def test_parse_network_path(): expected = { - "project": "winkle", - "network": "nautilus", + "project": "cuttlefish", + "network": "mussel", } path = MemorystoreClient.network_path(**expected) @@ -4073,9 +7070,9 @@ def test_parse_network_path(): def test_service_attachment_path(): - project = "scallop" - region = "abalone" - service_attachment = 
"squid" + project = "winkle" + region = "nautilus" + service_attachment = "scallop" expected = "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format( project=project, region=region, @@ -4089,9 +7086,9 @@ def test_service_attachment_path(): def test_parse_service_attachment_path(): expected = { - "project": "clam", - "region": "whelk", - "service_attachment": "octopus", + "project": "abalone", + "region": "squid", + "service_attachment": "clam", } path = MemorystoreClient.service_attachment_path(**expected) @@ -4101,7 +7098,7 @@ def test_parse_service_attachment_path(): def test_common_billing_account_path(): - billing_account = "oyster" + billing_account = "whelk" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -4111,7 +7108,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nudibranch", + "billing_account": "octopus", } path = MemorystoreClient.common_billing_account_path(**expected) @@ -4121,7 +7118,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "cuttlefish" + folder = "oyster" expected = "folders/{folder}".format( folder=folder, ) @@ -4131,7 +7128,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "mussel", + "folder": "nudibranch", } path = MemorystoreClient.common_folder_path(**expected) @@ -4141,7 +7138,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "winkle" + organization = "cuttlefish" expected = "organizations/{organization}".format( organization=organization, ) @@ -4151,7 +7148,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nautilus", + "organization": "mussel", } path = MemorystoreClient.common_organization_path(**expected) @@ -4161,7 +7158,7 @@ def test_parse_common_organization_path(): def 
test_common_project_path(): - project = "scallop" + project = "winkle" expected = "projects/{project}".format( project=project, ) @@ -4171,7 +7168,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "abalone", + "project": "nautilus", } path = MemorystoreClient.common_project_path(**expected) @@ -4181,8 +7178,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "squid" - location = "clam" + project = "scallop" + location = "abalone" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -4193,8 +7190,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "whelk", - "location": "octopus", + "project": "squid", + "location": "clam", } path = MemorystoreClient.common_location_path(**expected)