diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/__init__.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/__init__.py index 660dedc6ed5b..e92bcf8fcc2f 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/__init__.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/__init__.py @@ -34,10 +34,16 @@ CheckUpgradeResponse, CreateEnvironmentRequest, DatabaseConfig, + DatabaseFailoverRequest, + DatabaseFailoverResponse, DeleteEnvironmentRequest, EncryptionConfig, Environment, EnvironmentConfig, + ExecuteAirflowCommandRequest, + ExecuteAirflowCommandResponse, + FetchDatabasePropertiesRequest, + FetchDatabasePropertiesResponse, GetEnvironmentRequest, IPAllocationPolicy, ListEnvironmentsRequest, @@ -48,6 +54,8 @@ MasterAuthorizedNetworksConfig, NetworkingConfig, NodeConfig, + PollAirflowCommandRequest, + PollAirflowCommandResponse, PrivateClusterConfig, PrivateEnvironmentConfig, RecoveryConfig, @@ -55,6 +63,8 @@ SaveSnapshotResponse, ScheduledSnapshotsConfig, SoftwareConfig, + StopAirflowCommandRequest, + StopAirflowCommandResponse, UpdateEnvironmentRequest, WebServerConfig, WebServerNetworkAccessControl, @@ -77,10 +87,16 @@ "CheckUpgradeResponse", "CreateEnvironmentRequest", "DatabaseConfig", + "DatabaseFailoverRequest", + "DatabaseFailoverResponse", "DeleteEnvironmentRequest", "EncryptionConfig", "Environment", "EnvironmentConfig", + "ExecuteAirflowCommandRequest", + "ExecuteAirflowCommandResponse", + "FetchDatabasePropertiesRequest", + "FetchDatabasePropertiesResponse", "GetEnvironmentRequest", "IPAllocationPolicy", "ListEnvironmentsRequest", @@ -91,6 +107,8 @@ "MasterAuthorizedNetworksConfig", "NetworkingConfig", "NodeConfig", + "PollAirflowCommandRequest", + "PollAirflowCommandResponse", "PrivateClusterConfig", "PrivateEnvironmentConfig", "RecoveryConfig", @@ -98,6 +116,8 @@ 
"SaveSnapshotResponse", "ScheduledSnapshotsConfig", "SoftwareConfig", + "StopAirflowCommandRequest", + "StopAirflowCommandResponse", "UpdateEnvironmentRequest", "WebServerConfig", "WebServerNetworkAccessControl", diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/gapic_version.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/gapic_version.py index b334dccf50ad..405b1cebcf15 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/gapic_version.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.8.0" # {x-release-please-version} +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/__init__.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/__init__.py index e6c2aa5151f5..e506321a612b 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/__init__.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/__init__.py @@ -26,10 +26,16 @@ CheckUpgradeResponse, CreateEnvironmentRequest, DatabaseConfig, + DatabaseFailoverRequest, + DatabaseFailoverResponse, DeleteEnvironmentRequest, EncryptionConfig, Environment, EnvironmentConfig, + ExecuteAirflowCommandRequest, + ExecuteAirflowCommandResponse, + FetchDatabasePropertiesRequest, + FetchDatabasePropertiesResponse, GetEnvironmentRequest, IPAllocationPolicy, ListEnvironmentsRequest, @@ -40,6 +46,8 @@ MasterAuthorizedNetworksConfig, NetworkingConfig, NodeConfig, + PollAirflowCommandRequest, + PollAirflowCommandResponse, PrivateClusterConfig, 
PrivateEnvironmentConfig, RecoveryConfig, @@ -47,6 +55,8 @@ SaveSnapshotResponse, ScheduledSnapshotsConfig, SoftwareConfig, + StopAirflowCommandRequest, + StopAirflowCommandResponse, UpdateEnvironmentRequest, WebServerConfig, WebServerNetworkAccessControl, @@ -65,11 +75,17 @@ "CheckUpgradeResponse", "CreateEnvironmentRequest", "DatabaseConfig", + "DatabaseFailoverRequest", + "DatabaseFailoverResponse", "DeleteEnvironmentRequest", "EncryptionConfig", "Environment", "EnvironmentConfig", "EnvironmentsClient", + "ExecuteAirflowCommandRequest", + "ExecuteAirflowCommandResponse", + "FetchDatabasePropertiesRequest", + "FetchDatabasePropertiesResponse", "GetEnvironmentRequest", "IPAllocationPolicy", "ImageVersion", @@ -85,6 +101,8 @@ "NetworkingConfig", "NodeConfig", "OperationMetadata", + "PollAirflowCommandRequest", + "PollAirflowCommandResponse", "PrivateClusterConfig", "PrivateEnvironmentConfig", "RecoveryConfig", @@ -92,6 +110,8 @@ "SaveSnapshotResponse", "ScheduledSnapshotsConfig", "SoftwareConfig", + "StopAirflowCommandRequest", + "StopAirflowCommandResponse", "UpdateEnvironmentRequest", "WebServerConfig", "WebServerNetworkAccessControl", diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_metadata.json b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_metadata.json index 45eed94effa3..e35a8b09bf99 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_metadata.json +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_metadata.json @@ -15,11 +15,26 @@ "create_environment" ] }, + "DatabaseFailover": { + "methods": [ + "database_failover" + ] + }, "DeleteEnvironment": { "methods": [ "delete_environment" ] }, + "ExecuteAirflowCommand": { + "methods": [ + "execute_airflow_command" + ] + }, + "FetchDatabaseProperties": { + "methods": [ + "fetch_database_properties" + 
] + }, "GetEnvironment": { "methods": [ "get_environment" @@ -35,11 +50,21 @@ "load_snapshot" ] }, + "PollAirflowCommand": { + "methods": [ + "poll_airflow_command" + ] + }, "SaveSnapshot": { "methods": [ "save_snapshot" ] }, + "StopAirflowCommand": { + "methods": [ + "stop_airflow_command" + ] + }, "UpdateEnvironment": { "methods": [ "update_environment" @@ -55,11 +80,26 @@ "create_environment" ] }, + "DatabaseFailover": { + "methods": [ + "database_failover" + ] + }, "DeleteEnvironment": { "methods": [ "delete_environment" ] }, + "ExecuteAirflowCommand": { + "methods": [ + "execute_airflow_command" + ] + }, + "FetchDatabaseProperties": { + "methods": [ + "fetch_database_properties" + ] + }, "GetEnvironment": { "methods": [ "get_environment" @@ -75,11 +115,21 @@ "load_snapshot" ] }, + "PollAirflowCommand": { + "methods": [ + "poll_airflow_command" + ] + }, "SaveSnapshot": { "methods": [ "save_snapshot" ] }, + "StopAirflowCommand": { + "methods": [ + "stop_airflow_command" + ] + }, "UpdateEnvironment": { "methods": [ "update_environment" @@ -95,11 +145,26 @@ "create_environment" ] }, + "DatabaseFailover": { + "methods": [ + "database_failover" + ] + }, "DeleteEnvironment": { "methods": [ "delete_environment" ] }, + "ExecuteAirflowCommand": { + "methods": [ + "execute_airflow_command" + ] + }, + "FetchDatabaseProperties": { + "methods": [ + "fetch_database_properties" + ] + }, "GetEnvironment": { "methods": [ "get_environment" @@ -115,11 +180,21 @@ "load_snapshot" ] }, + "PollAirflowCommand": { + "methods": [ + "poll_airflow_command" + ] + }, "SaveSnapshot": { "methods": [ "save_snapshot" ] }, + "StopAirflowCommand": { + "methods": [ + "stop_airflow_command" + ] + }, "UpdateEnvironment": { "methods": [ "update_environment" diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_version.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_version.py index 
b334dccf50ad..405b1cebcf15 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_version.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.8.0" # {x-release-please-version} +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py index 04ab575f0dde..411435e605ec 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/async_client.py @@ -994,6 +994,248 @@ async def sample_delete_environment(): # Done; return the response. return response + async def execute_airflow_command( + self, + request: Optional[ + Union[environments.ExecuteAirflowCommandRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environments.ExecuteAirflowCommandResponse: + r"""Executes Airflow CLI command. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1 + + async def sample_execute_airflow_command(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.ExecuteAirflowCommandRequest( + ) + + # Make the request + response = await client.execute_airflow_command(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.orchestration.airflow.service_v1.types.ExecuteAirflowCommandRequest, dict]]): + The request object. Execute Airflow Command request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.orchestration.airflow.service_v1.types.ExecuteAirflowCommandResponse: + Response to + ExecuteAirflowCommandRequest. + + """ + # Create or coerce a protobuf request object. + request = environments.ExecuteAirflowCommandRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.execute_airflow_command, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def stop_airflow_command( + self, + request: Optional[Union[environments.StopAirflowCommandRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environments.StopAirflowCommandResponse: + r"""Stops Airflow CLI command execution. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1 + + async def sample_stop_airflow_command(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.StopAirflowCommandRequest( + ) + + # Make the request + response = await client.stop_airflow_command(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.orchestration.airflow.service_v1.types.StopAirflowCommandRequest, dict]]): + The request object. Stop Airflow Command request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.orchestration.airflow.service_v1.types.StopAirflowCommandResponse: + Response to + StopAirflowCommandRequest. + + """ + # Create or coerce a protobuf request object. + request = environments.StopAirflowCommandRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.stop_airflow_command, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def poll_airflow_command( + self, + request: Optional[Union[environments.PollAirflowCommandRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environments.PollAirflowCommandResponse: + r"""Polls Airflow CLI command execution and fetches logs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1 + + async def sample_poll_airflow_command(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.PollAirflowCommandRequest( + ) + + # Make the request + response = await client.poll_airflow_command(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.orchestration.airflow.service_v1.types.PollAirflowCommandRequest, dict]]): + The request object. Poll Airflow Command request. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.orchestration.airflow.service_v1.types.PollAirflowCommandResponse: + Response to + PollAirflowCommandRequest. + + """ + # Create or coerce a protobuf request object. + request = environments.PollAirflowCommandRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.poll_airflow_command, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def save_snapshot( self, request: Optional[Union[environments.SaveSnapshotRequest, dict]] = None, @@ -1192,6 +1434,188 @@ async def sample_load_snapshot(): # Done; return the response. return response + async def database_failover( + self, + request: Optional[Union[environments.DatabaseFailoverRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Triggers database failover (only for highly resilient + environments). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1 + + async def sample_database_failover(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.DatabaseFailoverRequest( + ) + + # Make the request + operation = client.database_failover(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.orchestration.airflow.service_v1.types.DatabaseFailoverRequest, dict]]): + The request object. Request to trigger database failover + (only for highly resilient + environments). + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.orchestration.airflow.service_v1.types.DatabaseFailoverResponse` + Response for DatabaseFailoverRequest. + + """ + # Create or coerce a protobuf request object. + request = environments.DatabaseFailoverRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.database_failover, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + environments.DatabaseFailoverResponse, + metadata_type=operations.OperationMetadata, + ) + + # Done; return the response. + return response + + async def fetch_database_properties( + self, + request: Optional[ + Union[environments.FetchDatabasePropertiesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environments.FetchDatabasePropertiesResponse: + r"""Fetches database properties. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1 + + async def sample_fetch_database_properties(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.FetchDatabasePropertiesRequest( + environment="environment_value", + ) + + # Make the request + response = await client.fetch_database_properties(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.orchestration.airflow.service_v1.types.FetchDatabasePropertiesRequest, dict]]): + The request object. Request to fetch properties of + environment's database. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.orchestration.airflow.service_v1.types.FetchDatabasePropertiesResponse: + Response for + FetchDatabasePropertiesRequest. + + """ + # Create or coerce a protobuf request object. + request = environments.FetchDatabasePropertiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.fetch_database_properties, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/client.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/client.py index cd16517552b4..0d6abb4c3438 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/client.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/client.py @@ -1227,6 +1227,251 @@ def sample_delete_environment(): # Done; return the response. 
return response + def execute_airflow_command( + self, + request: Optional[ + Union[environments.ExecuteAirflowCommandRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environments.ExecuteAirflowCommandResponse: + r"""Executes Airflow CLI command. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1 + + def sample_execute_airflow_command(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.ExecuteAirflowCommandRequest( + ) + + # Make the request + response = client.execute_airflow_command(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.orchestration.airflow.service_v1.types.ExecuteAirflowCommandRequest, dict]): + The request object. Execute Airflow Command request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.orchestration.airflow.service_v1.types.ExecuteAirflowCommandResponse: + Response to + ExecuteAirflowCommandRequest. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a environments.ExecuteAirflowCommandRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, environments.ExecuteAirflowCommandRequest): + request = environments.ExecuteAirflowCommandRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.execute_airflow_command] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def stop_airflow_command( + self, + request: Optional[Union[environments.StopAirflowCommandRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environments.StopAirflowCommandResponse: + r"""Stops Airflow CLI command execution. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1 + + def sample_stop_airflow_command(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.StopAirflowCommandRequest( + ) + + # Make the request + response = client.stop_airflow_command(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.orchestration.airflow.service_v1.types.StopAirflowCommandRequest, dict]): + The request object. Stop Airflow Command request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.orchestration.airflow.service_v1.types.StopAirflowCommandResponse: + Response to + StopAirflowCommandRequest. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a environments.StopAirflowCommandRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, environments.StopAirflowCommandRequest): + request = environments.StopAirflowCommandRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.stop_airflow_command] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def poll_airflow_command( + self, + request: Optional[Union[environments.PollAirflowCommandRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environments.PollAirflowCommandResponse: + r"""Polls Airflow CLI command execution and fetches logs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1 + + def sample_poll_airflow_command(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.PollAirflowCommandRequest( + ) + + # Make the request + response = client.poll_airflow_command(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.orchestration.airflow.service_v1.types.PollAirflowCommandRequest, dict]): + The request object. Poll Airflow Command request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.orchestration.airflow.service_v1.types.PollAirflowCommandResponse: + Response to + PollAirflowCommandRequest. + + """ + # Create or coerce a protobuf request object. 
+ # Minor optimization to avoid making a copy if the user passes + # in a environments.PollAirflowCommandRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, environments.PollAirflowCommandRequest): + request = environments.PollAirflowCommandRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.poll_airflow_command] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def save_snapshot( self, request: Optional[Union[environments.SaveSnapshotRequest, dict]] = None, @@ -1427,6 +1672,192 @@ def sample_load_snapshot(): # Done; return the response. return response + def database_failover( + self, + request: Optional[Union[environments.DatabaseFailoverRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Triggers database failover (only for highly resilient + environments). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1 + + def sample_database_failover(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.DatabaseFailoverRequest( + ) + + # Make the request + operation = client.database_failover(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.orchestration.airflow.service_v1.types.DatabaseFailoverRequest, dict]): + The request object. Request to trigger database failover + (only for highly resilient + environments). + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.orchestration.airflow.service_v1.types.DatabaseFailoverResponse` + Response for DatabaseFailoverRequest. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a environments.DatabaseFailoverRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, environments.DatabaseFailoverRequest): + request = environments.DatabaseFailoverRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.database_failover] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + environments.DatabaseFailoverResponse, + metadata_type=operations.OperationMetadata, + ) + + # Done; return the response. + return response + + def fetch_database_properties( + self, + request: Optional[ + Union[environments.FetchDatabasePropertiesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environments.FetchDatabasePropertiesResponse: + r"""Fetches database properties. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1 + + def sample_fetch_database_properties(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.FetchDatabasePropertiesRequest( + environment="environment_value", + ) + + # Make the request + response = client.fetch_database_properties(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.orchestration.airflow.service_v1.types.FetchDatabasePropertiesRequest, dict]): + The request object. Request to fetch properties of + environment's database. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.orchestration.airflow.service_v1.types.FetchDatabasePropertiesResponse: + Response for + FetchDatabasePropertiesRequest. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a environments.FetchDatabasePropertiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, environments.FetchDatabasePropertiesRequest): + request = environments.FetchDatabasePropertiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.fetch_database_properties + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "EnvironmentsClient": return self diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py index 71a67880f9b4..d5bbbcf1de2f 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/base.py @@ -150,6 +150,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.execute_airflow_command: gapic_v1.method.wrap_method( + self.execute_airflow_command, + default_timeout=None, + client_info=client_info, + ), + self.stop_airflow_command: gapic_v1.method.wrap_method( + self.stop_airflow_command, + default_timeout=None, + client_info=client_info, + ), + self.poll_airflow_command: gapic_v1.method.wrap_method( + self.poll_airflow_command, + default_timeout=None, + client_info=client_info, + ), self.save_snapshot: gapic_v1.method.wrap_method( self.save_snapshot, default_timeout=None, @@ -160,6 +175,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.database_failover: gapic_v1.method.wrap_method( + self.database_failover, + default_timeout=None, + client_info=client_info, + ), + self.fetch_database_properties: gapic_v1.method.wrap_method( + self.fetch_database_properties, + default_timeout=None, + client_info=client_info, + ), } def 
close(self): @@ -224,6 +249,42 @@ def delete_environment( ]: raise NotImplementedError() + @property + def execute_airflow_command( + self, + ) -> Callable[ + [environments.ExecuteAirflowCommandRequest], + Union[ + environments.ExecuteAirflowCommandResponse, + Awaitable[environments.ExecuteAirflowCommandResponse], + ], + ]: + raise NotImplementedError() + + @property + def stop_airflow_command( + self, + ) -> Callable[ + [environments.StopAirflowCommandRequest], + Union[ + environments.StopAirflowCommandResponse, + Awaitable[environments.StopAirflowCommandResponse], + ], + ]: + raise NotImplementedError() + + @property + def poll_airflow_command( + self, + ) -> Callable[ + [environments.PollAirflowCommandRequest], + Union[ + environments.PollAirflowCommandResponse, + Awaitable[environments.PollAirflowCommandResponse], + ], + ]: + raise NotImplementedError() + @property def save_snapshot( self, @@ -242,6 +303,27 @@ def load_snapshot( ]: raise NotImplementedError() + @property + def database_failover( + self, + ) -> Callable[ + [environments.DatabaseFailoverRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def fetch_database_properties( + self, + ) -> Callable[ + [environments.FetchDatabasePropertiesRequest], + Union[ + environments.FetchDatabasePropertiesResponse, + Awaitable[environments.FetchDatabasePropertiesResponse], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc.py index 7e2ad1eabe9e..da1231e09f64 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc.py +++ 
b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc.py @@ -376,6 +376,93 @@ def delete_environment( ) return self._stubs["delete_environment"] + @property + def execute_airflow_command( + self, + ) -> Callable[ + [environments.ExecuteAirflowCommandRequest], + environments.ExecuteAirflowCommandResponse, + ]: + r"""Return a callable for the execute airflow command method over gRPC. + + Executes Airflow CLI command. + + Returns: + Callable[[~.ExecuteAirflowCommandRequest], + ~.ExecuteAirflowCommandResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "execute_airflow_command" not in self._stubs: + self._stubs["execute_airflow_command"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1.Environments/ExecuteAirflowCommand", + request_serializer=environments.ExecuteAirflowCommandRequest.serialize, + response_deserializer=environments.ExecuteAirflowCommandResponse.deserialize, + ) + return self._stubs["execute_airflow_command"] + + @property + def stop_airflow_command( + self, + ) -> Callable[ + [environments.StopAirflowCommandRequest], + environments.StopAirflowCommandResponse, + ]: + r"""Return a callable for the stop airflow command method over gRPC. + + Stops Airflow CLI command execution. + + Returns: + Callable[[~.StopAirflowCommandRequest], + ~.StopAirflowCommandResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "stop_airflow_command" not in self._stubs: + self._stubs["stop_airflow_command"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1.Environments/StopAirflowCommand", + request_serializer=environments.StopAirflowCommandRequest.serialize, + response_deserializer=environments.StopAirflowCommandResponse.deserialize, + ) + return self._stubs["stop_airflow_command"] + + @property + def poll_airflow_command( + self, + ) -> Callable[ + [environments.PollAirflowCommandRequest], + environments.PollAirflowCommandResponse, + ]: + r"""Return a callable for the poll airflow command method over gRPC. + + Polls Airflow CLI command execution and fetches logs. + + Returns: + Callable[[~.PollAirflowCommandRequest], + ~.PollAirflowCommandResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "poll_airflow_command" not in self._stubs: + self._stubs["poll_airflow_command"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1.Environments/PollAirflowCommand", + request_serializer=environments.PollAirflowCommandRequest.serialize, + response_deserializer=environments.PollAirflowCommandResponse.deserialize, + ) + return self._stubs["poll_airflow_command"] + @property def save_snapshot( self, @@ -434,6 +521,62 @@ def load_snapshot( ) return self._stubs["load_snapshot"] + @property + def database_failover( + self, + ) -> Callable[[environments.DatabaseFailoverRequest], operations_pb2.Operation]: + r"""Return a callable for the database failover method over gRPC. + + Triggers database failover (only for highly resilient + environments). + + Returns: + Callable[[~.DatabaseFailoverRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "database_failover" not in self._stubs: + self._stubs["database_failover"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1.Environments/DatabaseFailover", + request_serializer=environments.DatabaseFailoverRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["database_failover"] + + @property + def fetch_database_properties( + self, + ) -> Callable[ + [environments.FetchDatabasePropertiesRequest], + environments.FetchDatabasePropertiesResponse, + ]: + r"""Return a callable for the fetch database properties method over gRPC. + + Fetches database properties. + + Returns: + Callable[[~.FetchDatabasePropertiesRequest], + ~.FetchDatabasePropertiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "fetch_database_properties" not in self._stubs: + self._stubs["fetch_database_properties"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1.Environments/FetchDatabaseProperties", + request_serializer=environments.FetchDatabasePropertiesRequest.serialize, + response_deserializer=environments.FetchDatabasePropertiesResponse.deserialize, + ) + return self._stubs["fetch_database_properties"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc_asyncio.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc_asyncio.py index 73305f8d1584..b2e2c0b5b659 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc_asyncio.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/grpc_asyncio.py @@ -390,6 +390,93 @@ def delete_environment( ) return self._stubs["delete_environment"] + @property + def execute_airflow_command( + self, + ) -> Callable[ + [environments.ExecuteAirflowCommandRequest], + Awaitable[environments.ExecuteAirflowCommandResponse], + ]: + r"""Return a callable for the execute airflow command method over gRPC. + + Executes Airflow CLI command. + + Returns: + Callable[[~.ExecuteAirflowCommandRequest], + Awaitable[~.ExecuteAirflowCommandResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "execute_airflow_command" not in self._stubs: + self._stubs["execute_airflow_command"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1.Environments/ExecuteAirflowCommand", + request_serializer=environments.ExecuteAirflowCommandRequest.serialize, + response_deserializer=environments.ExecuteAirflowCommandResponse.deserialize, + ) + return self._stubs["execute_airflow_command"] + + @property + def stop_airflow_command( + self, + ) -> Callable[ + [environments.StopAirflowCommandRequest], + Awaitable[environments.StopAirflowCommandResponse], + ]: + r"""Return a callable for the stop airflow command method over gRPC. + + Stops Airflow CLI command execution. + + Returns: + Callable[[~.StopAirflowCommandRequest], + Awaitable[~.StopAirflowCommandResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "stop_airflow_command" not in self._stubs: + self._stubs["stop_airflow_command"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1.Environments/StopAirflowCommand", + request_serializer=environments.StopAirflowCommandRequest.serialize, + response_deserializer=environments.StopAirflowCommandResponse.deserialize, + ) + return self._stubs["stop_airflow_command"] + + @property + def poll_airflow_command( + self, + ) -> Callable[ + [environments.PollAirflowCommandRequest], + Awaitable[environments.PollAirflowCommandResponse], + ]: + r"""Return a callable for the poll airflow command method over gRPC. + + Polls Airflow CLI command execution and fetches logs. + + Returns: + Callable[[~.PollAirflowCommandRequest], + Awaitable[~.PollAirflowCommandResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "poll_airflow_command" not in self._stubs: + self._stubs["poll_airflow_command"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1.Environments/PollAirflowCommand", + request_serializer=environments.PollAirflowCommandRequest.serialize, + response_deserializer=environments.PollAirflowCommandResponse.deserialize, + ) + return self._stubs["poll_airflow_command"] + @property def save_snapshot( self, @@ -452,6 +539,64 @@ def load_snapshot( ) return self._stubs["load_snapshot"] + @property + def database_failover( + self, + ) -> Callable[ + [environments.DatabaseFailoverRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the database failover method over gRPC. + + Triggers database failover (only for highly resilient + environments). + + Returns: + Callable[[~.DatabaseFailoverRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "database_failover" not in self._stubs: + self._stubs["database_failover"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1.Environments/DatabaseFailover", + request_serializer=environments.DatabaseFailoverRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["database_failover"] + + @property + def fetch_database_properties( + self, + ) -> Callable[ + [environments.FetchDatabasePropertiesRequest], + Awaitable[environments.FetchDatabasePropertiesResponse], + ]: + r"""Return a callable for the fetch database properties method over gRPC. 
+ + Fetches database properties. + + Returns: + Callable[[~.FetchDatabasePropertiesRequest], + Awaitable[~.FetchDatabasePropertiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_database_properties" not in self._stubs: + self._stubs["fetch_database_properties"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1.Environments/FetchDatabaseProperties", + request_serializer=environments.FetchDatabasePropertiesRequest.serialize, + response_deserializer=environments.FetchDatabasePropertiesResponse.deserialize, + ) + return self._stubs["fetch_database_properties"] + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/rest.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/rest.py index b356a9223f24..81a9ae84f88a 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/rest.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/services/environments/transports/rest.py @@ -80,6 +80,14 @@ def post_create_environment(self, response): logging.log(f"Received response: {response}") return response + def pre_database_failover(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_database_failover(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_environment(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -88,6 +96,22 @@ 
def post_delete_environment(self, response): logging.log(f"Received response: {response}") return response + def pre_execute_airflow_command(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_execute_airflow_command(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fetch_database_properties(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_database_properties(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_environment(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -112,6 +136,14 @@ def post_load_snapshot(self, response): logging.log(f"Received response: {response}") return response + def pre_poll_airflow_command(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_poll_airflow_command(self, response): + logging.log(f"Received response: {response}") + return response + def pre_save_snapshot(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -120,6 +152,14 @@ def post_save_snapshot(self, response): logging.log(f"Received response: {response}") return response + def pre_stop_airflow_command(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_stop_airflow_command(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_environment(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -157,6 +197,29 @@ def post_create_environment( """ return response + def pre_database_failover( + self, + request: environments.DatabaseFailoverRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[environments.DatabaseFailoverRequest, Sequence[Tuple[str, str]]]: + 
"""Pre-rpc interceptor for database_failover + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. + """ + return request, metadata + + def post_database_failover( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for database_failover + + Override in a subclass to manipulate the response + after it is returned by the Environments server but before + it is returned to user code. + """ + return response + def pre_delete_environment( self, request: environments.DeleteEnvironmentRequest, @@ -180,6 +243,52 @@ def post_delete_environment( """ return response + def pre_execute_airflow_command( + self, + request: environments.ExecuteAirflowCommandRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[environments.ExecuteAirflowCommandRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for execute_airflow_command + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. + """ + return request, metadata + + def post_execute_airflow_command( + self, response: environments.ExecuteAirflowCommandResponse + ) -> environments.ExecuteAirflowCommandResponse: + """Post-rpc interceptor for execute_airflow_command + + Override in a subclass to manipulate the response + after it is returned by the Environments server but before + it is returned to user code. + """ + return response + + def pre_fetch_database_properties( + self, + request: environments.FetchDatabasePropertiesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[environments.FetchDatabasePropertiesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for fetch_database_properties + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. 
+ """ + return request, metadata + + def post_fetch_database_properties( + self, response: environments.FetchDatabasePropertiesResponse + ) -> environments.FetchDatabasePropertiesResponse: + """Post-rpc interceptor for fetch_database_properties + + Override in a subclass to manipulate the response + after it is returned by the Environments server but before + it is returned to user code. + """ + return response + def pre_get_environment( self, request: environments.GetEnvironmentRequest, @@ -249,6 +358,29 @@ def post_load_snapshot( """ return response + def pre_poll_airflow_command( + self, + request: environments.PollAirflowCommandRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[environments.PollAirflowCommandRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for poll_airflow_command + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. + """ + return request, metadata + + def post_poll_airflow_command( + self, response: environments.PollAirflowCommandResponse + ) -> environments.PollAirflowCommandResponse: + """Post-rpc interceptor for poll_airflow_command + + Override in a subclass to manipulate the response + after it is returned by the Environments server but before + it is returned to user code. + """ + return response + def pre_save_snapshot( self, request: environments.SaveSnapshotRequest, @@ -272,6 +404,29 @@ def post_save_snapshot( """ return response + def pre_stop_airflow_command( + self, + request: environments.StopAirflowCommandRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[environments.StopAirflowCommandRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for stop_airflow_command + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. 
+ """ + return request, metadata + + def post_stop_airflow_command( + self, response: environments.StopAirflowCommandResponse + ) -> environments.StopAirflowCommandResponse: + """Post-rpc interceptor for stop_airflow_command + + Override in a subclass to manipulate the response + after it is returned by the Environments server but before + it is returned to user code. + """ + return response + def pre_update_environment( self, request: environments.UpdateEnvironmentRequest, @@ -593,6 +748,95 @@ def __call__( resp = self._interceptor.post_create_environment(resp) return resp + class _DatabaseFailover(EnvironmentsRestStub): + def __hash__(self): + return hash("DatabaseFailover") + + def __call__( + self, + request: environments.DatabaseFailoverRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the database failover method over HTTP. + + Args: + request (~.environments.DatabaseFailoverRequest): + The request object. Request to trigger database failover + (only for highly resilient + environments). + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{environment=projects/*/locations/*/environments/*}:databaseFailover", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_database_failover( + request, metadata + ) + pb_request = environments.DatabaseFailoverRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_database_failover(resp) + return resp + class _DeleteEnvironment(EnvironmentsRestStub): def __hash__(self): return hash("DeleteEnvironment") @@ -671,6 +915,185 @@ def __call__( resp = self._interceptor.post_delete_environment(resp) return resp + class _ExecuteAirflowCommand(EnvironmentsRestStub): + def __hash__(self): + return hash("ExecuteAirflowCommand") + + def __call__( + self, + request: environments.ExecuteAirflowCommandRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environments.ExecuteAirflowCommandResponse: + r"""Call the execute airflow command method over HTTP. + + Args: + request (~.environments.ExecuteAirflowCommandRequest): + The request object. Execute Airflow Command request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.environments.ExecuteAirflowCommandResponse: + Response to + ExecuteAirflowCommandRequest. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{environment=projects/*/locations/*/environments/*}:executeAirflowCommand", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_execute_airflow_command( + request, metadata + ) + pb_request = environments.ExecuteAirflowCommandRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = environments.ExecuteAirflowCommandResponse() + pb_resp = environments.ExecuteAirflowCommandResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_execute_airflow_command(resp) + return resp + + class _FetchDatabaseProperties(EnvironmentsRestStub): + def __hash__(self): + return hash("FetchDatabaseProperties") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: environments.FetchDatabasePropertiesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environments.FetchDatabasePropertiesResponse: + r"""Call the fetch database properties method over HTTP. + + Args: + request (~.environments.FetchDatabasePropertiesRequest): + The request object. Request to fetch properties of + environment's database. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.environments.FetchDatabasePropertiesResponse: + Response for + FetchDatabasePropertiesRequest. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{environment=projects/*/locations/*/environments/*}:fetchDatabaseProperties", + }, + ] + request, metadata = self._interceptor.pre_fetch_database_properties( + request, metadata + ) + pb_request = environments.FetchDatabasePropertiesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = environments.FetchDatabasePropertiesResponse() + pb_resp = environments.FetchDatabasePropertiesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_database_properties(resp) + return resp + class _GetEnvironment(EnvironmentsRestStub): def __hash__(self): return hash("GetEnvironment") @@ -914,6 +1337,94 @@ def __call__( resp = self._interceptor.post_load_snapshot(resp) return resp + class _PollAirflowCommand(EnvironmentsRestStub): + def __hash__(self): + return hash("PollAirflowCommand") + + def __call__( + self, + request: environments.PollAirflowCommandRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environments.PollAirflowCommandResponse: + r"""Call the poll airflow command method over HTTP. + + Args: + request (~.environments.PollAirflowCommandRequest): + The request object. Poll Airflow Command request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.environments.PollAirflowCommandResponse: + Response to + PollAirflowCommandRequest. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{environment=projects/*/locations/*/environments/*}:pollAirflowCommand", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_poll_airflow_command( + request, metadata + ) + pb_request = environments.PollAirflowCommandRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = environments.PollAirflowCommandResponse() + pb_resp = environments.PollAirflowCommandResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_poll_airflow_command(resp) + return resp + class _SaveSnapshot(EnvironmentsRestStub): def __hash__(self): return hash("SaveSnapshot") @@ -1000,6 +1511,94 @@ def __call__( resp = self._interceptor.post_save_snapshot(resp) return resp + class _StopAirflowCommand(EnvironmentsRestStub): + def __hash__(self): + return hash("StopAirflowCommand") + + def __call__( + self, + request: environments.StopAirflowCommandRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environments.StopAirflowCommandResponse: + r"""Call the stop airflow command method over HTTP. + + Args: + request (~.environments.StopAirflowCommandRequest): + The request object. Stop Airflow Command request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.environments.StopAirflowCommandResponse: + Response to + StopAirflowCommandRequest. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{environment=projects/*/locations/*/environments/*}:stopAirflowCommand", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_stop_airflow_command( + request, metadata + ) + pb_request = environments.StopAirflowCommandRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = environments.StopAirflowCommandResponse() + pb_resp = environments.StopAirflowCommandResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_stop_airflow_command(resp) + return resp + class _UpdateEnvironment(EnvironmentsRestStub): def __hash__(self): return hash("UpdateEnvironment") @@ -1095,6 +1694,14 @@ def create_environment( # In C++ this would require a dynamic_cast return self._CreateEnvironment(self._session, self._host, self._interceptor) # type: ignore + @property + def database_failover( + self, + ) -> Callable[[environments.DatabaseFailoverRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DatabaseFailover(self._session, self._host, self._interceptor) # type: ignore + @property def delete_environment( self, @@ -1103,6 +1710,28 @@ def delete_environment( # In C++ this would require a dynamic_cast return self._DeleteEnvironment(self._session, self._host, self._interceptor) # type: ignore + @property + def execute_airflow_command( + self, + ) -> Callable[ + [environments.ExecuteAirflowCommandRequest], + environments.ExecuteAirflowCommandResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExecuteAirflowCommand(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_database_properties( + self, + ) -> Callable[ + [environments.FetchDatabasePropertiesRequest], + environments.FetchDatabasePropertiesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._FetchDatabaseProperties(self._session, self._host, self._interceptor) # type: ignore + @property def get_environment( self, @@ -1129,6 +1758,17 @@ def load_snapshot( # In C++ this would require a dynamic_cast return self._LoadSnapshot(self._session, self._host, self._interceptor) # type: ignore + @property + def poll_airflow_command( + self, + ) -> Callable[ + [environments.PollAirflowCommandRequest], + environments.PollAirflowCommandResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._PollAirflowCommand(self._session, self._host, self._interceptor) # type: ignore + @property def save_snapshot( self, @@ -1137,6 +1777,17 @@ def save_snapshot( # In C++ this would require a dynamic_cast return self._SaveSnapshot(self._session, self._host, self._interceptor) # type: ignore + @property + def stop_airflow_command( + self, + ) -> Callable[ + [environments.StopAirflowCommandRequest], + environments.StopAirflowCommandResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._StopAirflowCommand(self._session, self._host, self._interceptor) # type: ignore + @property def update_environment( self, diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/__init__.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/__init__.py index 7f77a37d0ac2..669c4396c805 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/__init__.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/__init__.py @@ -17,10 +17,16 @@ CheckUpgradeResponse, CreateEnvironmentRequest, DatabaseConfig, + DatabaseFailoverRequest, + DatabaseFailoverResponse, DeleteEnvironmentRequest, EncryptionConfig, Environment, EnvironmentConfig, + ExecuteAirflowCommandRequest, + ExecuteAirflowCommandResponse, + FetchDatabasePropertiesRequest, + FetchDatabasePropertiesResponse, GetEnvironmentRequest, IPAllocationPolicy, ListEnvironmentsRequest, @@ -31,6 +37,8 @@ MasterAuthorizedNetworksConfig, NetworkingConfig, NodeConfig, + PollAirflowCommandRequest, + PollAirflowCommandResponse, PrivateClusterConfig, PrivateEnvironmentConfig, RecoveryConfig, @@ -38,6 +46,8 @@ SaveSnapshotResponse, ScheduledSnapshotsConfig, SoftwareConfig, + StopAirflowCommandRequest, + StopAirflowCommandResponse, UpdateEnvironmentRequest, WebServerConfig, WebServerNetworkAccessControl, @@ -54,10 +64,16 @@ "CheckUpgradeResponse", "CreateEnvironmentRequest", "DatabaseConfig", + "DatabaseFailoverRequest", + "DatabaseFailoverResponse", "DeleteEnvironmentRequest", "EncryptionConfig", "Environment", "EnvironmentConfig", + "ExecuteAirflowCommandRequest", + "ExecuteAirflowCommandResponse", + "FetchDatabasePropertiesRequest", + "FetchDatabasePropertiesResponse", "GetEnvironmentRequest", "IPAllocationPolicy", "ListEnvironmentsRequest", @@ -68,6 +84,8 @@ 
"MasterAuthorizedNetworksConfig", "NetworkingConfig", "NodeConfig", + "PollAirflowCommandRequest", + "PollAirflowCommandResponse", "PrivateClusterConfig", "PrivateEnvironmentConfig", "RecoveryConfig", @@ -75,6 +93,8 @@ "SaveSnapshotResponse", "ScheduledSnapshotsConfig", "SoftwareConfig", + "StopAirflowCommandRequest", + "StopAirflowCommandResponse", "UpdateEnvironmentRequest", "WebServerConfig", "WebServerNetworkAccessControl", diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/environments.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/environments.py index 8d2aab84a96e..f74a45942dbd 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/environments.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/environments.py @@ -30,10 +30,20 @@ "ListEnvironmentsResponse", "DeleteEnvironmentRequest", "UpdateEnvironmentRequest", + "ExecuteAirflowCommandRequest", + "ExecuteAirflowCommandResponse", + "StopAirflowCommandRequest", + "StopAirflowCommandResponse", + "PollAirflowCommandRequest", + "PollAirflowCommandResponse", "SaveSnapshotRequest", "SaveSnapshotResponse", "LoadSnapshotRequest", "LoadSnapshotResponse", + "DatabaseFailoverRequest", + "DatabaseFailoverResponse", + "FetchDatabasePropertiesRequest", + "FetchDatabasePropertiesResponse", "EnvironmentConfig", "WebServerNetworkAccessControl", "DatabaseConfig", @@ -364,6 +374,256 @@ class UpdateEnvironmentRequest(proto.Message): ) +class ExecuteAirflowCommandRequest(proto.Message): + r"""Execute Airflow Command request. + + Attributes: + environment (str): + The resource name of the environment in the + form: + "projects/{projectId}/locations/{locationId}/environments/{environmentId}". + command (str): + Airflow command. + subcommand (str): + Airflow subcommand. 
+ parameters (MutableSequence[str]): + Parameters for the Airflow command/subcommand as an array of + arguments. It may contain positional arguments like + ``["my-dag-id"]``, key-value parameters like + ``["--foo=bar"]`` or ``["--foo","bar"]``, or other flags + like ``["-f"]``. + """ + + environment: str = proto.Field( + proto.STRING, + number=1, + ) + command: str = proto.Field( + proto.STRING, + number=2, + ) + subcommand: str = proto.Field( + proto.STRING, + number=3, + ) + parameters: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + +class ExecuteAirflowCommandResponse(proto.Message): + r"""Response to ExecuteAirflowCommandRequest. + + Attributes: + execution_id (str): + The unique ID of the command execution for + polling. + pod (str): + The name of the pod where the command is + executed. + pod_namespace (str): + The namespace of the pod where the command is + executed. + error (str): + Error message. Empty if there was no error. + """ + + execution_id: str = proto.Field( + proto.STRING, + number=1, + ) + pod: str = proto.Field( + proto.STRING, + number=2, + ) + pod_namespace: str = proto.Field( + proto.STRING, + number=3, + ) + error: str = proto.Field( + proto.STRING, + number=4, + ) + + +class StopAirflowCommandRequest(proto.Message): + r"""Stop Airflow Command request. + + Attributes: + environment (str): + The resource name of the environment in the + form: + "projects/{projectId}/locations/{locationId}/environments/{environmentId}". + execution_id (str): + The unique ID of the command execution. + pod (str): + The name of the pod where the command is + executed. + pod_namespace (str): + The namespace of the pod where the command is + executed. + force (bool): + If true, the execution is terminated + forcefully (SIGKILL). If false, the execution is + stopped gracefully, giving it time for cleanup. 
+ """ + + environment: str = proto.Field( + proto.STRING, + number=1, + ) + execution_id: str = proto.Field( + proto.STRING, + number=2, + ) + pod: str = proto.Field( + proto.STRING, + number=3, + ) + pod_namespace: str = proto.Field( + proto.STRING, + number=4, + ) + force: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class StopAirflowCommandResponse(proto.Message): + r"""Response to StopAirflowCommandRequest. + + Attributes: + is_done (bool): + Whether the execution is still running. + output (MutableSequence[str]): + Output message from stopping execution + request. + """ + + is_done: bool = proto.Field( + proto.BOOL, + number=1, + ) + output: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class PollAirflowCommandRequest(proto.Message): + r"""Poll Airflow Command request. + + Attributes: + environment (str): + The resource name of the environment in the + form: + "projects/{projectId}/locations/{locationId}/environments/{environmentId}". + execution_id (str): + The unique ID of the command execution. + pod (str): + The name of the pod where the command is + executed. + pod_namespace (str): + The namespace of the pod where the command is + executed. + next_line_number (int): + Line number from which new logs should be + fetched. + """ + + environment: str = proto.Field( + proto.STRING, + number=1, + ) + execution_id: str = proto.Field( + proto.STRING, + number=2, + ) + pod: str = proto.Field( + proto.STRING, + number=3, + ) + pod_namespace: str = proto.Field( + proto.STRING, + number=4, + ) + next_line_number: int = proto.Field( + proto.INT32, + number=5, + ) + + +class PollAirflowCommandResponse(proto.Message): + r"""Response to PollAirflowCommandRequest. + + Attributes: + output (MutableSequence[google.cloud.orchestration.airflow.service_v1.types.PollAirflowCommandResponse.Line]): + Output from the command execution. It may not + contain the full output and the caller may need + to poll for more lines. 
+ output_end (bool): + Whether the command execution has finished + and there is no more output. + exit_info (google.cloud.orchestration.airflow.service_v1.types.PollAirflowCommandResponse.ExitInfo): + The result exit status of the command. + """ + + class Line(proto.Message): + r"""Contains information about a single line from logs. + + Attributes: + line_number (int): + Number of the line. + content (str): + Text content of the log line. + """ + + line_number: int = proto.Field( + proto.INT32, + number=1, + ) + content: str = proto.Field( + proto.STRING, + number=2, + ) + + class ExitInfo(proto.Message): + r"""Information about how a command ended. + + Attributes: + exit_code (int): + The exit code from the command execution. + error (str): + Error message. Empty if there was no error. + """ + + exit_code: int = proto.Field( + proto.INT32, + number=1, + ) + error: str = proto.Field( + proto.STRING, + number=2, + ) + + output: MutableSequence[Line] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=Line, + ) + output_end: bool = proto.Field( + proto.BOOL, + number=2, + ) + exit_info: ExitInfo = proto.Field( + proto.MESSAGE, + number=3, + message=ExitInfo, + ) + + class SaveSnapshotRequest(proto.Message): r"""Request to create a snapshot of a Cloud Composer environment. @@ -461,6 +721,75 @@ class LoadSnapshotResponse(proto.Message): r"""Response to LoadSnapshotRequest.""" +class DatabaseFailoverRequest(proto.Message): + r"""Request to trigger database failover (only for highly + resilient environments). + + Attributes: + environment (str): + Target environment: + "projects/{projectId}/locations/{locationId}/environments/{environmentId}". + """ + + environment: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DatabaseFailoverResponse(proto.Message): + r"""Response for DatabaseFailoverRequest.""" + + +class FetchDatabasePropertiesRequest(proto.Message): + r"""Request to fetch properties of environment's database. 
+ + Attributes: + environment (str): + Required. The resource name of the + environment, in the form: + "projects/{projectId}/locations/{locationId}/environments/{environmentId}". + """ + + environment: str = proto.Field( + proto.STRING, + number=1, + ) + + +class FetchDatabasePropertiesResponse(proto.Message): + r"""Response for FetchDatabasePropertiesRequest. + + Attributes: + primary_gce_zone (str): + The Compute Engine zone that the instance is + currently serving from. + secondary_gce_zone (str): + The Compute Engine zone that the failover + instance is currently serving from for a + regional Cloud SQL instance. + is_failover_replica_available (bool): + The availability status of the failover + replica. A false status indicates that the + failover replica is out of sync. The primary + instance can only fail over to the failover + replica when the status is true. + """ + + primary_gce_zone: str = proto.Field( + proto.STRING, + number=1, + ) + secondary_gce_zone: str = proto.Field( + proto.STRING, + number=2, + ) + is_failover_replica_available: bool = proto.Field( + proto.BOOL, + number=3, + ) + + class EnvironmentConfig(proto.Message): r"""Configuration information for an environment. @@ -563,6 +892,11 @@ class EnvironmentConfig(proto.Message): This field is supported for Cloud Composer environments in versions composer-2.\ *.*-airflow-*.*.\* and newer. + resilience_mode (google.cloud.orchestration.airflow.service_v1.types.EnvironmentConfig.ResilienceMode): + Optional. Resilience mode of the Cloud Composer Environment. + + This field is supported for Cloud Composer environments in + versions composer-2.2.0-airflow-\ *.*.\* and newer. """ class EnvironmentSize(proto.Enum): @@ -583,6 +917,20 @@ class EnvironmentSize(proto.Enum): ENVIRONMENT_SIZE_MEDIUM = 2 ENVIRONMENT_SIZE_LARGE = 3 + class ResilienceMode(proto.Enum): + r"""Resilience mode of the Cloud Composer Environment. 
+ + Values: + RESILIENCE_MODE_UNSPECIFIED (0): + Default mode doesn't change environment + parameters. + HIGH_RESILIENCE (1): + Enabled High Resilience mode, including Cloud + SQL HA. + """ + RESILIENCE_MODE_UNSPECIFIED = 0 + HIGH_RESILIENCE = 1 + gke_cluster: str = proto.Field( proto.STRING, number=1, @@ -663,6 +1011,11 @@ class EnvironmentSize(proto.Enum): number=18, message="RecoveryConfig", ) + resilience_mode: ResilienceMode = proto.Field( + proto.ENUM, + number=19, + enum=ResilienceMode, + ) class WebServerNetworkAccessControl(proto.Message): diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/operations.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/operations.py index 364157297248..2842211d59cc 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/operations.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1/types/operations.py @@ -99,6 +99,10 @@ class Type(proto.Enum): Saves snapshot of the resource operation. LOAD_SNAPSHOT (6): Loads snapshot of the resource operation. + DATABASE_FAILOVER (7): + Triggers failover of environment's Cloud SQL + instance (only for highly resilient + environments). 
""" TYPE_UNSPECIFIED = 0 CREATE = 1 @@ -107,6 +111,7 @@ class Type(proto.Enum): CHECK = 4 SAVE_SNAPSHOT = 5 LOAD_SNAPSHOT = 6 + DATABASE_FAILOVER = 7 state: State = proto.Field( proto.ENUM, diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/__init__.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/__init__.py index ce61a11a0527..98b7a1313935 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/__init__.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/__init__.py @@ -28,11 +28,16 @@ CloudDataLineageIntegration, CreateEnvironmentRequest, DatabaseConfig, + DatabaseFailoverRequest, + DatabaseFailoverResponse, DeleteEnvironmentRequest, EncryptionConfig, Environment, EnvironmentConfig, + ExecuteAirflowCommandRequest, ExecuteAirflowCommandResponse, + FetchDatabasePropertiesRequest, + FetchDatabasePropertiesResponse, GetEnvironmentRequest, IPAllocationPolicy, ListEnvironmentsRequest, @@ -43,6 +48,7 @@ MasterAuthorizedNetworksConfig, NetworkingConfig, NodeConfig, + PollAirflowCommandRequest, PollAirflowCommandResponse, PrivateClusterConfig, PrivateEnvironmentConfig, @@ -52,6 +58,8 @@ SaveSnapshotResponse, ScheduledSnapshotsConfig, SoftwareConfig, + StopAirflowCommandRequest, + StopAirflowCommandResponse, UpdateEnvironmentRequest, WebServerConfig, WebServerNetworkAccessControl, @@ -72,12 +80,17 @@ "CloudDataLineageIntegration", "CreateEnvironmentRequest", "DatabaseConfig", + "DatabaseFailoverRequest", + "DatabaseFailoverResponse", "DeleteEnvironmentRequest", "EncryptionConfig", "Environment", "EnvironmentConfig", "EnvironmentsClient", + "ExecuteAirflowCommandRequest", "ExecuteAirflowCommandResponse", + "FetchDatabasePropertiesRequest", + "FetchDatabasePropertiesResponse", "GetEnvironmentRequest", "IPAllocationPolicy", "ImageVersion", @@ -93,6 +106,7 @@ 
"NetworkingConfig", "NodeConfig", "OperationMetadata", + "PollAirflowCommandRequest", "PollAirflowCommandResponse", "PrivateClusterConfig", "PrivateEnvironmentConfig", @@ -102,6 +116,8 @@ "SaveSnapshotResponse", "ScheduledSnapshotsConfig", "SoftwareConfig", + "StopAirflowCommandRequest", + "StopAirflowCommandResponse", "UpdateEnvironmentRequest", "WebServerConfig", "WebServerNetworkAccessControl", diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/gapic_metadata.json b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/gapic_metadata.json index 1f120bab3d17..ba26cde2ff19 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/gapic_metadata.json +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/gapic_metadata.json @@ -20,11 +20,26 @@ "create_environment" ] }, + "DatabaseFailover": { + "methods": [ + "database_failover" + ] + }, "DeleteEnvironment": { "methods": [ "delete_environment" ] }, + "ExecuteAirflowCommand": { + "methods": [ + "execute_airflow_command" + ] + }, + "FetchDatabaseProperties": { + "methods": [ + "fetch_database_properties" + ] + }, "GetEnvironment": { "methods": [ "get_environment" @@ -40,6 +55,11 @@ "load_snapshot" ] }, + "PollAirflowCommand": { + "methods": [ + "poll_airflow_command" + ] + }, "RestartWebServer": { "methods": [ "restart_web_server" @@ -50,6 +70,11 @@ "save_snapshot" ] }, + "StopAirflowCommand": { + "methods": [ + "stop_airflow_command" + ] + }, "UpdateEnvironment": { "methods": [ "update_environment" @@ -70,11 +95,26 @@ "create_environment" ] }, + "DatabaseFailover": { + "methods": [ + "database_failover" + ] + }, "DeleteEnvironment": { "methods": [ "delete_environment" ] }, + "ExecuteAirflowCommand": { + "methods": [ + "execute_airflow_command" + ] + }, + "FetchDatabaseProperties": { + "methods": [ + 
"fetch_database_properties" + ] + }, "GetEnvironment": { "methods": [ "get_environment" @@ -90,6 +130,11 @@ "load_snapshot" ] }, + "PollAirflowCommand": { + "methods": [ + "poll_airflow_command" + ] + }, "RestartWebServer": { "methods": [ "restart_web_server" @@ -100,6 +145,11 @@ "save_snapshot" ] }, + "StopAirflowCommand": { + "methods": [ + "stop_airflow_command" + ] + }, "UpdateEnvironment": { "methods": [ "update_environment" @@ -120,11 +170,26 @@ "create_environment" ] }, + "DatabaseFailover": { + "methods": [ + "database_failover" + ] + }, "DeleteEnvironment": { "methods": [ "delete_environment" ] }, + "ExecuteAirflowCommand": { + "methods": [ + "execute_airflow_command" + ] + }, + "FetchDatabaseProperties": { + "methods": [ + "fetch_database_properties" + ] + }, "GetEnvironment": { "methods": [ "get_environment" @@ -140,6 +205,11 @@ "load_snapshot" ] }, + "PollAirflowCommand": { + "methods": [ + "poll_airflow_command" + ] + }, "RestartWebServer": { "methods": [ "restart_web_server" @@ -150,6 +220,11 @@ "save_snapshot" ] }, + "StopAirflowCommand": { + "methods": [ + "stop_airflow_command" + ] + }, "UpdateEnvironment": { "methods": [ "update_environment" diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/gapic_version.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/gapic_version.py index b334dccf50ad..405b1cebcf15 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/gapic_version.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.8.0" # {x-release-please-version} +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/async_client.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/async_client.py index ab0736d07235..09cc9dabb09b 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/async_client.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/async_client.py @@ -1227,6 +1227,248 @@ async def sample_check_upgrade(): # Done; return the response. return response + async def execute_airflow_command( + self, + request: Optional[ + Union[environments.ExecuteAirflowCommandRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environments.ExecuteAirflowCommandResponse: + r"""Executes Airflow CLI command. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1beta1 + + async def sample_execute_airflow_command(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.ExecuteAirflowCommandRequest( + ) + + # Make the request + response = await client.execute_airflow_command(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.orchestration.airflow.service_v1beta1.types.ExecuteAirflowCommandRequest, dict]]): + The request object. Execute Airflow Command request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.orchestration.airflow.service_v1beta1.types.ExecuteAirflowCommandResponse: + Response to + ExecuteAirflowCommandRequest. + + """ + # Create or coerce a protobuf request object. + request = environments.ExecuteAirflowCommandRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.execute_airflow_command, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def stop_airflow_command( + self, + request: Optional[Union[environments.StopAirflowCommandRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environments.StopAirflowCommandResponse: + r"""Stops Airflow CLI command execution. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1beta1 + + async def sample_stop_airflow_command(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.StopAirflowCommandRequest( + ) + + # Make the request + response = await client.stop_airflow_command(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.orchestration.airflow.service_v1beta1.types.StopAirflowCommandRequest, dict]]): + The request object. Stop Airflow Command request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.orchestration.airflow.service_v1beta1.types.StopAirflowCommandResponse: + Response to + StopAirflowCommandRequest. + + """ + # Create or coerce a protobuf request object. 
+ request = environments.StopAirflowCommandRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.stop_airflow_command, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def poll_airflow_command( + self, + request: Optional[Union[environments.PollAirflowCommandRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environments.PollAirflowCommandResponse: + r"""Polls Airflow CLI command execution and fetches logs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1beta1 + + async def sample_poll_airflow_command(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.PollAirflowCommandRequest( + ) + + # Make the request + response = await client.poll_airflow_command(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.orchestration.airflow.service_v1beta1.types.PollAirflowCommandRequest, dict]]): + The request object. Poll Airflow Command request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.orchestration.airflow.service_v1beta1.types.PollAirflowCommandResponse: + Response to + PollAirflowCommandRequest. + + """ + # Create or coerce a protobuf request object. + request = environments.PollAirflowCommandRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.poll_airflow_command, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def save_snapshot( self, request: Optional[Union[environments.SaveSnapshotRequest, dict]] = None, @@ -1425,6 +1667,188 @@ async def sample_load_snapshot(): # Done; return the response. return response + async def database_failover( + self, + request: Optional[Union[environments.DatabaseFailoverRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Triggers database failover (only for highly resilient + environments). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1beta1 + + async def sample_database_failover(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.DatabaseFailoverRequest( + ) + + # Make the request + operation = client.database_failover(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.orchestration.airflow.service_v1beta1.types.DatabaseFailoverRequest, dict]]): + The request object. Request to trigger database failover + (only for highly resilient + environments). + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.orchestration.airflow.service_v1beta1.types.DatabaseFailoverResponse` + Response for DatabaseFailoverRequest. + + """ + # Create or coerce a protobuf request object. + request = environments.DatabaseFailoverRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.database_failover, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + environments.DatabaseFailoverResponse, + metadata_type=operations.OperationMetadata, + ) + + # Done; return the response. + return response + + async def fetch_database_properties( + self, + request: Optional[ + Union[environments.FetchDatabasePropertiesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environments.FetchDatabasePropertiesResponse: + r"""Fetches database properties. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1beta1 + + async def sample_fetch_database_properties(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.FetchDatabasePropertiesRequest( + environment="environment_value", + ) + + # Make the request + response = await client.fetch_database_properties(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.orchestration.airflow.service_v1beta1.types.FetchDatabasePropertiesRequest, dict]]): + The request object. Request to fetch properties of + environment's database. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.orchestration.airflow.service_v1beta1.types.FetchDatabasePropertiesResponse: + Response for + FetchDatabasePropertiesRequest. + + """ + # Create or coerce a protobuf request object. + request = environments.FetchDatabasePropertiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.fetch_database_properties, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/client.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/client.py index 482c06af5ee3..5dd4bfc6692d 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/client.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/client.py @@ -1462,6 +1462,251 @@ def sample_check_upgrade(): # Done; return the response. return response + def execute_airflow_command( + self, + request: Optional[ + Union[environments.ExecuteAirflowCommandRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environments.ExecuteAirflowCommandResponse: + r"""Executes Airflow CLI command. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1beta1 + + def sample_execute_airflow_command(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.ExecuteAirflowCommandRequest( + ) + + # Make the request + response = client.execute_airflow_command(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.orchestration.airflow.service_v1beta1.types.ExecuteAirflowCommandRequest, dict]): + The request object. Execute Airflow Command request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.orchestration.airflow.service_v1beta1.types.ExecuteAirflowCommandResponse: + Response to + ExecuteAirflowCommandRequest. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a environments.ExecuteAirflowCommandRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, environments.ExecuteAirflowCommandRequest): + request = environments.ExecuteAirflowCommandRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.execute_airflow_command] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def stop_airflow_command( + self, + request: Optional[Union[environments.StopAirflowCommandRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environments.StopAirflowCommandResponse: + r"""Stops Airflow CLI command execution. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1beta1 + + def sample_stop_airflow_command(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.StopAirflowCommandRequest( + ) + + # Make the request + response = client.stop_airflow_command(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.orchestration.airflow.service_v1beta1.types.StopAirflowCommandRequest, dict]): + The request object. Stop Airflow Command request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.orchestration.airflow.service_v1beta1.types.StopAirflowCommandResponse: + Response to + StopAirflowCommandRequest. + + """ + # Create or coerce a protobuf request object. 
+ # Minor optimization to avoid making a copy if the user passes + # in a environments.StopAirflowCommandRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, environments.StopAirflowCommandRequest): + request = environments.StopAirflowCommandRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.stop_airflow_command] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def poll_airflow_command( + self, + request: Optional[Union[environments.PollAirflowCommandRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environments.PollAirflowCommandResponse: + r"""Polls Airflow CLI command execution and fetches logs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1beta1 + + def sample_poll_airflow_command(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.PollAirflowCommandRequest( + ) + + # Make the request + response = client.poll_airflow_command(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.orchestration.airflow.service_v1beta1.types.PollAirflowCommandRequest, dict]): + The request object. Poll Airflow Command request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.orchestration.airflow.service_v1beta1.types.PollAirflowCommandResponse: + Response to + PollAirflowCommandRequest. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a environments.PollAirflowCommandRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, environments.PollAirflowCommandRequest): + request = environments.PollAirflowCommandRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.poll_airflow_command] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def save_snapshot( self, request: Optional[Union[environments.SaveSnapshotRequest, dict]] = None, @@ -1662,6 +1907,192 @@ def sample_load_snapshot(): # Done; return the response. return response + def database_failover( + self, + request: Optional[Union[environments.DatabaseFailoverRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Triggers database failover (only for highly resilient + environments). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1beta1 + + def sample_database_failover(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.DatabaseFailoverRequest( + ) + + # Make the request + operation = client.database_failover(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.orchestration.airflow.service_v1beta1.types.DatabaseFailoverRequest, dict]): + The request object. Request to trigger database failover + (only for highly resilient + environments). + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.orchestration.airflow.service_v1beta1.types.DatabaseFailoverResponse` + Response for DatabaseFailoverRequest. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a environments.DatabaseFailoverRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, environments.DatabaseFailoverRequest): + request = environments.DatabaseFailoverRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.database_failover] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + environments.DatabaseFailoverResponse, + metadata_type=operations.OperationMetadata, + ) + + # Done; return the response. + return response + + def fetch_database_properties( + self, + request: Optional[ + Union[environments.FetchDatabasePropertiesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environments.FetchDatabasePropertiesResponse: + r"""Fetches database properties. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.orchestration.airflow import service_v1beta1 + + def sample_fetch_database_properties(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.FetchDatabasePropertiesRequest( + environment="environment_value", + ) + + # Make the request + response = client.fetch_database_properties(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.orchestration.airflow.service_v1beta1.types.FetchDatabasePropertiesRequest, dict]): + The request object. Request to fetch properties of + environment's database. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.orchestration.airflow.service_v1beta1.types.FetchDatabasePropertiesResponse: + Response for + FetchDatabasePropertiesRequest. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a environments.FetchDatabasePropertiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, environments.FetchDatabasePropertiesRequest): + request = environments.FetchDatabasePropertiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.fetch_database_properties + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment", request.environment),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "EnvironmentsClient": return self diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/base.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/base.py index 7b7527cdde77..5dc12fdec60d 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/base.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/base.py @@ -160,6 +160,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.execute_airflow_command: gapic_v1.method.wrap_method( + self.execute_airflow_command, + default_timeout=None, + client_info=client_info, + ), + self.stop_airflow_command: gapic_v1.method.wrap_method( + self.stop_airflow_command, + default_timeout=None, + client_info=client_info, + ), + self.poll_airflow_command: gapic_v1.method.wrap_method( + self.poll_airflow_command, + default_timeout=None, + client_info=client_info, + ), self.save_snapshot: gapic_v1.method.wrap_method( self.save_snapshot, default_timeout=None, @@ -170,6 +185,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.database_failover: gapic_v1.method.wrap_method( + self.database_failover, + 
default_timeout=None, + client_info=client_info, + ), + self.fetch_database_properties: gapic_v1.method.wrap_method( + self.fetch_database_properties, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -252,6 +277,42 @@ def check_upgrade( ]: raise NotImplementedError() + @property + def execute_airflow_command( + self, + ) -> Callable[ + [environments.ExecuteAirflowCommandRequest], + Union[ + environments.ExecuteAirflowCommandResponse, + Awaitable[environments.ExecuteAirflowCommandResponse], + ], + ]: + raise NotImplementedError() + + @property + def stop_airflow_command( + self, + ) -> Callable[ + [environments.StopAirflowCommandRequest], + Union[ + environments.StopAirflowCommandResponse, + Awaitable[environments.StopAirflowCommandResponse], + ], + ]: + raise NotImplementedError() + + @property + def poll_airflow_command( + self, + ) -> Callable[ + [environments.PollAirflowCommandRequest], + Union[ + environments.PollAirflowCommandResponse, + Awaitable[environments.PollAirflowCommandResponse], + ], + ]: + raise NotImplementedError() + @property def save_snapshot( self, @@ -270,6 +331,27 @@ def load_snapshot( ]: raise NotImplementedError() + @property + def database_failover( + self, + ) -> Callable[ + [environments.DatabaseFailoverRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def fetch_database_properties( + self, + ) -> Callable[ + [environments.FetchDatabasePropertiesRequest], + Union[ + environments.FetchDatabasePropertiesResponse, + Awaitable[environments.FetchDatabasePropertiesResponse], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc.py 
index 0ed8ffd24831..75c253dc8fe2 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc.py @@ -431,6 +431,93 @@ def check_upgrade( ) return self._stubs["check_upgrade"] + @property + def execute_airflow_command( + self, + ) -> Callable[ + [environments.ExecuteAirflowCommandRequest], + environments.ExecuteAirflowCommandResponse, + ]: + r"""Return a callable for the execute airflow command method over gRPC. + + Executes Airflow CLI command. + + Returns: + Callable[[~.ExecuteAirflowCommandRequest], + ~.ExecuteAirflowCommandResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "execute_airflow_command" not in self._stubs: + self._stubs["execute_airflow_command"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1beta1.Environments/ExecuteAirflowCommand", + request_serializer=environments.ExecuteAirflowCommandRequest.serialize, + response_deserializer=environments.ExecuteAirflowCommandResponse.deserialize, + ) + return self._stubs["execute_airflow_command"] + + @property + def stop_airflow_command( + self, + ) -> Callable[ + [environments.StopAirflowCommandRequest], + environments.StopAirflowCommandResponse, + ]: + r"""Return a callable for the stop airflow command method over gRPC. + + Stops Airflow CLI command execution. + + Returns: + Callable[[~.StopAirflowCommandRequest], + ~.StopAirflowCommandResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "stop_airflow_command" not in self._stubs: + self._stubs["stop_airflow_command"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1beta1.Environments/StopAirflowCommand", + request_serializer=environments.StopAirflowCommandRequest.serialize, + response_deserializer=environments.StopAirflowCommandResponse.deserialize, + ) + return self._stubs["stop_airflow_command"] + + @property + def poll_airflow_command( + self, + ) -> Callable[ + [environments.PollAirflowCommandRequest], + environments.PollAirflowCommandResponse, + ]: + r"""Return a callable for the poll airflow command method over gRPC. + + Polls Airflow CLI command execution and fetches logs. + + Returns: + Callable[[~.PollAirflowCommandRequest], + ~.PollAirflowCommandResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "poll_airflow_command" not in self._stubs: + self._stubs["poll_airflow_command"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1beta1.Environments/PollAirflowCommand", + request_serializer=environments.PollAirflowCommandRequest.serialize, + response_deserializer=environments.PollAirflowCommandResponse.deserialize, + ) + return self._stubs["poll_airflow_command"] + @property def save_snapshot( self, @@ -489,6 +576,62 @@ def load_snapshot( ) return self._stubs["load_snapshot"] + @property + def database_failover( + self, + ) -> Callable[[environments.DatabaseFailoverRequest], operations_pb2.Operation]: + r"""Return a callable for the database failover method over gRPC. + + Triggers database failover (only for highly resilient + environments). 
+ + Returns: + Callable[[~.DatabaseFailoverRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "database_failover" not in self._stubs: + self._stubs["database_failover"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1beta1.Environments/DatabaseFailover", + request_serializer=environments.DatabaseFailoverRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["database_failover"] + + @property + def fetch_database_properties( + self, + ) -> Callable[ + [environments.FetchDatabasePropertiesRequest], + environments.FetchDatabasePropertiesResponse, + ]: + r"""Return a callable for the fetch database properties method over gRPC. + + Fetches database properties. + + Returns: + Callable[[~.FetchDatabasePropertiesRequest], + ~.FetchDatabasePropertiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "fetch_database_properties" not in self._stubs: + self._stubs["fetch_database_properties"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1beta1.Environments/FetchDatabaseProperties", + request_serializer=environments.FetchDatabasePropertiesRequest.serialize, + response_deserializer=environments.FetchDatabasePropertiesResponse.deserialize, + ) + return self._stubs["fetch_database_properties"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py index 5ff5ff163f77..ad2ff852cf07 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py @@ -449,6 +449,93 @@ def check_upgrade( ) return self._stubs["check_upgrade"] + @property + def execute_airflow_command( + self, + ) -> Callable[ + [environments.ExecuteAirflowCommandRequest], + Awaitable[environments.ExecuteAirflowCommandResponse], + ]: + r"""Return a callable for the execute airflow command method over gRPC. + + Executes Airflow CLI command. + + Returns: + Callable[[~.ExecuteAirflowCommandRequest], + Awaitable[~.ExecuteAirflowCommandResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "execute_airflow_command" not in self._stubs: + self._stubs["execute_airflow_command"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1beta1.Environments/ExecuteAirflowCommand", + request_serializer=environments.ExecuteAirflowCommandRequest.serialize, + response_deserializer=environments.ExecuteAirflowCommandResponse.deserialize, + ) + return self._stubs["execute_airflow_command"] + + @property + def stop_airflow_command( + self, + ) -> Callable[ + [environments.StopAirflowCommandRequest], + Awaitable[environments.StopAirflowCommandResponse], + ]: + r"""Return a callable for the stop airflow command method over gRPC. + + Stops Airflow CLI command execution. + + Returns: + Callable[[~.StopAirflowCommandRequest], + Awaitable[~.StopAirflowCommandResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "stop_airflow_command" not in self._stubs: + self._stubs["stop_airflow_command"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1beta1.Environments/StopAirflowCommand", + request_serializer=environments.StopAirflowCommandRequest.serialize, + response_deserializer=environments.StopAirflowCommandResponse.deserialize, + ) + return self._stubs["stop_airflow_command"] + + @property + def poll_airflow_command( + self, + ) -> Callable[ + [environments.PollAirflowCommandRequest], + Awaitable[environments.PollAirflowCommandResponse], + ]: + r"""Return a callable for the poll airflow command method over gRPC. + + Polls Airflow CLI command execution and fetches logs. + + Returns: + Callable[[~.PollAirflowCommandRequest], + Awaitable[~.PollAirflowCommandResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "poll_airflow_command" not in self._stubs: + self._stubs["poll_airflow_command"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1beta1.Environments/PollAirflowCommand", + request_serializer=environments.PollAirflowCommandRequest.serialize, + response_deserializer=environments.PollAirflowCommandResponse.deserialize, + ) + return self._stubs["poll_airflow_command"] + @property def save_snapshot( self, @@ -511,6 +598,64 @@ def load_snapshot( ) return self._stubs["load_snapshot"] + @property + def database_failover( + self, + ) -> Callable[ + [environments.DatabaseFailoverRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the database failover method over gRPC. + + Triggers database failover (only for highly resilient + environments). + + Returns: + Callable[[~.DatabaseFailoverRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "database_failover" not in self._stubs: + self._stubs["database_failover"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1beta1.Environments/DatabaseFailover", + request_serializer=environments.DatabaseFailoverRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["database_failover"] + + @property + def fetch_database_properties( + self, + ) -> Callable[ + [environments.FetchDatabasePropertiesRequest], + Awaitable[environments.FetchDatabasePropertiesResponse], + ]: + r"""Return a callable for the fetch database properties method over gRPC. 
+ + Fetches database properties. + + Returns: + Callable[[~.FetchDatabasePropertiesRequest], + Awaitable[~.FetchDatabasePropertiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_database_properties" not in self._stubs: + self._stubs["fetch_database_properties"] = self.grpc_channel.unary_unary( + "/google.cloud.orchestration.airflow.service.v1beta1.Environments/FetchDatabaseProperties", + request_serializer=environments.FetchDatabasePropertiesRequest.serialize, + response_deserializer=environments.FetchDatabasePropertiesResponse.deserialize, + ) + return self._stubs["fetch_database_properties"] + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/rest.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/rest.py index 85315d7ca568..2ff9d1dc1894 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/rest.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/rest.py @@ -88,6 +88,14 @@ def post_create_environment(self, response): logging.log(f"Received response: {response}") return response + def pre_database_failover(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_database_failover(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_environment(self, request, metadata): logging.log(f"Received request: {request}") return request, 
metadata @@ -96,6 +104,22 @@ def post_delete_environment(self, response): logging.log(f"Received response: {response}") return response + def pre_execute_airflow_command(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_execute_airflow_command(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fetch_database_properties(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_database_properties(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_environment(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -120,6 +144,14 @@ def post_load_snapshot(self, response): logging.log(f"Received response: {response}") return response + def pre_poll_airflow_command(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_poll_airflow_command(self, response): + logging.log(f"Received response: {response}") + return response + def pre_restart_web_server(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -136,6 +168,14 @@ def post_save_snapshot(self, response): logging.log(f"Received response: {response}") return response + def pre_stop_airflow_command(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_stop_airflow_command(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_environment(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -196,6 +236,29 @@ def post_create_environment( """ return response + def pre_database_failover( + self, + request: environments.DatabaseFailoverRequest, + metadata: Sequence[Tuple[str, str]], + ) -> 
Tuple[environments.DatabaseFailoverRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for database_failover + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. + """ + return request, metadata + + def post_database_failover( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for database_failover + + Override in a subclass to manipulate the response + after it is returned by the Environments server but before + it is returned to user code. + """ + return response + def pre_delete_environment( self, request: environments.DeleteEnvironmentRequest, @@ -219,6 +282,52 @@ def post_delete_environment( """ return response + def pre_execute_airflow_command( + self, + request: environments.ExecuteAirflowCommandRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[environments.ExecuteAirflowCommandRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for execute_airflow_command + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. + """ + return request, metadata + + def post_execute_airflow_command( + self, response: environments.ExecuteAirflowCommandResponse + ) -> environments.ExecuteAirflowCommandResponse: + """Post-rpc interceptor for execute_airflow_command + + Override in a subclass to manipulate the response + after it is returned by the Environments server but before + it is returned to user code. + """ + return response + + def pre_fetch_database_properties( + self, + request: environments.FetchDatabasePropertiesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[environments.FetchDatabasePropertiesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for fetch_database_properties + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. 
+ """ + return request, metadata + + def post_fetch_database_properties( + self, response: environments.FetchDatabasePropertiesResponse + ) -> environments.FetchDatabasePropertiesResponse: + """Post-rpc interceptor for fetch_database_properties + + Override in a subclass to manipulate the response + after it is returned by the Environments server but before + it is returned to user code. + """ + return response + def pre_get_environment( self, request: environments.GetEnvironmentRequest, @@ -288,6 +397,29 @@ def post_load_snapshot( """ return response + def pre_poll_airflow_command( + self, + request: environments.PollAirflowCommandRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[environments.PollAirflowCommandRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for poll_airflow_command + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. + """ + return request, metadata + + def post_poll_airflow_command( + self, response: environments.PollAirflowCommandResponse + ) -> environments.PollAirflowCommandResponse: + """Post-rpc interceptor for poll_airflow_command + + Override in a subclass to manipulate the response + after it is returned by the Environments server but before + it is returned to user code. + """ + return response + def pre_restart_web_server( self, request: environments.RestartWebServerRequest, @@ -334,6 +466,29 @@ def post_save_snapshot( """ return response + def pre_stop_airflow_command( + self, + request: environments.StopAirflowCommandRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[environments.StopAirflowCommandRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for stop_airflow_command + + Override in a subclass to manipulate the request or metadata + before they are sent to the Environments server. 
+ """ + return request, metadata + + def post_stop_airflow_command( + self, response: environments.StopAirflowCommandResponse + ) -> environments.StopAirflowCommandResponse: + """Post-rpc interceptor for stop_airflow_command + + Override in a subclass to manipulate the response + after it is returned by the Environments server but before + it is returned to user code. + """ + return response + def pre_update_environment( self, request: environments.UpdateEnvironmentRequest, @@ -741,6 +896,95 @@ def __call__( resp = self._interceptor.post_create_environment(resp) return resp + class _DatabaseFailover(EnvironmentsRestStub): + def __hash__(self): + return hash("DatabaseFailover") + + def __call__( + self, + request: environments.DatabaseFailoverRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the database failover method over HTTP. + + Args: + request (~.environments.DatabaseFailoverRequest): + The request object. Request to trigger database failover + (only for highly resilient + environments). + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta1/{environment=projects/*/locations/*/environments/*}:databaseFailover", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_database_failover( + request, metadata + ) + pb_request = environments.DatabaseFailoverRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_database_failover(resp) + return resp + class _DeleteEnvironment(EnvironmentsRestStub): def __hash__(self): return hash("DeleteEnvironment") @@ -819,6 +1063,185 @@ def __call__( resp = self._interceptor.post_delete_environment(resp) return resp + class _ExecuteAirflowCommand(EnvironmentsRestStub): + def __hash__(self): + return hash("ExecuteAirflowCommand") + + def __call__( + self, + request: environments.ExecuteAirflowCommandRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environments.ExecuteAirflowCommandResponse: + r"""Call the execute airflow command method over HTTP. + + Args: + request (~.environments.ExecuteAirflowCommandRequest): + The request object. Execute Airflow Command request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.environments.ExecuteAirflowCommandResponse: + Response to + ExecuteAirflowCommandRequest. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta1/{environment=projects/*/locations/*/environments/*}:executeAirflowCommand", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_execute_airflow_command( + request, metadata + ) + pb_request = environments.ExecuteAirflowCommandRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = environments.ExecuteAirflowCommandResponse() + pb_resp = environments.ExecuteAirflowCommandResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_execute_airflow_command(resp) + return resp + + class _FetchDatabaseProperties(EnvironmentsRestStub): + def __hash__(self): + return hash("FetchDatabaseProperties") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: environments.FetchDatabasePropertiesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environments.FetchDatabasePropertiesResponse: + r"""Call the fetch database properties method over HTTP. + + Args: + request (~.environments.FetchDatabasePropertiesRequest): + The request object. Request to fetch properties of + environment's database. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.environments.FetchDatabasePropertiesResponse: + Response for + FetchDatabasePropertiesRequest. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{environment=projects/*/locations/*/environments/*}:fetchDatabaseProperties", + }, + ] + request, metadata = self._interceptor.pre_fetch_database_properties( + request, metadata + ) + pb_request = environments.FetchDatabasePropertiesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = environments.FetchDatabasePropertiesResponse() + pb_resp = environments.FetchDatabasePropertiesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_fetch_database_properties(resp) + return resp + class _GetEnvironment(EnvironmentsRestStub): def __hash__(self): return hash("GetEnvironment") @@ -1062,6 +1485,94 @@ def __call__( resp = self._interceptor.post_load_snapshot(resp) return resp + class _PollAirflowCommand(EnvironmentsRestStub): + def __hash__(self): + return hash("PollAirflowCommand") + + def __call__( + self, + request: environments.PollAirflowCommandRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environments.PollAirflowCommandResponse: + r"""Call the poll airflow command method over HTTP. + + Args: + request (~.environments.PollAirflowCommandRequest): + The request object. Poll Airflow Command request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.environments.PollAirflowCommandResponse: + Response to + PollAirflowCommandRequest. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta1/{environment=projects/*/locations/*/environments/*}:pollAirflowCommand", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_poll_airflow_command( + request, metadata + ) + pb_request = environments.PollAirflowCommandRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = environments.PollAirflowCommandResponse() + pb_resp = environments.PollAirflowCommandResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_poll_airflow_command(resp) + return resp + class _RestartWebServer(EnvironmentsRestStub): def __hash__(self): return hash("RestartWebServer") @@ -1235,6 +1746,94 @@ def __call__( resp = self._interceptor.post_save_snapshot(resp) return resp + class _StopAirflowCommand(EnvironmentsRestStub): + def __hash__(self): + return hash("StopAirflowCommand") + + def __call__( + self, + request: environments.StopAirflowCommandRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> environments.StopAirflowCommandResponse: + r"""Call the stop airflow command method over HTTP. + + Args: + request (~.environments.StopAirflowCommandRequest): + The request object. Stop Airflow Command request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.environments.StopAirflowCommandResponse: + Response to + StopAirflowCommandRequest. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta1/{environment=projects/*/locations/*/environments/*}:stopAirflowCommand", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_stop_airflow_command( + request, metadata + ) + pb_request = environments.StopAirflowCommandRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = environments.StopAirflowCommandResponse() + pb_resp = environments.StopAirflowCommandResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_stop_airflow_command(resp) + return resp + class _UpdateEnvironment(EnvironmentsRestStub): def __hash__(self): return hash("UpdateEnvironment") @@ -1351,6 +1950,14 @@ def create_environment( # In C++ this would require a dynamic_cast return self._CreateEnvironment(self._session, self._host, self._interceptor) # type: ignore + @property + def database_failover( + self, + ) -> Callable[[environments.DatabaseFailoverRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DatabaseFailover(self._session, self._host, self._interceptor) # type: ignore + @property def delete_environment( self, @@ -1359,6 +1966,28 @@ def delete_environment( # In C++ this would require a dynamic_cast return self._DeleteEnvironment(self._session, self._host, self._interceptor) # type: ignore + @property + def execute_airflow_command( + self, + ) -> Callable[ + [environments.ExecuteAirflowCommandRequest], + environments.ExecuteAirflowCommandResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExecuteAirflowCommand(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_database_properties( + self, + ) -> Callable[ + [environments.FetchDatabasePropertiesRequest], + environments.FetchDatabasePropertiesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._FetchDatabaseProperties(self._session, self._host, self._interceptor) # type: ignore + @property def get_environment( self, @@ -1385,6 +2014,17 @@ def load_snapshot( # In C++ this would require a dynamic_cast return self._LoadSnapshot(self._session, self._host, self._interceptor) # type: ignore + @property + def poll_airflow_command( + self, + ) -> Callable[ + [environments.PollAirflowCommandRequest], + environments.PollAirflowCommandResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._PollAirflowCommand(self._session, self._host, self._interceptor) # type: ignore + @property def restart_web_server( self, @@ -1401,6 +2041,17 @@ def save_snapshot( # In C++ this would require a dynamic_cast return self._SaveSnapshot(self._session, self._host, self._interceptor) # type: ignore + @property + def stop_airflow_command( + self, + ) -> Callable[ + [environments.StopAirflowCommandRequest], + environments.StopAirflowCommandResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._StopAirflowCommand(self._session, self._host, self._interceptor) # type: ignore + @property def update_environment( self, diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/types/__init__.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/types/__init__.py index d23643d198fd..673619aa0e2c 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/types/__init__.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/types/__init__.py @@ -19,11 +19,16 @@ CloudDataLineageIntegration, CreateEnvironmentRequest, DatabaseConfig, + DatabaseFailoverRequest, + DatabaseFailoverResponse, DeleteEnvironmentRequest, EncryptionConfig, Environment, EnvironmentConfig, + ExecuteAirflowCommandRequest, ExecuteAirflowCommandResponse, + FetchDatabasePropertiesRequest, + FetchDatabasePropertiesResponse, GetEnvironmentRequest, IPAllocationPolicy, ListEnvironmentsRequest, @@ -34,6 +39,7 @@ MasterAuthorizedNetworksConfig, NetworkingConfig, NodeConfig, + PollAirflowCommandRequest, PollAirflowCommandResponse, PrivateClusterConfig, PrivateEnvironmentConfig, @@ -43,6 +49,8 @@ SaveSnapshotResponse, ScheduledSnapshotsConfig, SoftwareConfig, + StopAirflowCommandRequest, + StopAirflowCommandResponse, UpdateEnvironmentRequest, WebServerConfig, WebServerNetworkAccessControl, @@ -61,11 +69,16 @@ "CloudDataLineageIntegration", "CreateEnvironmentRequest", "DatabaseConfig", + "DatabaseFailoverRequest", + "DatabaseFailoverResponse", "DeleteEnvironmentRequest", "EncryptionConfig", "Environment", "EnvironmentConfig", + "ExecuteAirflowCommandRequest", "ExecuteAirflowCommandResponse", + "FetchDatabasePropertiesRequest", + "FetchDatabasePropertiesResponse", "GetEnvironmentRequest", "IPAllocationPolicy", "ListEnvironmentsRequest", @@ -76,6 +89,7 @@ 
"MasterAuthorizedNetworksConfig", "NetworkingConfig", "NodeConfig", + "PollAirflowCommandRequest", "PollAirflowCommandResponse", "PrivateClusterConfig", "PrivateEnvironmentConfig", @@ -85,6 +99,8 @@ "SaveSnapshotResponse", "ScheduledSnapshotsConfig", "SoftwareConfig", + "StopAirflowCommandRequest", + "StopAirflowCommandResponse", "UpdateEnvironmentRequest", "WebServerConfig", "WebServerNetworkAccessControl", diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/types/environments.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/types/environments.py index 1ee53c290b37..c7dfc4511123 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/types/environments.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/types/environments.py @@ -31,12 +31,20 @@ "DeleteEnvironmentRequest", "UpdateEnvironmentRequest", "RestartWebServerRequest", + "ExecuteAirflowCommandRequest", "ExecuteAirflowCommandResponse", + "StopAirflowCommandRequest", + "StopAirflowCommandResponse", + "PollAirflowCommandRequest", "PollAirflowCommandResponse", "SaveSnapshotRequest", "SaveSnapshotResponse", "LoadSnapshotRequest", "LoadSnapshotResponse", + "DatabaseFailoverRequest", + "DatabaseFailoverResponse", + "FetchDatabasePropertiesRequest", + "FetchDatabasePropertiesResponse", "EnvironmentConfig", "WebServerNetworkAccessControl", "SoftwareConfig", @@ -419,6 +427,44 @@ class RestartWebServerRequest(proto.Message): ) +class ExecuteAirflowCommandRequest(proto.Message): + r"""Execute Airflow Command request. + + Attributes: + environment (str): + The resource name of the environment in the + form: + "projects/{projectId}/locations/{locationId}/environments/{environmentId}". + command (str): + Airflow command. + subcommand (str): + Airflow subcommand. 
+ parameters (MutableSequence[str]): + Parameters for the Airflow command/subcommand as an array of + arguments. It may contain positional arguments like + ``["my-dag-id"]``, key-value parameters like + ``["--foo=bar"]`` or ``["--foo","bar"]``, or other flags + like ``["-f"]``. + """ + + environment: str = proto.Field( + proto.STRING, + number=1, + ) + command: str = proto.Field( + proto.STRING, + number=2, + ) + subcommand: str = proto.Field( + proto.STRING, + number=3, + ) + parameters: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + class ExecuteAirflowCommandResponse(proto.Message): r"""Response to ExecuteAirflowCommandRequest. @@ -454,6 +500,114 @@ class ExecuteAirflowCommandResponse(proto.Message): ) +class StopAirflowCommandRequest(proto.Message): + r"""Stop Airflow Command request. + + Attributes: + environment (str): + The resource name of the environment in the + form: + "projects/{projectId}/locations/{locationId}/environments/{environmentId}". + execution_id (str): + The unique ID of the command execution. + pod (str): + The name of the pod where the command is + executed. + pod_namespace (str): + The namespace of the pod where the command is + executed. + force (bool): + If true, the execution is terminated + forcefully (SIGKILL). If false, the execution is + stopped gracefully, giving it time for cleanup. + """ + + environment: str = proto.Field( + proto.STRING, + number=1, + ) + execution_id: str = proto.Field( + proto.STRING, + number=2, + ) + pod: str = proto.Field( + proto.STRING, + number=3, + ) + pod_namespace: str = proto.Field( + proto.STRING, + number=4, + ) + force: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class StopAirflowCommandResponse(proto.Message): + r"""Response to StopAirflowCommandRequest. + + Attributes: + is_done (bool): + Whether the execution is still running. + output (MutableSequence[str]): + Output message from stopping execution + request. 
+ """ + + is_done: bool = proto.Field( + proto.BOOL, + number=1, + ) + output: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class PollAirflowCommandRequest(proto.Message): + r"""Poll Airflow Command request. + + Attributes: + environment (str): + The resource name of the environment in the + form: + "projects/{projectId}/locations/{locationId}/environments/{environmentId}". + execution_id (str): + The unique ID of the command execution. + pod (str): + The name of the pod where the command is + executed. + pod_namespace (str): + The namespace of the pod where the command is + executed. + next_line_number (int): + Line number from which new logs should be + fetched. + """ + + environment: str = proto.Field( + proto.STRING, + number=1, + ) + execution_id: str = proto.Field( + proto.STRING, + number=2, + ) + pod: str = proto.Field( + proto.STRING, + number=3, + ) + pod_namespace: str = proto.Field( + proto.STRING, + number=4, + ) + next_line_number: int = proto.Field( + proto.INT32, + number=5, + ) + + class PollAirflowCommandResponse(proto.Message): r"""Response to PollAirflowCommandRequest. @@ -620,6 +774,75 @@ class LoadSnapshotResponse(proto.Message): r"""Response to LoadSnapshotRequest.""" +class DatabaseFailoverRequest(proto.Message): + r"""Request to trigger database failover (only for highly + resilient environments). + + Attributes: + environment (str): + Target environment: + "projects/{projectId}/locations/{locationId}/environments/{environmentId}". + """ + + environment: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DatabaseFailoverResponse(proto.Message): + r"""Response for DatabaseFailoverRequest.""" + + +class FetchDatabasePropertiesRequest(proto.Message): + r"""Request to fetch properties of environment's database. + + Attributes: + environment (str): + Required. The resource name of the + environment, in the form: + "projects/{projectId}/locations/{locationId}/environments/{environmentId}". 
+ """ + + environment: str = proto.Field( + proto.STRING, + number=1, + ) + + +class FetchDatabasePropertiesResponse(proto.Message): + r"""Response for FetchDatabasePropertiesRequest. + + Attributes: + primary_gce_zone (str): + The Compute Engine zone that the instance is + currently serving from. + secondary_gce_zone (str): + The Compute Engine zone that the failover + instance is currently serving from for a + regional Cloud SQL instance. + is_failover_replica_available (bool): + The availability status of the failover + replica. A false status indicates that the + failover replica is out of sync. The primary + instance can only fail over to the failover + replica when the status is true. + """ + + primary_gce_zone: str = proto.Field( + proto.STRING, + number=1, + ) + secondary_gce_zone: str = proto.Field( + proto.STRING, + number=2, + ) + is_failover_replica_available: bool = proto.Field( + proto.BOOL, + number=3, + ) + + class EnvironmentConfig(proto.Message): r"""Configuration information for an environment. @@ -725,6 +948,11 @@ class EnvironmentConfig(proto.Message): This field is supported for Cloud Composer environments in versions composer-2.\ *.*-airflow-*.*.\* and newer. + resilience_mode (google.cloud.orchestration.airflow.service_v1beta1.types.EnvironmentConfig.ResilienceMode): + Optional. Resilience mode of the Cloud Composer Environment. + + This field is supported for Cloud Composer environments in + versions composer-2.2.0-airflow-\ *.*.\* and newer. """ class EnvironmentSize(proto.Enum): @@ -745,6 +973,20 @@ class EnvironmentSize(proto.Enum): ENVIRONMENT_SIZE_MEDIUM = 2 ENVIRONMENT_SIZE_LARGE = 3 + class ResilienceMode(proto.Enum): + r"""Resilience mode of the Cloud Composer Environment. + + Values: + RESILIENCE_MODE_UNSPECIFIED (0): + Default mode doesn't change environment + parameters. + HIGH_RESILIENCE (1): + Enabled High Resilience mode, including Cloud + SQL HA. 
+ """ + RESILIENCE_MODE_UNSPECIFIED = 0 + HIGH_RESILIENCE = 1 + gke_cluster: str = proto.Field( proto.STRING, number=1, @@ -825,6 +1067,11 @@ class EnvironmentSize(proto.Enum): number=18, message="RecoveryConfig", ) + resilience_mode: ResilienceMode = proto.Field( + proto.ENUM, + number=20, + enum=ResilienceMode, + ) class WebServerNetworkAccessControl(proto.Message): diff --git a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/types/operations.py b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/types/operations.py index 7f7922e24dc3..08c8ed2a247b 100644 --- a/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/types/operations.py +++ b/packages/google-cloud-orchestration-airflow/google/cloud/orchestration/airflow/service_v1beta1/types/operations.py @@ -95,6 +95,10 @@ class Type(proto.Enum): Saves snapshot of the resource operation. LOAD_SNAPSHOT (6): Loads snapshot of the resource operation. + DATABASE_FAILOVER (7): + Triggers failover of environment's Cloud SQL + instance (only for highly resilient + environments). 
""" TYPE_UNSPECIFIED = 0 CREATE = 1 @@ -103,6 +107,7 @@ class Type(proto.Enum): CHECK = 4 SAVE_SNAPSHOT = 5 LOAD_SNAPSHOT = 6 + DATABASE_FAILOVER = 7 state: State = proto.Field( proto.ENUM, diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_create_environment_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_create_environment_async.py new file mode 100644 index 000000000000..d0655bf2a8d1 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_create_environment_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_CreateEnvironment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +async def sample_create_environment(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.CreateEnvironmentRequest( + ) + + # Make the request + operation = client.create_environment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_CreateEnvironment_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_create_environment_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_create_environment_sync.py new file mode 100644 index 000000000000..e0a9c90c110b --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_create_environment_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_CreateEnvironment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +def sample_create_environment(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.CreateEnvironmentRequest( + ) + + # Make the request + operation = client.create_environment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_CreateEnvironment_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_database_failover_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_database_failover_async.py new file mode 100644 index 000000000000..8c36632bb46f --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_database_failover_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DatabaseFailover +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_DatabaseFailover_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +async def sample_database_failover(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.DatabaseFailoverRequest( + ) + + # Make the request + operation = client.database_failover(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_DatabaseFailover_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_database_failover_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_database_failover_sync.py new file mode 100644 index 000000000000..6b3c921a3abd --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_database_failover_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DatabaseFailover +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_DatabaseFailover_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +def sample_database_failover(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.DatabaseFailoverRequest( + ) + + # Make the request + operation = client.database_failover(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_DatabaseFailover_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_delete_environment_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_delete_environment_async.py new file mode 100644 index 000000000000..ea715af765ad --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_delete_environment_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_DeleteEnvironment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +async def sample_delete_environment(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.DeleteEnvironmentRequest( + ) + + # Make the request + operation = client.delete_environment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_DeleteEnvironment_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_delete_environment_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_delete_environment_sync.py new file mode 100644 index 000000000000..7563eeb487ad --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_delete_environment_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_DeleteEnvironment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +def sample_delete_environment(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.DeleteEnvironmentRequest( + ) + + # Make the request + operation = client.delete_environment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_DeleteEnvironment_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_execute_airflow_command_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_execute_airflow_command_async.py new file mode 100644 index 000000000000..19be15194623 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_execute_airflow_command_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExecuteAirflowCommand +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_ExecuteAirflowCommand_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +async def sample_execute_airflow_command(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.ExecuteAirflowCommandRequest( + ) + + # Make the request + response = await client.execute_airflow_command(request=request) + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_ExecuteAirflowCommand_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_execute_airflow_command_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_execute_airflow_command_sync.py new file mode 100644 index 000000000000..cd333c7a0c19 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_execute_airflow_command_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExecuteAirflowCommand +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_ExecuteAirflowCommand_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +def sample_execute_airflow_command(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.ExecuteAirflowCommandRequest( + ) + + # Make the request + response = client.execute_airflow_command(request=request) + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_ExecuteAirflowCommand_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_fetch_database_properties_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_fetch_database_properties_async.py new file mode 100644 index 000000000000..0de483668671 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_fetch_database_properties_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchDatabaseProperties +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_FetchDatabaseProperties_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +async def sample_fetch_database_properties(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.FetchDatabasePropertiesRequest( + environment="environment_value", + ) + + # Make the request + response = await client.fetch_database_properties(request=request) + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_FetchDatabaseProperties_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_fetch_database_properties_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_fetch_database_properties_sync.py new file mode 100644 index 000000000000..351c665a2dab --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_fetch_database_properties_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchDatabaseProperties +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_FetchDatabaseProperties_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +def sample_fetch_database_properties(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.FetchDatabasePropertiesRequest( + environment="environment_value", + ) + + # Make the request + response = client.fetch_database_properties(request=request) + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_FetchDatabaseProperties_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_get_environment_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_get_environment_async.py new file mode 100644 index 000000000000..a2534c3f26bb --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_get_environment_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_GetEnvironment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +async def sample_get_environment(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.GetEnvironmentRequest( + ) + + # Make the request + response = await client.get_environment(request=request) + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_GetEnvironment_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_get_environment_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_get_environment_sync.py new file mode 100644 index 000000000000..666b75a8df6a --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_get_environment_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_GetEnvironment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +def sample_get_environment(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.GetEnvironmentRequest( + ) + + # Make the request + response = client.get_environment(request=request) + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_GetEnvironment_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_list_environments_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_list_environments_async.py new file mode 100644 index 000000000000..150ae0c0281a --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_list_environments_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEnvironments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_ListEnvironments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +async def sample_list_environments(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.ListEnvironmentsRequest( + ) + + # Make the request + page_result = client.list_environments(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END composer_v1_generated_Environments_ListEnvironments_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_list_environments_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_list_environments_sync.py new file mode 100644 index 000000000000..0724fdb3ee19 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_list_environments_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEnvironments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_ListEnvironments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +def sample_list_environments(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.ListEnvironmentsRequest( + ) + + # Make the request + page_result = client.list_environments(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END composer_v1_generated_Environments_ListEnvironments_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_load_snapshot_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_load_snapshot_async.py new file mode 100644 index 000000000000..953a0294dff3 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_load_snapshot_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LoadSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_LoadSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +async def sample_load_snapshot(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.LoadSnapshotRequest( + ) + + # Make the request + operation = client.load_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_LoadSnapshot_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_load_snapshot_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_load_snapshot_sync.py new file mode 100644 index 000000000000..9d3283833652 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_load_snapshot_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LoadSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_LoadSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +def sample_load_snapshot(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.LoadSnapshotRequest( + ) + + # Make the request + operation = client.load_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_LoadSnapshot_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_poll_airflow_command_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_poll_airflow_command_async.py new file mode 100644 index 000000000000..a9943af88b29 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_poll_airflow_command_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PollAirflowCommand +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_PollAirflowCommand_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +async def sample_poll_airflow_command(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.PollAirflowCommandRequest( + ) + + # Make the request + response = await client.poll_airflow_command(request=request) + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_PollAirflowCommand_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_poll_airflow_command_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_poll_airflow_command_sync.py new file mode 100644 index 000000000000..de3a51afdf0b --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_poll_airflow_command_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PollAirflowCommand +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_PollAirflowCommand_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +def sample_poll_airflow_command(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.PollAirflowCommandRequest( + ) + + # Make the request + response = client.poll_airflow_command(request=request) + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_PollAirflowCommand_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_save_snapshot_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_save_snapshot_async.py new file mode 100644 index 000000000000..c580bdd3f5c2 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_save_snapshot_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SaveSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_SaveSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +async def sample_save_snapshot(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.SaveSnapshotRequest( + ) + + # Make the request + operation = client.save_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_SaveSnapshot_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_save_snapshot_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_save_snapshot_sync.py new file mode 100644 index 000000000000..09882bf17a2a --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_save_snapshot_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SaveSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_SaveSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +def sample_save_snapshot(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.SaveSnapshotRequest( + ) + + # Make the request + operation = client.save_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_SaveSnapshot_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_stop_airflow_command_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_stop_airflow_command_async.py new file mode 100644 index 000000000000..126036b5031c --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_stop_airflow_command_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StopAirflowCommand +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_StopAirflowCommand_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +async def sample_stop_airflow_command(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.StopAirflowCommandRequest( + ) + + # Make the request + response = await client.stop_airflow_command(request=request) + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_StopAirflowCommand_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_stop_airflow_command_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_stop_airflow_command_sync.py new file mode 100644 index 000000000000..b7939eb741bc --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_stop_airflow_command_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StopAirflowCommand +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_StopAirflowCommand_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +def sample_stop_airflow_command(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.StopAirflowCommandRequest( + ) + + # Make the request + response = client.stop_airflow_command(request=request) + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_StopAirflowCommand_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_update_environment_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_update_environment_async.py new file mode 100644 index 000000000000..a0cbc7e8484f --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_update_environment_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_UpdateEnvironment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +async def sample_update_environment(): + # Create a client + client = service_v1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1.UpdateEnvironmentRequest( + ) + + # Make the request + operation = client.update_environment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_UpdateEnvironment_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_update_environment_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_update_environment_sync.py new file mode 100644 index 000000000000..902831c1966c --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_environments_update_environment_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_Environments_UpdateEnvironment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +def sample_update_environment(): + # Create a client + client = service_v1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1.UpdateEnvironmentRequest( + ) + + # Make the request + operation = client.update_environment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END composer_v1_generated_Environments_UpdateEnvironment_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_image_versions_list_image_versions_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_image_versions_list_image_versions_async.py new file mode 100644 index 000000000000..84fb0d50f83d --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_image_versions_list_image_versions_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListImageVersions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_ImageVersions_ListImageVersions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +async def sample_list_image_versions(): + # Create a client + client = service_v1.ImageVersionsAsyncClient() + + # Initialize request argument(s) + request = service_v1.ListImageVersionsRequest( + ) + + # Make the request + page_result = client.list_image_versions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END composer_v1_generated_ImageVersions_ListImageVersions_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_image_versions_list_image_versions_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_image_versions_list_image_versions_sync.py new file mode 100644 index 000000000000..7d5b6624afd5 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1_generated_image_versions_list_image_versions_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListImageVersions +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow + + +# [START composer_v1_generated_ImageVersions_ListImageVersions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1 + + +def sample_list_image_versions(): + # Create a client + client = service_v1.ImageVersionsClient() + + # Initialize request argument(s) + request = service_v1.ListImageVersionsRequest( + ) + + # Make the request + page_result = client.list_image_versions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END composer_v1_generated_ImageVersions_ListImageVersions_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_check_upgrade_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_check_upgrade_async.py new file mode 100644 index 000000000000..6f832b6fe4fa --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_check_upgrade_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CheckUpgrade +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_CheckUpgrade_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +async def sample_check_upgrade(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.CheckUpgradeRequest( + ) + + # Make the request + operation = client.check_upgrade(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_CheckUpgrade_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_check_upgrade_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_check_upgrade_sync.py new file mode 100644 index 000000000000..25de62db95cb --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_check_upgrade_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CheckUpgrade +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_CheckUpgrade_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +def sample_check_upgrade(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.CheckUpgradeRequest( + ) + + # Make the request + operation = client.check_upgrade(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_CheckUpgrade_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_create_environment_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_create_environment_async.py new file mode 100644 index 000000000000..4e1a6142eb1c --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_create_environment_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_CreateEnvironment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +async def sample_create_environment(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.CreateEnvironmentRequest( + ) + + # Make the request + operation = client.create_environment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_CreateEnvironment_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_create_environment_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_create_environment_sync.py new file mode 100644 index 000000000000..47af308b6b91 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_create_environment_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_CreateEnvironment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +def sample_create_environment(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.CreateEnvironmentRequest( + ) + + # Make the request + operation = client.create_environment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_CreateEnvironment_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_database_failover_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_database_failover_async.py new file mode 100644 index 000000000000..14bdd3bd816b --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_database_failover_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may 
not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DatabaseFailover +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_DatabaseFailover_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +async def sample_database_failover(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.DatabaseFailoverRequest( + ) + + # Make the request + operation = client.database_failover(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_DatabaseFailover_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_database_failover_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_database_failover_sync.py new file mode 100644 index 000000000000..468299ee386a --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_database_failover_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DatabaseFailover +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_DatabaseFailover_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +def sample_database_failover(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.DatabaseFailoverRequest( + ) + + # Make the request + operation = client.database_failover(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_DatabaseFailover_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_delete_environment_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_delete_environment_async.py new file mode 100644 index 000000000000..e0b793c36621 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_delete_environment_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not 
use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_DeleteEnvironment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +async def sample_delete_environment(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.DeleteEnvironmentRequest( + ) + + # Make the request + operation = client.delete_environment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_DeleteEnvironment_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_delete_environment_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_delete_environment_sync.py new file mode 100644 index 000000000000..c89c8ed4e75a --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_delete_environment_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_DeleteEnvironment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +def sample_delete_environment(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.DeleteEnvironmentRequest( + ) + + # Make the request + operation = client.delete_environment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_DeleteEnvironment_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_execute_airflow_command_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_execute_airflow_command_async.py new file mode 100644 index 000000000000..a651cba160a3 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_execute_airflow_command_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExecuteAirflowCommand +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_ExecuteAirflowCommand_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +async def sample_execute_airflow_command(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.ExecuteAirflowCommandRequest( + ) + + # Make the request + response = await client.execute_airflow_command(request=request) + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_ExecuteAirflowCommand_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_execute_airflow_command_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_execute_airflow_command_sync.py new file mode 100644 index 000000000000..d78b247ca3c5 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_execute_airflow_command_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExecuteAirflowCommand +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_ExecuteAirflowCommand_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +def sample_execute_airflow_command(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.ExecuteAirflowCommandRequest( + ) + + # Make the request + response = client.execute_airflow_command(request=request) + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_ExecuteAirflowCommand_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_fetch_database_properties_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_fetch_database_properties_async.py new file mode 100644 index 000000000000..99693fbb4c54 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_fetch_database_properties_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchDatabaseProperties +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_FetchDatabaseProperties_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +async def sample_fetch_database_properties(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.FetchDatabasePropertiesRequest( + environment="environment_value", + ) + + # Make the request + response = await client.fetch_database_properties(request=request) + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_FetchDatabaseProperties_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_fetch_database_properties_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_fetch_database_properties_sync.py new file mode 100644 index 000000000000..fba72d5ea4df --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_fetch_database_properties_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for FetchDatabaseProperties +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_FetchDatabaseProperties_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +def sample_fetch_database_properties(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.FetchDatabasePropertiesRequest( + environment="environment_value", + ) + + # Make the request + response = client.fetch_database_properties(request=request) + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_FetchDatabaseProperties_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_get_environment_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_get_environment_async.py new file mode 100644 index 000000000000..bf32e05c79fc --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_get_environment_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file 
except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_GetEnvironment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +async def sample_get_environment(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.GetEnvironmentRequest( + ) + + # Make the request + response = await client.get_environment(request=request) + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_GetEnvironment_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_get_environment_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_get_environment_sync.py new file mode 100644 index 000000000000..6472177609b7 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_get_environment_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_GetEnvironment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +def sample_get_environment(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.GetEnvironmentRequest( + ) + + # Make the request + response = client.get_environment(request=request) + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_GetEnvironment_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_list_environments_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_list_environments_async.py new file mode 100644 index 000000000000..1ab6ce68e4c3 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_list_environments_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEnvironments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_ListEnvironments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +async def sample_list_environments(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.ListEnvironmentsRequest( + ) + + # Make the request + page_result = client.list_environments(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END composer_v1beta1_generated_Environments_ListEnvironments_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_list_environments_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_list_environments_sync.py new file mode 100644 index 000000000000..56d6f268d9fb --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_list_environments_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEnvironments +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_ListEnvironments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +def sample_list_environments(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.ListEnvironmentsRequest( + ) + + # Make the request + page_result = client.list_environments(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END composer_v1beta1_generated_Environments_ListEnvironments_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_load_snapshot_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_load_snapshot_async.py new file mode 100644 index 000000000000..9a689ea86069 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_load_snapshot_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LoadSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_LoadSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +async def sample_load_snapshot(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.LoadSnapshotRequest( + ) + + # Make the request + operation = client.load_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_LoadSnapshot_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_load_snapshot_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_load_snapshot_sync.py new file mode 100644 index 000000000000..6424bbd4ba94 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_load_snapshot_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LoadSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_LoadSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +def sample_load_snapshot(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.LoadSnapshotRequest( + ) + + # Make the request + operation = client.load_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_LoadSnapshot_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_poll_airflow_command_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_poll_airflow_command_async.py new file mode 100644 index 000000000000..1654e6e195b6 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_poll_airflow_command_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PollAirflowCommand +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_PollAirflowCommand_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +async def sample_poll_airflow_command(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.PollAirflowCommandRequest( + ) + + # Make the request + response = await client.poll_airflow_command(request=request) + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_PollAirflowCommand_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_poll_airflow_command_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_poll_airflow_command_sync.py new file mode 100644 index 000000000000..edefc8c82b64 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_poll_airflow_command_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PollAirflowCommand +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_PollAirflowCommand_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +def sample_poll_airflow_command(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.PollAirflowCommandRequest( + ) + + # Make the request + response = client.poll_airflow_command(request=request) + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_PollAirflowCommand_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_restart_web_server_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_restart_web_server_async.py new file mode 100644 index 000000000000..2c26cfbc4c2b --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_restart_web_server_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestartWebServer +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_RestartWebServer_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +async def sample_restart_web_server(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.RestartWebServerRequest( + ) + + # Make the request + operation = client.restart_web_server(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_RestartWebServer_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_restart_web_server_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_restart_web_server_sync.py new file mode 100644 index 000000000000..cd5f25d19a97 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_restart_web_server_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for RestartWebServer +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_RestartWebServer_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +def sample_restart_web_server(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.RestartWebServerRequest( + ) + + # Make the request + operation = client.restart_web_server(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_RestartWebServer_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_save_snapshot_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_save_snapshot_async.py new file mode 100644 index 000000000000..e007dcd0595a --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_save_snapshot_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this 
file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SaveSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_SaveSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +async def sample_save_snapshot(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.SaveSnapshotRequest( + ) + + # Make the request + operation = client.save_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_SaveSnapshot_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_save_snapshot_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_save_snapshot_sync.py new file mode 100644 index 000000000000..88e01f01d51c --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_save_snapshot_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SaveSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_SaveSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +def sample_save_snapshot(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.SaveSnapshotRequest( + ) + + # Make the request + operation = client.save_snapshot(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_SaveSnapshot_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_stop_airflow_command_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_stop_airflow_command_async.py new file mode 100644 index 000000000000..d2923ae34a2c --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_stop_airflow_command_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StopAirflowCommand +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_StopAirflowCommand_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +async def sample_stop_airflow_command(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.StopAirflowCommandRequest( + ) + + # Make the request + response = await client.stop_airflow_command(request=request) + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_StopAirflowCommand_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_stop_airflow_command_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_stop_airflow_command_sync.py new file mode 100644 index 000000000000..354eeea59b65 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_stop_airflow_command_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StopAirflowCommand +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_StopAirflowCommand_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +def sample_stop_airflow_command(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.StopAirflowCommandRequest( + ) + + # Make the request + response = client.stop_airflow_command(request=request) + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_StopAirflowCommand_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_update_environment_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_update_environment_async.py new file mode 100644 index 000000000000..a848381a9c06 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_update_environment_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_UpdateEnvironment_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +async def sample_update_environment(): + # Create a client + client = service_v1beta1.EnvironmentsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.UpdateEnvironmentRequest( + ) + + # Make the request + operation = client.update_environment(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_UpdateEnvironment_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_update_environment_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_update_environment_sync.py new file mode 100644 index 000000000000..0de210f8e6d5 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_environments_update_environment_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for UpdateEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_Environments_UpdateEnvironment_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +def sample_update_environment(): + # Create a client + client = service_v1beta1.EnvironmentsClient() + + # Initialize request argument(s) + request = service_v1beta1.UpdateEnvironmentRequest( + ) + + # Make the request + operation = client.update_environment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END composer_v1beta1_generated_Environments_UpdateEnvironment_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_image_versions_list_image_versions_async.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_image_versions_list_image_versions_async.py new file mode 100644 index 000000000000..3651597681c2 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_image_versions_list_image_versions_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); 
+# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListImageVersions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_ImageVersions_ListImageVersions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +async def sample_list_image_versions(): + # Create a client + client = service_v1beta1.ImageVersionsAsyncClient() + + # Initialize request argument(s) + request = service_v1beta1.ListImageVersionsRequest( + ) + + # Make the request + page_result = client.list_image_versions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END composer_v1beta1_generated_ImageVersions_ListImageVersions_async] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_image_versions_list_image_versions_sync.py b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_image_versions_list_image_versions_sync.py new file mode 100644 index 000000000000..0b68eee82db0 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/composer_v1beta1_generated_image_versions_list_image_versions_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListImageVersions +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-orchestration-airflow-service + + +# [START composer_v1beta1_generated_ImageVersions_ListImageVersions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.orchestration.airflow import service_v1beta1 + + +def sample_list_image_versions(): + # Create a client + client = service_v1beta1.ImageVersionsClient() + + # Initialize request argument(s) + request = service_v1beta1.ListImageVersionsRequest( + ) + + # Make the request + page_result = client.list_image_versions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END composer_v1beta1_generated_ImageVersions_ListImageVersions_sync] diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json b/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json new file mode 100644 index 000000000000..73f9b69284b1 --- /dev/null +++ b/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1.json @@ -0,0 +1,2076 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.orchestration.airflow.service.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-orchestration-airflow", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, 
+ "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient.create_environment", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.CreateEnvironment", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "CreateEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.CreateEnvironmentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "environment", + "type": "google.cloud.orchestration.airflow.service_v1.types.Environment" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_environment" + }, + "description": "Sample for CreateEnvironment", + "file": "composer_v1_generated_environments_create_environment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_CreateEnvironment_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_create_environment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient", + 
"shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient.create_environment", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.CreateEnvironment", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "CreateEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.CreateEnvironmentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "environment", + "type": "google.cloud.orchestration.airflow.service_v1.types.Environment" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_environment" + }, + "description": "Sample for CreateEnvironment", + "file": "composer_v1_generated_environments_create_environment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_CreateEnvironment_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_create_environment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": 
"google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient.database_failover", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.DatabaseFailover", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "DatabaseFailover" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.DatabaseFailoverRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "database_failover" + }, + "description": "Sample for DatabaseFailover", + "file": "composer_v1_generated_environments_database_failover_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_DatabaseFailover_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_database_failover_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient.database_failover", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.DatabaseFailover", + "service": { + "fullName": 
"google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "DatabaseFailover" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.DatabaseFailoverRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "database_failover" + }, + "description": "Sample for DatabaseFailover", + "file": "composer_v1_generated_environments_database_failover_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_DatabaseFailover_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_database_failover_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient.delete_environment", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.DeleteEnvironment", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "DeleteEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.orchestration.airflow.service_v1.types.DeleteEnvironmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_environment" + }, + "description": "Sample for DeleteEnvironment", + "file": "composer_v1_generated_environments_delete_environment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_DeleteEnvironment_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_delete_environment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient.delete_environment", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.DeleteEnvironment", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "DeleteEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.DeleteEnvironmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + 
"type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_environment" + }, + "description": "Sample for DeleteEnvironment", + "file": "composer_v1_generated_environments_delete_environment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_DeleteEnvironment_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_delete_environment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient.execute_airflow_command", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.ExecuteAirflowCommand", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "ExecuteAirflowCommand" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.ExecuteAirflowCommandRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.orchestration.airflow.service_v1.types.ExecuteAirflowCommandResponse", + "shortName": 
"execute_airflow_command" + }, + "description": "Sample for ExecuteAirflowCommand", + "file": "composer_v1_generated_environments_execute_airflow_command_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_ExecuteAirflowCommand_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_execute_airflow_command_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient.execute_airflow_command", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.ExecuteAirflowCommand", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "ExecuteAirflowCommand" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.ExecuteAirflowCommandRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.orchestration.airflow.service_v1.types.ExecuteAirflowCommandResponse", + "shortName": "execute_airflow_command" + }, + "description": "Sample for ExecuteAirflowCommand", + "file": "composer_v1_generated_environments_execute_airflow_command_sync.py", + "language": "PYTHON", + 
"origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_ExecuteAirflowCommand_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_execute_airflow_command_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient.fetch_database_properties", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.FetchDatabaseProperties", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "FetchDatabaseProperties" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.FetchDatabasePropertiesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.orchestration.airflow.service_v1.types.FetchDatabasePropertiesResponse", + "shortName": "fetch_database_properties" + }, + "description": "Sample for FetchDatabaseProperties", + "file": "composer_v1_generated_environments_fetch_database_properties_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_FetchDatabaseProperties_async", + "segments": [ + { + "end": 51, + 
"start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_fetch_database_properties_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient.fetch_database_properties", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.FetchDatabaseProperties", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "FetchDatabaseProperties" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.FetchDatabasePropertiesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.orchestration.airflow.service_v1.types.FetchDatabasePropertiesResponse", + "shortName": "fetch_database_properties" + }, + "description": "Sample for FetchDatabaseProperties", + "file": "composer_v1_generated_environments_fetch_database_properties_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_FetchDatabaseProperties_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 
45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_fetch_database_properties_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient.get_environment", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.GetEnvironment", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "GetEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.GetEnvironmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.orchestration.airflow.service_v1.types.Environment", + "shortName": "get_environment" + }, + "description": "Sample for GetEnvironment", + "file": "composer_v1_generated_environments_get_environment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_GetEnvironment_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + 
], + "title": "composer_v1_generated_environments_get_environment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient.get_environment", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.GetEnvironment", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "GetEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.GetEnvironmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.orchestration.airflow.service_v1.types.Environment", + "shortName": "get_environment" + }, + "description": "Sample for GetEnvironment", + "file": "composer_v1_generated_environments_get_environment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_GetEnvironment_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_get_environment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient.list_environments", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.ListEnvironments", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "ListEnvironments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.ListEnvironmentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.orchestration.airflow.service_v1.services.environments.pagers.ListEnvironmentsAsyncPager", + "shortName": "list_environments" + }, + "description": "Sample for ListEnvironments", + "file": "composer_v1_generated_environments_list_environments_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_ListEnvironments_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_list_environments_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": 
"google.cloud.orchestration.airflow.service_v1.EnvironmentsClient.list_environments", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.ListEnvironments", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "ListEnvironments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.ListEnvironmentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.orchestration.airflow.service_v1.services.environments.pagers.ListEnvironmentsPager", + "shortName": "list_environments" + }, + "description": "Sample for ListEnvironments", + "file": "composer_v1_generated_environments_list_environments_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_ListEnvironments_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_list_environments_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient.load_snapshot", + "method": { + "fullName": 
"google.cloud.orchestration.airflow.service.v1.Environments.LoadSnapshot", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "LoadSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.LoadSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "load_snapshot" + }, + "description": "Sample for LoadSnapshot", + "file": "composer_v1_generated_environments_load_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_LoadSnapshot_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_load_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient.load_snapshot", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.LoadSnapshot", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "LoadSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.orchestration.airflow.service_v1.types.LoadSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "load_snapshot" + }, + "description": "Sample for LoadSnapshot", + "file": "composer_v1_generated_environments_load_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_LoadSnapshot_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_load_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient.poll_airflow_command", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.PollAirflowCommand", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "PollAirflowCommand" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.PollAirflowCommandRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.orchestration.airflow.service_v1.types.PollAirflowCommandResponse", + "shortName": "poll_airflow_command" + }, + "description": "Sample for PollAirflowCommand", + "file": "composer_v1_generated_environments_poll_airflow_command_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_PollAirflowCommand_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_poll_airflow_command_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient.poll_airflow_command", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.PollAirflowCommand", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "PollAirflowCommand" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.PollAirflowCommandRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.orchestration.airflow.service_v1.types.PollAirflowCommandResponse", + "shortName": "poll_airflow_command" + }, + "description": "Sample for PollAirflowCommand", + "file": 
"composer_v1_generated_environments_poll_airflow_command_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_PollAirflowCommand_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_poll_airflow_command_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient.save_snapshot", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.SaveSnapshot", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "SaveSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.SaveSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "save_snapshot" + }, + "description": "Sample for SaveSnapshot", + "file": "composer_v1_generated_environments_save_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_SaveSnapshot_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + 
"end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_save_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient.save_snapshot", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.SaveSnapshot", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "SaveSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.SaveSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "save_snapshot" + }, + "description": "Sample for SaveSnapshot", + "file": "composer_v1_generated_environments_save_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_SaveSnapshot_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } 
+ ], + "title": "composer_v1_generated_environments_save_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient.stop_airflow_command", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.StopAirflowCommand", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "StopAirflowCommand" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.StopAirflowCommandRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.orchestration.airflow.service_v1.types.StopAirflowCommandResponse", + "shortName": "stop_airflow_command" + }, + "description": "Sample for StopAirflowCommand", + "file": "composer_v1_generated_environments_stop_airflow_command_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_StopAirflowCommand_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_stop_airflow_command_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.orchestration.airflow.service_v1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient.stop_airflow_command", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.StopAirflowCommand", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "StopAirflowCommand" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.StopAirflowCommandRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.orchestration.airflow.service_v1.types.StopAirflowCommandResponse", + "shortName": "stop_airflow_command" + }, + "description": "Sample for StopAirflowCommand", + "file": "composer_v1_generated_environments_stop_airflow_command_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_StopAirflowCommand_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_stop_airflow_command_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": 
"google.cloud.orchestration.airflow.service_v1.EnvironmentsAsyncClient.update_environment", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.UpdateEnvironment", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "UpdateEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.UpdateEnvironmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "environment", + "type": "google.cloud.orchestration.airflow.service_v1.types.Environment" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_environment" + }, + "description": "Sample for UpdateEnvironment", + "file": "composer_v1_generated_environments_update_environment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_UpdateEnvironment_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_update_environment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": 
"google.cloud.orchestration.airflow.service_v1.EnvironmentsClient.update_environment", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments.UpdateEnvironment", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.Environments", + "shortName": "Environments" + }, + "shortName": "UpdateEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.UpdateEnvironmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "environment", + "type": "google.cloud.orchestration.airflow.service_v1.types.Environment" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_environment" + }, + "description": "Sample for UpdateEnvironment", + "file": "composer_v1_generated_environments_update_environment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_Environments_UpdateEnvironment_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_environments_update_environment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.ImageVersionsAsyncClient", + "shortName": "ImageVersionsAsyncClient" + }, + "fullName": 
"google.cloud.orchestration.airflow.service_v1.ImageVersionsAsyncClient.list_image_versions", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1.ImageVersions.ListImageVersions", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.ImageVersions", + "shortName": "ImageVersions" + }, + "shortName": "ListImageVersions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.ListImageVersionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.orchestration.airflow.service_v1.services.image_versions.pagers.ListImageVersionsAsyncPager", + "shortName": "list_image_versions" + }, + "description": "Sample for ListImageVersions", + "file": "composer_v1_generated_image_versions_list_image_versions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_ImageVersions_ListImageVersions_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_image_versions_list_image_versions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1.ImageVersionsClient", + "shortName": "ImageVersionsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1.ImageVersionsClient.list_image_versions", + "method": { + "fullName": 
"google.cloud.orchestration.airflow.service.v1.ImageVersions.ListImageVersions", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1.ImageVersions", + "shortName": "ImageVersions" + }, + "shortName": "ListImageVersions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1.types.ListImageVersionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.orchestration.airflow.service_v1.services.image_versions.pagers.ListImageVersionsPager", + "shortName": "list_image_versions" + }, + "description": "Sample for ListImageVersions", + "file": "composer_v1_generated_image_versions_list_image_versions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1_generated_ImageVersions_ListImageVersions_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1_generated_image_versions_list_image_versions_sync.py" + } + ] +} diff --git a/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json b/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json new file mode 100644 index 000000000000..fe40746345fa --- /dev/null +++ 
b/packages/google-cloud-orchestration-airflow/samples/generated_samples/snippet_metadata_google.cloud.orchestration.airflow.service.v1beta1.json @@ -0,0 +1,2382 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.orchestration.airflow.service.v1beta1", + "version": "v1beta1" + } + ], + "language": "PYTHON", + "name": "google-cloud-orchestration-airflow-service", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient.check_upgrade", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.CheckUpgrade", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "CheckUpgrade" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.CheckUpgradeRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "check_upgrade" + }, + "description": "Sample for CheckUpgrade", + "file": "composer_v1beta1_generated_environments_check_upgrade_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_CheckUpgrade_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + 
"type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_check_upgrade_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient.check_upgrade", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.CheckUpgrade", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "CheckUpgrade" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.CheckUpgradeRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "check_upgrade" + }, + "description": "Sample for CheckUpgrade", + "file": "composer_v1beta1_generated_environments_check_upgrade_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_CheckUpgrade_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_check_upgrade_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient.create_environment", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.CreateEnvironment", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "CreateEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.CreateEnvironmentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "environment", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.Environment" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_environment" + }, + "description": "Sample for CreateEnvironment", + "file": "composer_v1beta1_generated_environments_create_environment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_CreateEnvironment_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_create_environment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient.create_environment", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.CreateEnvironment", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "CreateEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.CreateEnvironmentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "environment", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.Environment" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_environment" + }, + "description": "Sample for CreateEnvironment", + "file": "composer_v1beta1_generated_environments_create_environment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_CreateEnvironment_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_create_environment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient.database_failover", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.DatabaseFailover", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "DatabaseFailover" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.DatabaseFailoverRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "database_failover" + }, + "description": "Sample for DatabaseFailover", + "file": "composer_v1beta1_generated_environments_database_failover_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_DatabaseFailover_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_database_failover_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient.database_failover", + 
"method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.DatabaseFailover", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "DatabaseFailover" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.DatabaseFailoverRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "database_failover" + }, + "description": "Sample for DatabaseFailover", + "file": "composer_v1beta1_generated_environments_database_failover_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_DatabaseFailover_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_database_failover_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient.delete_environment", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.DeleteEnvironment", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + 
"shortName": "Environments" + }, + "shortName": "DeleteEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.DeleteEnvironmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_environment" + }, + "description": "Sample for DeleteEnvironment", + "file": "composer_v1beta1_generated_environments_delete_environment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_DeleteEnvironment_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_delete_environment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient.delete_environment", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.DeleteEnvironment", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "DeleteEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.orchestration.airflow.service_v1beta1.types.DeleteEnvironmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_environment" + }, + "description": "Sample for DeleteEnvironment", + "file": "composer_v1beta1_generated_environments_delete_environment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_DeleteEnvironment_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_delete_environment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient.execute_airflow_command", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.ExecuteAirflowCommand", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "ExecuteAirflowCommand" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.ExecuteAirflowCommandRequest" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.orchestration.airflow.service_v1beta1.types.ExecuteAirflowCommandResponse", + "shortName": "execute_airflow_command" + }, + "description": "Sample for ExecuteAirflowCommand", + "file": "composer_v1beta1_generated_environments_execute_airflow_command_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_ExecuteAirflowCommand_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_execute_airflow_command_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient.execute_airflow_command", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.ExecuteAirflowCommand", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "ExecuteAirflowCommand" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.ExecuteAirflowCommandRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + 
} + ], + "resultType": "google.cloud.orchestration.airflow.service_v1beta1.types.ExecuteAirflowCommandResponse", + "shortName": "execute_airflow_command" + }, + "description": "Sample for ExecuteAirflowCommand", + "file": "composer_v1beta1_generated_environments_execute_airflow_command_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_ExecuteAirflowCommand_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_execute_airflow_command_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient.fetch_database_properties", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.FetchDatabaseProperties", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "FetchDatabaseProperties" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.FetchDatabasePropertiesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.orchestration.airflow.service_v1beta1.types.FetchDatabasePropertiesResponse", + "shortName": "fetch_database_properties" + }, + "description": "Sample for FetchDatabaseProperties", + "file": "composer_v1beta1_generated_environments_fetch_database_properties_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_FetchDatabaseProperties_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_fetch_database_properties_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient.fetch_database_properties", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.FetchDatabaseProperties", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "FetchDatabaseProperties" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.FetchDatabasePropertiesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.orchestration.airflow.service_v1beta1.types.FetchDatabasePropertiesResponse", + "shortName": 
"fetch_database_properties" + }, + "description": "Sample for FetchDatabaseProperties", + "file": "composer_v1beta1_generated_environments_fetch_database_properties_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_FetchDatabaseProperties_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_fetch_database_properties_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient.get_environment", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.GetEnvironment", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "GetEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.GetEnvironmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.orchestration.airflow.service_v1beta1.types.Environment", + "shortName": "get_environment" + }, + "description": "Sample for GetEnvironment", + "file": 
"composer_v1beta1_generated_environments_get_environment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_GetEnvironment_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_get_environment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient.get_environment", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.GetEnvironment", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "GetEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.GetEnvironmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.orchestration.airflow.service_v1beta1.types.Environment", + "shortName": "get_environment" + }, + "description": "Sample for GetEnvironment", + "file": "composer_v1beta1_generated_environments_get_environment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"composer_v1beta1_generated_Environments_GetEnvironment_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_get_environment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient.list_environments", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.ListEnvironments", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "ListEnvironments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.ListEnvironmentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.orchestration.airflow.service_v1beta1.services.environments.pagers.ListEnvironmentsAsyncPager", + "shortName": "list_environments" + }, + "description": "Sample for ListEnvironments", + "file": "composer_v1beta1_generated_environments_list_environments_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_ListEnvironments_async", + "segments": [ + { + "end": 51, + 
"start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_list_environments_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient.list_environments", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.ListEnvironments", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "ListEnvironments" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.ListEnvironmentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.orchestration.airflow.service_v1beta1.services.environments.pagers.ListEnvironmentsPager", + "shortName": "list_environments" + }, + "description": "Sample for ListEnvironments", + "file": "composer_v1beta1_generated_environments_list_environments_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_ListEnvironments_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_list_environments_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient.load_snapshot", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.LoadSnapshot", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "LoadSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.LoadSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "load_snapshot" + }, + "description": "Sample for LoadSnapshot", + "file": "composer_v1beta1_generated_environments_load_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_LoadSnapshot_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + 
], + "title": "composer_v1beta1_generated_environments_load_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient.load_snapshot", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.LoadSnapshot", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "LoadSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.LoadSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "load_snapshot" + }, + "description": "Sample for LoadSnapshot", + "file": "composer_v1beta1_generated_environments_load_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_LoadSnapshot_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_load_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient", + "shortName": 
"EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient.poll_airflow_command", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.PollAirflowCommand", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "PollAirflowCommand" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.PollAirflowCommandRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.orchestration.airflow.service_v1beta1.types.PollAirflowCommandResponse", + "shortName": "poll_airflow_command" + }, + "description": "Sample for PollAirflowCommand", + "file": "composer_v1beta1_generated_environments_poll_airflow_command_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_PollAirflowCommand_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_poll_airflow_command_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient.poll_airflow_command", + "method": { + "fullName": 
"google.cloud.orchestration.airflow.service.v1beta1.Environments.PollAirflowCommand", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "PollAirflowCommand" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.PollAirflowCommandRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.orchestration.airflow.service_v1beta1.types.PollAirflowCommandResponse", + "shortName": "poll_airflow_command" + }, + "description": "Sample for PollAirflowCommand", + "file": "composer_v1beta1_generated_environments_poll_airflow_command_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_PollAirflowCommand_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_poll_airflow_command_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient.restart_web_server", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.RestartWebServer", + "service": { + "fullName": 
"google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "RestartWebServer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.RestartWebServerRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "restart_web_server" + }, + "description": "Sample for RestartWebServer", + "file": "composer_v1beta1_generated_environments_restart_web_server_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_RestartWebServer_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_restart_web_server_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient.restart_web_server", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.RestartWebServer", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "RestartWebServer" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.orchestration.airflow.service_v1beta1.types.RestartWebServerRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "restart_web_server" + }, + "description": "Sample for RestartWebServer", + "file": "composer_v1beta1_generated_environments_restart_web_server_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_RestartWebServer_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_restart_web_server_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient.save_snapshot", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.SaveSnapshot", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "SaveSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.SaveSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + 
"type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "save_snapshot" + }, + "description": "Sample for SaveSnapshot", + "file": "composer_v1beta1_generated_environments_save_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_SaveSnapshot_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_save_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient.save_snapshot", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.SaveSnapshot", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "SaveSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.SaveSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "save_snapshot" + }, + "description": "Sample for SaveSnapshot", + "file": "composer_v1beta1_generated_environments_save_snapshot_sync.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_SaveSnapshot_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_save_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient.stop_airflow_command", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.StopAirflowCommand", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "StopAirflowCommand" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.StopAirflowCommandRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.orchestration.airflow.service_v1beta1.types.StopAirflowCommandResponse", + "shortName": "stop_airflow_command" + }, + "description": "Sample for StopAirflowCommand", + "file": "composer_v1beta1_generated_environments_stop_airflow_command_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_StopAirflowCommand_async", + "segments": [ + { + "end": 50, + 
"start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_stop_airflow_command_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient.stop_airflow_command", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.StopAirflowCommand", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "StopAirflowCommand" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.StopAirflowCommandRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.orchestration.airflow.service_v1beta1.types.StopAirflowCommandResponse", + "shortName": "stop_airflow_command" + }, + "description": "Sample for StopAirflowCommand", + "file": "composer_v1beta1_generated_environments_stop_airflow_command_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_StopAirflowCommand_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + 
"start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_stop_airflow_command_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient", + "shortName": "EnvironmentsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsAsyncClient.update_environment", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.UpdateEnvironment", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "UpdateEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.UpdateEnvironmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "environment", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.Environment" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_environment" + }, + "description": "Sample for UpdateEnvironment", + "file": "composer_v1beta1_generated_environments_update_environment_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_UpdateEnvironment_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + 
"type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_update_environment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient", + "shortName": "EnvironmentsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.EnvironmentsClient.update_environment", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments.UpdateEnvironment", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.Environments", + "shortName": "Environments" + }, + "shortName": "UpdateEnvironment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.UpdateEnvironmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "environment", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.Environment" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_environment" + }, + "description": "Sample for UpdateEnvironment", + "file": "composer_v1beta1_generated_environments_update_environment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_Environments_UpdateEnvironment_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 
38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_environments_update_environment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.ImageVersionsAsyncClient", + "shortName": "ImageVersionsAsyncClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.ImageVersionsAsyncClient.list_image_versions", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.ImageVersions.ListImageVersions", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.ImageVersions", + "shortName": "ImageVersions" + }, + "shortName": "ListImageVersions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.ListImageVersionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.orchestration.airflow.service_v1beta1.services.image_versions.pagers.ListImageVersionsAsyncPager", + "shortName": "list_image_versions" + }, + "description": "Sample for ListImageVersions", + "file": "composer_v1beta1_generated_image_versions_list_image_versions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_ImageVersions_ListImageVersions_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + 
"type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_image_versions_list_image_versions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.ImageVersionsClient", + "shortName": "ImageVersionsClient" + }, + "fullName": "google.cloud.orchestration.airflow.service_v1beta1.ImageVersionsClient.list_image_versions", + "method": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.ImageVersions.ListImageVersions", + "service": { + "fullName": "google.cloud.orchestration.airflow.service.v1beta1.ImageVersions", + "shortName": "ImageVersions" + }, + "shortName": "ListImageVersions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.orchestration.airflow.service_v1beta1.types.ListImageVersionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.orchestration.airflow.service_v1beta1.services.image_versions.pagers.ListImageVersionsPager", + "shortName": "list_image_versions" + }, + "description": "Sample for ListImageVersions", + "file": "composer_v1beta1_generated_image_versions_list_image_versions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "composer_v1beta1_generated_ImageVersions_ListImageVersions_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + 
"end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "composer_v1beta1_generated_image_versions_list_image_versions_sync.py" + } + ] +} diff --git a/packages/google-cloud-orchestration-airflow/scripts/fixup_service_v1_keywords.py b/packages/google-cloud-orchestration-airflow/scripts/fixup_service_v1_keywords.py index 31a0db1fc126..a5e0a38cc890 100644 --- a/packages/google-cloud-orchestration-airflow/scripts/fixup_service_v1_keywords.py +++ b/packages/google-cloud-orchestration-airflow/scripts/fixup_service_v1_keywords.py @@ -40,12 +40,17 @@ class serviceCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'create_environment': ('parent', 'environment', ), + 'database_failover': ('environment', ), 'delete_environment': ('name', ), + 'execute_airflow_command': ('environment', 'command', 'subcommand', 'parameters', ), + 'fetch_database_properties': ('environment', ), 'get_environment': ('name', ), 'list_environments': ('parent', 'page_size', 'page_token', ), 'list_image_versions': ('parent', 'page_size', 'page_token', 'include_past_releases', ), 'load_snapshot': ('environment', 'snapshot_path', 'skip_pypi_packages_installation', 'skip_environment_variables_setting', 'skip_airflow_overrides_setting', 'skip_gcs_data_copying', ), + 'poll_airflow_command': ('environment', 'execution_id', 'pod', 'pod_namespace', 'next_line_number', ), 'save_snapshot': ('environment', 'snapshot_location', ), + 'stop_airflow_command': ('environment', 'execution_id', 'pod', 'pod_namespace', 'force', ), 'update_environment': ('name', 'environment', 'update_mask', ), } diff --git a/packages/google-cloud-orchestration-airflow/scripts/fixup_service_v1beta1_keywords.py b/packages/google-cloud-orchestration-airflow/scripts/fixup_service_v1beta1_keywords.py index a4ae0cb0021e..ec51364c3cad 100644 --- a/packages/google-cloud-orchestration-airflow/scripts/fixup_service_v1beta1_keywords.py +++ 
b/packages/google-cloud-orchestration-airflow/scripts/fixup_service_v1beta1_keywords.py @@ -41,13 +41,18 @@ class serviceCallTransformer(cst.CSTTransformer): METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'check_upgrade': ('environment', 'image_version', ), 'create_environment': ('parent', 'environment', ), + 'database_failover': ('environment', ), 'delete_environment': ('name', ), + 'execute_airflow_command': ('environment', 'command', 'subcommand', 'parameters', ), + 'fetch_database_properties': ('environment', ), 'get_environment': ('name', ), 'list_environments': ('parent', 'page_size', 'page_token', ), 'list_image_versions': ('parent', 'page_size', 'page_token', 'include_past_releases', ), 'load_snapshot': ('environment', 'snapshot_path', 'skip_pypi_packages_installation', 'skip_environment_variables_setting', 'skip_airflow_overrides_setting', 'skip_gcs_data_copying', ), + 'poll_airflow_command': ('environment', 'execution_id', 'pod', 'pod_namespace', 'next_line_number', ), 'restart_web_server': ('name', ), 'save_snapshot': ('environment', 'snapshot_location', ), + 'stop_airflow_command': ('environment', 'execution_id', 'pod', 'pod_namespace', 'force', ), 'update_environment': ('update_mask', 'name', 'environment', ), } diff --git a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_environments.py b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_environments.py index 589f5ab3d15f..afd2db8d888c 100644 --- a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_environments.py +++ b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1/test_environments.py @@ -2137,11 +2137,11 @@ async def test_delete_environment_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - environments.SaveSnapshotRequest, + environments.ExecuteAirflowCommandRequest, dict, ], ) -def test_save_snapshot(request_type, transport: str = "grpc"): +def 
test_execute_airflow_command(request_type, transport: str = "grpc"): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2152,21 +2152,32 @@ def test_save_snapshot(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + with mock.patch.object( + type(client.transport.execute_airflow_command), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.save_snapshot(request) + call.return_value = environments.ExecuteAirflowCommandResponse( + execution_id="execution_id_value", + pod="pod_value", + pod_namespace="pod_namespace_value", + error="error_value", + ) + response = client.execute_airflow_command(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == environments.SaveSnapshotRequest() + assert args[0] == environments.ExecuteAirflowCommandRequest() # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, environments.ExecuteAirflowCommandResponse) + assert response.execution_id == "execution_id_value" + assert response.pod == "pod_value" + assert response.pod_namespace == "pod_namespace_value" + assert response.error == "error_value" -def test_save_snapshot_empty_call(): +def test_execute_airflow_command_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = EnvironmentsClient( @@ -2175,16 +2186,19 @@ def test_save_snapshot_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: - client.save_snapshot() + with mock.patch.object( + type(client.transport.execute_airflow_command), "__call__" + ) as call: + client.execute_airflow_command() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == environments.SaveSnapshotRequest() + assert args[0] == environments.ExecuteAirflowCommandRequest() @pytest.mark.asyncio -async def test_save_snapshot_async( - transport: str = "grpc_asyncio", request_type=environments.SaveSnapshotRequest +async def test_execute_airflow_command_async( + transport: str = "grpc_asyncio", + request_type=environments.ExecuteAirflowCommandRequest, ): client = EnvironmentsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2196,42 +2210,55 @@ async def test_save_snapshot_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + with mock.patch.object( + type(client.transport.execute_airflow_command), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + environments.ExecuteAirflowCommandResponse( + execution_id="execution_id_value", + pod="pod_value", + pod_namespace="pod_namespace_value", + error="error_value", + ) ) - response = await client.save_snapshot(request) + response = await client.execute_airflow_command(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == environments.SaveSnapshotRequest() + assert args[0] == environments.ExecuteAirflowCommandRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, environments.ExecuteAirflowCommandResponse) + assert response.execution_id == "execution_id_value" + assert response.pod == "pod_value" + assert response.pod_namespace == "pod_namespace_value" + assert response.error == "error_value" @pytest.mark.asyncio -async def test_save_snapshot_async_from_dict(): - await test_save_snapshot_async(request_type=dict) +async def test_execute_airflow_command_async_from_dict(): + await test_execute_airflow_command_async(request_type=dict) -def test_save_snapshot_field_headers(): +def test_execute_airflow_command_field_headers(): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = environments.SaveSnapshotRequest() + request = environments.ExecuteAirflowCommandRequest() request.environment = "environment_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.save_snapshot(request) + with mock.patch.object( + type(client.transport.execute_airflow_command), "__call__" + ) as call: + call.return_value = environments.ExecuteAirflowCommandResponse() + client.execute_airflow_command(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2247,23 +2274,25 @@ def test_save_snapshot_field_headers(): @pytest.mark.asyncio -async def test_save_snapshot_field_headers_async(): +async def test_execute_airflow_command_field_headers_async(): client = EnvironmentsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = environments.SaveSnapshotRequest() + request = environments.ExecuteAirflowCommandRequest() request.environment = "environment_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + with mock.patch.object( + type(client.transport.execute_airflow_command), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + environments.ExecuteAirflowCommandResponse() ) - await client.save_snapshot(request) + await client.execute_airflow_command(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -2281,11 +2310,11 @@ async def test_save_snapshot_field_headers_async(): @pytest.mark.parametrize( "request_type", [ - environments.LoadSnapshotRequest, + environments.StopAirflowCommandRequest, dict, ], ) -def test_load_snapshot(request_type, transport: str = "grpc"): +def test_stop_airflow_command(request_type, transport: str = "grpc"): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2296,21 +2325,28 @@ def test_load_snapshot(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + with mock.patch.object( + type(client.transport.stop_airflow_command), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.load_snapshot(request) + call.return_value = environments.StopAirflowCommandResponse( + is_done=True, + output=["output_value"], + ) + response = client.stop_airflow_command(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == environments.LoadSnapshotRequest() + assert args[0] == environments.StopAirflowCommandRequest() # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, environments.StopAirflowCommandResponse) + assert response.is_done is True + assert response.output == ["output_value"] -def test_load_snapshot_empty_call(): +def test_stop_airflow_command_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = EnvironmentsClient( @@ -2319,16 +2355,18 @@ def test_load_snapshot_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: - client.load_snapshot() + with mock.patch.object( + type(client.transport.stop_airflow_command), "__call__" + ) as call: + client.stop_airflow_command() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == environments.LoadSnapshotRequest() + assert args[0] == environments.StopAirflowCommandRequest() @pytest.mark.asyncio -async def test_load_snapshot_async( - transport: str = "grpc_asyncio", request_type=environments.LoadSnapshotRequest +async def test_stop_airflow_command_async( + transport: str = "grpc_asyncio", request_type=environments.StopAirflowCommandRequest ): client = EnvironmentsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2340,42 +2378,51 @@ async def test_load_snapshot_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + with mock.patch.object( + type(client.transport.stop_airflow_command), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + environments.StopAirflowCommandResponse( + is_done=True, + output=["output_value"], + ) ) - response = await client.load_snapshot(request) + response = await client.stop_airflow_command(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == environments.LoadSnapshotRequest() + assert args[0] == environments.StopAirflowCommandRequest() # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, environments.StopAirflowCommandResponse) + assert response.is_done is True + assert response.output == ["output_value"] @pytest.mark.asyncio -async def test_load_snapshot_async_from_dict(): - await test_load_snapshot_async(request_type=dict) +async def test_stop_airflow_command_async_from_dict(): + await test_stop_airflow_command_async(request_type=dict) -def test_load_snapshot_field_headers(): +def test_stop_airflow_command_field_headers(): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = environments.LoadSnapshotRequest() + request = environments.StopAirflowCommandRequest() request.environment = "environment_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.load_snapshot(request) + with mock.patch.object( + type(client.transport.stop_airflow_command), "__call__" + ) as call: + call.return_value = environments.StopAirflowCommandResponse() + client.stop_airflow_command(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -2391,23 +2438,25 @@ def test_load_snapshot_field_headers(): @pytest.mark.asyncio -async def test_load_snapshot_field_headers_async(): +async def test_stop_airflow_command_field_headers_async(): client = EnvironmentsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = environments.LoadSnapshotRequest() + request = environments.StopAirflowCommandRequest() request.environment = "environment_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + with mock.patch.object( + type(client.transport.stop_airflow_command), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + environments.StopAirflowCommandResponse() ) - await client.load_snapshot(request) + await client.stop_airflow_command(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -2425,173 +2474,1763 @@ async def test_load_snapshot_field_headers_async(): @pytest.mark.parametrize( "request_type", [ - environments.CreateEnvironmentRequest, + environments.PollAirflowCommandRequest, dict, ], ) -def test_create_environment_rest(request_type): +def test_poll_airflow_command(request_type, transport: str = "grpc"): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["environment"] = { - "name": "name_value", - "config": { - "gke_cluster": "gke_cluster_value", - "dag_gcs_prefix": "dag_gcs_prefix_value", - "node_count": 1070, - "software_config": { - "image_version": "image_version_value", - "airflow_config_overrides": {}, - "pypi_packages": {}, - "env_variables": {}, - "python_version": "python_version_value", - "scheduler_count": 1607, - }, - "node_config": { - "location": "location_value", - "machine_type": "machine_type_value", - "network": "network_value", - "subnetwork": "subnetwork_value", - "disk_size_gb": 1261, - "oauth_scopes": ["oauth_scopes_value1", "oauth_scopes_value2"], - "service_account": "service_account_value", - "tags": ["tags_value1", "tags_value2"], - "ip_allocation_policy": { - "use_ip_aliases": True, - "cluster_secondary_range_name": "cluster_secondary_range_name_value", - "cluster_ipv4_cidr_block": "cluster_ipv4_cidr_block_value", - "services_secondary_range_name": "services_secondary_range_name_value", - "services_ipv4_cidr_block": "services_ipv4_cidr_block_value", - }, - "enable_ip_masq_agent": True, - }, - "private_environment_config": { - "enable_private_environment": True, - "private_cluster_config": { - "enable_private_endpoint": True, - "master_ipv4_cidr_block": "master_ipv4_cidr_block_value", - "master_ipv4_reserved_range": "master_ipv4_reserved_range_value", - }, - 
"web_server_ipv4_cidr_block": "web_server_ipv4_cidr_block_value", - "cloud_sql_ipv4_cidr_block": "cloud_sql_ipv4_cidr_block_value", - "web_server_ipv4_reserved_range": "web_server_ipv4_reserved_range_value", - "cloud_composer_network_ipv4_cidr_block": "cloud_composer_network_ipv4_cidr_block_value", - "cloud_composer_network_ipv4_reserved_range": "cloud_composer_network_ipv4_reserved_range_value", - "enable_privately_used_public_ips": True, - "cloud_composer_connection_subnetwork": "cloud_composer_connection_subnetwork_value", - "networking_config": {"connection_type": 1}, - }, - "web_server_network_access_control": { - "allowed_ip_ranges": [ - {"value": "value_value", "description": "description_value"} - ] - }, - "database_config": {"machine_type": "machine_type_value"}, - "web_server_config": {"machine_type": "machine_type_value"}, - "encryption_config": {"kms_key_name": "kms_key_name_value"}, - "maintenance_window": { - "start_time": {"seconds": 751, "nanos": 543}, - "end_time": {}, - "recurrence": "recurrence_value", - }, - "workloads_config": { - "scheduler": { - "cpu": 0.328, - "memory_gb": 0.961, - "storage_gb": 0.1053, - "count": 553, - }, - "web_server": {"cpu": 0.328, "memory_gb": 0.961, "storage_gb": 0.1053}, - "worker": { - "cpu": 0.328, - "memory_gb": 0.961, - "storage_gb": 0.1053, - "min_count": 972, - "max_count": 974, - }, - }, - "environment_size": 1, - "airflow_uri": "airflow_uri_value", - "airflow_byoid_uri": "airflow_byoid_uri_value", - "master_authorized_networks_config": { - "enabled": True, - "cidr_blocks": [ - { - "display_name": "display_name_value", - "cidr_block": "cidr_block_value", - } - ], - }, - "recovery_config": { - "scheduled_snapshots_config": { - "enabled": True, - "snapshot_location": "snapshot_location_value", - "snapshot_creation_schedule": "snapshot_creation_schedule_value", - "time_zone": "time_zone_value", - } - }, - }, - "uuid": "uuid_value", - "state": 1, - "create_time": {}, - "update_time": {}, - "labels": {}, - } - 
request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.poll_airflow_command), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = environments.PollAirflowCommandResponse( + output_end=True, + ) + response = client.poll_airflow_command(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_environment(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == environments.PollAirflowCommandRequest() # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, environments.PollAirflowCommandResponse) + assert response.output_end is True -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_environment_rest_interceptors(null_interceptor): - transport = transports.EnvironmentsRestTransport( +def test_poll_airflow_command_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EnvironmentsRestInterceptor(), + transport="grpc", ) - client = EnvironmentsClient(transport=transport) + + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.EnvironmentsRestInterceptor, "post_create_environment" - ) as post, mock.patch.object( - transports.EnvironmentsRestInterceptor, "pre_create_environment" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = environments.CreateEnvironmentRequest.pb( - environments.CreateEnvironmentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + type(client.transport.poll_airflow_command), "__call__" + ) as call: + client.poll_airflow_command() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == environments.PollAirflowCommandRequest() + + +@pytest.mark.asyncio +async def test_poll_airflow_command_async( + transport: str = "grpc_asyncio", request_type=environments.PollAirflowCommandRequest +): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.poll_airflow_command), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + environments.PollAirflowCommandResponse( + output_end=True, + ) + ) + response = await client.poll_airflow_command(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == environments.PollAirflowCommandRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, environments.PollAirflowCommandResponse) + assert response.output_end is True + + +@pytest.mark.asyncio +async def test_poll_airflow_command_async_from_dict(): + await test_poll_airflow_command_async(request_type=dict) + + +def test_poll_airflow_command_field_headers(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.PollAirflowCommandRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.poll_airflow_command), "__call__" + ) as call: + call.return_value = environments.PollAirflowCommandResponse() + client.poll_airflow_command(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_poll_airflow_command_field_headers_async(): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = environments.PollAirflowCommandRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.poll_airflow_command), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + environments.PollAirflowCommandResponse() + ) + await client.poll_airflow_command(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + environments.SaveSnapshotRequest, + dict, + ], +) +def test_save_snapshot(request_type, transport: str = "grpc"): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.save_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == environments.SaveSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_save_snapshot_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + client.save_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == environments.SaveSnapshotRequest() + + +@pytest.mark.asyncio +async def test_save_snapshot_async( + transport: str = "grpc_asyncio", request_type=environments.SaveSnapshotRequest +): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.save_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == environments.SaveSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_save_snapshot_async_from_dict(): + await test_save_snapshot_async(request_type=dict) + + +def test_save_snapshot_field_headers(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = environments.SaveSnapshotRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.save_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_save_snapshot_field_headers_async(): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.SaveSnapshotRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.save_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + environments.LoadSnapshotRequest, + dict, + ], +) +def test_load_snapshot(request_type, transport: str = "grpc"): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.load_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == environments.LoadSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_load_snapshot_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + client.load_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == environments.LoadSnapshotRequest() + + +@pytest.mark.asyncio +async def test_load_snapshot_async( + transport: str = "grpc_asyncio", request_type=environments.LoadSnapshotRequest +): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.load_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == environments.LoadSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_load_snapshot_async_from_dict(): + await test_load_snapshot_async(request_type=dict) + + +def test_load_snapshot_field_headers(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.LoadSnapshotRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.load_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_load_snapshot_field_headers_async(): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.LoadSnapshotRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.load_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + environments.DatabaseFailoverRequest, + dict, + ], +) +def test_database_failover(request_type, transport: str = "grpc"): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.database_failover), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.database_failover(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == environments.DatabaseFailoverRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_database_failover_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.database_failover), "__call__" + ) as call: + client.database_failover() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == environments.DatabaseFailoverRequest() + + +@pytest.mark.asyncio +async def test_database_failover_async( + transport: str = "grpc_asyncio", request_type=environments.DatabaseFailoverRequest +): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.database_failover), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.database_failover(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == environments.DatabaseFailoverRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_database_failover_async_from_dict(): + await test_database_failover_async(request_type=dict) + + +def test_database_failover_field_headers(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.DatabaseFailoverRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.database_failover), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.database_failover(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_database_failover_field_headers_async(): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = environments.DatabaseFailoverRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.database_failover), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.database_failover(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + environments.FetchDatabasePropertiesRequest, + dict, + ], +) +def test_fetch_database_properties(request_type, transport: str = "grpc"): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_database_properties), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = environments.FetchDatabasePropertiesResponse( + primary_gce_zone="primary_gce_zone_value", + secondary_gce_zone="secondary_gce_zone_value", + is_failover_replica_available=True, + ) + response = client.fetch_database_properties(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == environments.FetchDatabasePropertiesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, environments.FetchDatabasePropertiesResponse) + assert response.primary_gce_zone == "primary_gce_zone_value" + assert response.secondary_gce_zone == "secondary_gce_zone_value" + assert response.is_failover_replica_available is True + + +def test_fetch_database_properties_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_database_properties), "__call__" + ) as call: + client.fetch_database_properties() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == environments.FetchDatabasePropertiesRequest() + + +@pytest.mark.asyncio +async def test_fetch_database_properties_async( + transport: str = "grpc_asyncio", + request_type=environments.FetchDatabasePropertiesRequest, +): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_database_properties), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + environments.FetchDatabasePropertiesResponse( + primary_gce_zone="primary_gce_zone_value", + secondary_gce_zone="secondary_gce_zone_value", + is_failover_replica_available=True, + ) + ) + response = await client.fetch_database_properties(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == environments.FetchDatabasePropertiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, environments.FetchDatabasePropertiesResponse) + assert response.primary_gce_zone == "primary_gce_zone_value" + assert response.secondary_gce_zone == "secondary_gce_zone_value" + assert response.is_failover_replica_available is True + + +@pytest.mark.asyncio +async def test_fetch_database_properties_async_from_dict(): + await test_fetch_database_properties_async(request_type=dict) + + +def test_fetch_database_properties_field_headers(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.FetchDatabasePropertiesRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_database_properties), "__call__" + ) as call: + call.return_value = environments.FetchDatabasePropertiesResponse() + client.fetch_database_properties(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_fetch_database_properties_field_headers_async(): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = environments.FetchDatabasePropertiesRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_database_properties), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + environments.FetchDatabasePropertiesResponse() + ) + await client.fetch_database_properties(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + environments.CreateEnvironmentRequest, + dict, + ], +) +def test_create_environment_rest(request_type): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["environment"] = { + "name": "name_value", + "config": { + "gke_cluster": "gke_cluster_value", + "dag_gcs_prefix": "dag_gcs_prefix_value", + "node_count": 1070, + "software_config": { + "image_version": "image_version_value", + "airflow_config_overrides": {}, + "pypi_packages": {}, + "env_variables": {}, + "python_version": "python_version_value", + "scheduler_count": 1607, + }, + "node_config": { + "location": "location_value", + "machine_type": "machine_type_value", + "network": "network_value", + "subnetwork": "subnetwork_value", + "disk_size_gb": 1261, + "oauth_scopes": ["oauth_scopes_value1", "oauth_scopes_value2"], + "service_account": "service_account_value", + "tags": ["tags_value1", "tags_value2"], + "ip_allocation_policy": { + "use_ip_aliases": True, + "cluster_secondary_range_name": 
"cluster_secondary_range_name_value", + "cluster_ipv4_cidr_block": "cluster_ipv4_cidr_block_value", + "services_secondary_range_name": "services_secondary_range_name_value", + "services_ipv4_cidr_block": "services_ipv4_cidr_block_value", + }, + "enable_ip_masq_agent": True, + }, + "private_environment_config": { + "enable_private_environment": True, + "private_cluster_config": { + "enable_private_endpoint": True, + "master_ipv4_cidr_block": "master_ipv4_cidr_block_value", + "master_ipv4_reserved_range": "master_ipv4_reserved_range_value", + }, + "web_server_ipv4_cidr_block": "web_server_ipv4_cidr_block_value", + "cloud_sql_ipv4_cidr_block": "cloud_sql_ipv4_cidr_block_value", + "web_server_ipv4_reserved_range": "web_server_ipv4_reserved_range_value", + "cloud_composer_network_ipv4_cidr_block": "cloud_composer_network_ipv4_cidr_block_value", + "cloud_composer_network_ipv4_reserved_range": "cloud_composer_network_ipv4_reserved_range_value", + "enable_privately_used_public_ips": True, + "cloud_composer_connection_subnetwork": "cloud_composer_connection_subnetwork_value", + "networking_config": {"connection_type": 1}, + }, + "web_server_network_access_control": { + "allowed_ip_ranges": [ + {"value": "value_value", "description": "description_value"} + ] + }, + "database_config": {"machine_type": "machine_type_value"}, + "web_server_config": {"machine_type": "machine_type_value"}, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "maintenance_window": { + "start_time": {"seconds": 751, "nanos": 543}, + "end_time": {}, + "recurrence": "recurrence_value", + }, + "workloads_config": { + "scheduler": { + "cpu": 0.328, + "memory_gb": 0.961, + "storage_gb": 0.1053, + "count": 553, + }, + "web_server": {"cpu": 0.328, "memory_gb": 0.961, "storage_gb": 0.1053}, + "worker": { + "cpu": 0.328, + "memory_gb": 0.961, + "storage_gb": 0.1053, + "min_count": 972, + "max_count": 974, + }, + }, + "environment_size": 1, + "airflow_uri": "airflow_uri_value", + 
"airflow_byoid_uri": "airflow_byoid_uri_value", + "master_authorized_networks_config": { + "enabled": True, + "cidr_blocks": [ + { + "display_name": "display_name_value", + "cidr_block": "cidr_block_value", + } + ], + }, + "recovery_config": { + "scheduled_snapshots_config": { + "enabled": True, + "snapshot_location": "snapshot_location_value", + "snapshot_creation_schedule": "snapshot_creation_schedule_value", + "time_zone": "time_zone_value", + } + }, + "resilience_mode": 1, + }, + "uuid": "uuid_value", + "state": 1, + "create_time": {}, + "update_time": {}, + "labels": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_environment(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_environment_rest_interceptors(null_interceptor): + transport = transports.EnvironmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EnvironmentsRestInterceptor(), + ) + client = EnvironmentsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_create_environment" + ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "pre_create_environment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = environments.CreateEnvironmentRequest.pb( + environments.CreateEnvironmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = environments.CreateEnvironmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_environment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_environment_rest_bad_request( + transport: str = "rest", request_type=environments.CreateEnvironmentRequest +): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will 
satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["environment"] = { + "name": "name_value", + "config": { + "gke_cluster": "gke_cluster_value", + "dag_gcs_prefix": "dag_gcs_prefix_value", + "node_count": 1070, + "software_config": { + "image_version": "image_version_value", + "airflow_config_overrides": {}, + "pypi_packages": {}, + "env_variables": {}, + "python_version": "python_version_value", + "scheduler_count": 1607, + }, + "node_config": { + "location": "location_value", + "machine_type": "machine_type_value", + "network": "network_value", + "subnetwork": "subnetwork_value", + "disk_size_gb": 1261, + "oauth_scopes": ["oauth_scopes_value1", "oauth_scopes_value2"], + "service_account": "service_account_value", + "tags": ["tags_value1", "tags_value2"], + "ip_allocation_policy": { + "use_ip_aliases": True, + "cluster_secondary_range_name": "cluster_secondary_range_name_value", + "cluster_ipv4_cidr_block": "cluster_ipv4_cidr_block_value", + "services_secondary_range_name": "services_secondary_range_name_value", + "services_ipv4_cidr_block": "services_ipv4_cidr_block_value", + }, + "enable_ip_masq_agent": True, + }, + "private_environment_config": { + "enable_private_environment": True, + "private_cluster_config": { + "enable_private_endpoint": True, + "master_ipv4_cidr_block": "master_ipv4_cidr_block_value", + "master_ipv4_reserved_range": "master_ipv4_reserved_range_value", + }, + "web_server_ipv4_cidr_block": "web_server_ipv4_cidr_block_value", + "cloud_sql_ipv4_cidr_block": "cloud_sql_ipv4_cidr_block_value", + "web_server_ipv4_reserved_range": "web_server_ipv4_reserved_range_value", + "cloud_composer_network_ipv4_cidr_block": "cloud_composer_network_ipv4_cidr_block_value", + "cloud_composer_network_ipv4_reserved_range": "cloud_composer_network_ipv4_reserved_range_value", + "enable_privately_used_public_ips": True, + "cloud_composer_connection_subnetwork": "cloud_composer_connection_subnetwork_value", + 
"networking_config": {"connection_type": 1}, + }, + "web_server_network_access_control": { + "allowed_ip_ranges": [ + {"value": "value_value", "description": "description_value"} + ] + }, + "database_config": {"machine_type": "machine_type_value"}, + "web_server_config": {"machine_type": "machine_type_value"}, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "maintenance_window": { + "start_time": {"seconds": 751, "nanos": 543}, + "end_time": {}, + "recurrence": "recurrence_value", + }, + "workloads_config": { + "scheduler": { + "cpu": 0.328, + "memory_gb": 0.961, + "storage_gb": 0.1053, + "count": 553, + }, + "web_server": {"cpu": 0.328, "memory_gb": 0.961, "storage_gb": 0.1053}, + "worker": { + "cpu": 0.328, + "memory_gb": 0.961, + "storage_gb": 0.1053, + "min_count": 972, + "max_count": 974, + }, + }, + "environment_size": 1, + "airflow_uri": "airflow_uri_value", + "airflow_byoid_uri": "airflow_byoid_uri_value", + "master_authorized_networks_config": { + "enabled": True, + "cidr_blocks": [ + { + "display_name": "display_name_value", + "cidr_block": "cidr_block_value", + } + ], + }, + "recovery_config": { + "scheduled_snapshots_config": { + "enabled": True, + "snapshot_location": "snapshot_location_value", + "snapshot_creation_schedule": "snapshot_creation_schedule_value", + "time_zone": "time_zone_value", + } + }, + "resilience_mode": 1, + }, + "uuid": "uuid_value", + "state": 1, + "create_time": {}, + "update_time": {}, + "labels": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_environment(request) + + +def test_create_environment_rest_flattened(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + environment=environments.Environment(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_environment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/environments" + % client.transport._host, + args[1], + ) + + +def test_create_environment_rest_flattened_error(transport: str = "rest"): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_environment( + environments.CreateEnvironmentRequest(), + parent="parent_value", + environment=environments.Environment(name="name_value"), + ) + + +def test_create_environment_rest_error(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + environments.GetEnvironmentRequest, + dict, + ], +) +def test_get_environment_rest(request_type): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/environments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = environments.Environment( + name="name_value", + uuid="uuid_value", + state=environments.Environment.State.CREATING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = environments.Environment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_environment(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, environments.Environment) + assert response.name == "name_value" + assert response.uuid == "uuid_value" + assert response.state == environments.Environment.State.CREATING + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_environment_rest_interceptors(null_interceptor): + transport = transports.EnvironmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EnvironmentsRestInterceptor(), + ) + client = EnvironmentsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_get_environment" + ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "pre_get_environment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = environments.GetEnvironmentRequest.pb( + environments.GetEnvironmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = environments.Environment.to_json( + environments.Environment() + ) + + request = environments.GetEnvironmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = environments.Environment() + + client.get_environment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_environment_rest_bad_request( + transport: str = "rest", request_type=environments.GetEnvironmentRequest +): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/environments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_environment(request) + + +def test_get_environment_rest_flattened(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = environments.Environment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/environments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = environments.Environment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_environment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/environments/*}" + % client.transport._host, + args[1], + ) + + +def test_get_environment_rest_flattened_error(transport: str = "rest"): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_environment( + environments.GetEnvironmentRequest(), + name="name_value", + ) + + +def test_get_environment_rest_error(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + environments.ListEnvironmentsRequest, + dict, + ], +) +def test_list_environments_rest(request_type): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = environments.ListEnvironmentsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = environments.ListEnvironmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_environments(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListEnvironmentsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_environments_rest_interceptors(null_interceptor): + transport = transports.EnvironmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EnvironmentsRestInterceptor(), + ) + client = EnvironmentsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_list_environments" + ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "pre_list_environments" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = environments.ListEnvironmentsRequest.pb( + environments.ListEnvironmentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = environments.ListEnvironmentsResponse.to_json( + environments.ListEnvironmentsResponse() + ) + + request = environments.ListEnvironmentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = environments.ListEnvironmentsResponse() + + client.list_environments( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_environments_rest_bad_request( + transport: str = "rest", request_type=environments.ListEnvironmentsRequest +): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # 
send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_environments(request) + + +def test_list_environments_rest_flattened(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = environments.ListEnvironmentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = environments.ListEnvironmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_environments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/environments" + % client.transport._host, + args[1], + ) + + +def test_list_environments_rest_flattened_error(transport: str = "rest"): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_environments( + environments.ListEnvironmentsRequest(), + parent="parent_value", + ) + + +def test_list_environments_rest_pager(transport: str = "rest"): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + environments.ListEnvironmentsResponse( + environments=[ + environments.Environment(), + environments.Environment(), + environments.Environment(), + ], + next_page_token="abc", + ), + environments.ListEnvironmentsResponse( + environments=[], + next_page_token="def", + ), + environments.ListEnvironmentsResponse( + environments=[ + environments.Environment(), + ], + next_page_token="ghi", + ), + environments.ListEnvironmentsResponse( + environments=[ + environments.Environment(), + environments.Environment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + environments.ListEnvironmentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = 
response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_environments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, environments.Environment) for i in results) + + pages = list(client.list_environments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + environments.UpdateEnvironmentRequest, + dict, + ], +) +def test_update_environment_rest(request_type): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/environments/sample3"} + request_init["environment"] = { + "name": "name_value", + "config": { + "gke_cluster": "gke_cluster_value", + "dag_gcs_prefix": "dag_gcs_prefix_value", + "node_count": 1070, + "software_config": { + "image_version": "image_version_value", + "airflow_config_overrides": {}, + "pypi_packages": {}, + "env_variables": {}, + "python_version": "python_version_value", + "scheduler_count": 1607, + }, + "node_config": { + "location": "location_value", + "machine_type": "machine_type_value", + "network": "network_value", + "subnetwork": "subnetwork_value", + "disk_size_gb": 1261, + "oauth_scopes": ["oauth_scopes_value1", "oauth_scopes_value2"], + "service_account": "service_account_value", + "tags": ["tags_value1", "tags_value2"], + "ip_allocation_policy": { + "use_ip_aliases": True, + "cluster_secondary_range_name": "cluster_secondary_range_name_value", + "cluster_ipv4_cidr_block": "cluster_ipv4_cidr_block_value", + "services_secondary_range_name": "services_secondary_range_name_value", + "services_ipv4_cidr_block": "services_ipv4_cidr_block_value", + }, + 
"enable_ip_masq_agent": True, + }, + "private_environment_config": { + "enable_private_environment": True, + "private_cluster_config": { + "enable_private_endpoint": True, + "master_ipv4_cidr_block": "master_ipv4_cidr_block_value", + "master_ipv4_reserved_range": "master_ipv4_reserved_range_value", + }, + "web_server_ipv4_cidr_block": "web_server_ipv4_cidr_block_value", + "cloud_sql_ipv4_cidr_block": "cloud_sql_ipv4_cidr_block_value", + "web_server_ipv4_reserved_range": "web_server_ipv4_reserved_range_value", + "cloud_composer_network_ipv4_cidr_block": "cloud_composer_network_ipv4_cidr_block_value", + "cloud_composer_network_ipv4_reserved_range": "cloud_composer_network_ipv4_reserved_range_value", + "enable_privately_used_public_ips": True, + "cloud_composer_connection_subnetwork": "cloud_composer_connection_subnetwork_value", + "networking_config": {"connection_type": 1}, + }, + "web_server_network_access_control": { + "allowed_ip_ranges": [ + {"value": "value_value", "description": "description_value"} + ] + }, + "database_config": {"machine_type": "machine_type_value"}, + "web_server_config": {"machine_type": "machine_type_value"}, + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "maintenance_window": { + "start_time": {"seconds": 751, "nanos": 543}, + "end_time": {}, + "recurrence": "recurrence_value", + }, + "workloads_config": { + "scheduler": { + "cpu": 0.328, + "memory_gb": 0.961, + "storage_gb": 0.1053, + "count": 553, + }, + "web_server": {"cpu": 0.328, "memory_gb": 0.961, "storage_gb": 0.1053}, + "worker": { + "cpu": 0.328, + "memory_gb": 0.961, + "storage_gb": 0.1053, + "min_count": 972, + "max_count": 974, + }, + }, + "environment_size": 1, + "airflow_uri": "airflow_uri_value", + "airflow_byoid_uri": "airflow_byoid_uri_value", + "master_authorized_networks_config": { + "enabled": True, + "cidr_blocks": [ + { + "display_name": "display_name_value", + "cidr_block": "cidr_block_value", + } + ], + }, + "recovery_config": { + 
"scheduled_snapshots_config": { + "enabled": True, + "snapshot_location": "snapshot_location_value", + "snapshot_creation_schedule": "snapshot_creation_schedule_value", + "time_zone": "time_zone_value", + } + }, + "resilience_mode": 1, + }, + "uuid": "uuid_value", + "state": 1, + "create_time": {}, + "update_time": {}, + "labels": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_environment(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_environment_rest_interceptors(null_interceptor): + transport = transports.EnvironmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EnvironmentsRestInterceptor(), + ) + client = EnvironmentsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_update_environment" + ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "pre_update_environment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = environments.UpdateEnvironmentRequest.pb( + environments.UpdateEnvironmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } req.return_value = Response() req.return_value.status_code = 200 @@ -2600,7 +4239,7 @@ def test_create_environment_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = environments.CreateEnvironmentRequest() + request = environments.UpdateEnvironmentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -2608,7 +4247,7 @@ def test_create_environment_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_environment( + client.update_environment( request, metadata=[ ("key", "val"), @@ -2620,8 +4259,8 @@ def test_create_environment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_environment_rest_bad_request( - transport: str = "rest", request_type=environments.CreateEnvironmentRequest +def 
test_update_environment_rest_bad_request( + transport: str = "rest", request_type=environments.UpdateEnvironmentRequest ): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2629,7 +4268,7 @@ def test_create_environment_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/environments/sample3"} request_init["environment"] = { "name": "name_value", "config": { @@ -2727,6 +4366,7 @@ def test_create_environment_rest_bad_request( "time_zone": "time_zone_value", } }, + "resilience_mode": 1, }, "uuid": "uuid_value", "state": 1, @@ -2745,10 +4385,10 @@ def test_create_environment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_environment(request) + client.update_environment(request) -def test_create_environment_rest_flattened(): +def test_update_environment_rest_flattened(): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2760,12 +4400,15 @@ def test_create_environment_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/environments/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", environment=environments.Environment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -2776,20 +4419,20 @@ def test_create_environment_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_environment(**mock_args) + 
client.update_environment(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/environments" + "%s/v1/{name=projects/*/locations/*/environments/*}" % client.transport._host, args[1], ) -def test_create_environment_rest_flattened_error(transport: str = "rest"): +def test_update_environment_rest_flattened_error(transport: str = "rest"): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2798,14 +4441,15 @@ def test_create_environment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_environment( - environments.CreateEnvironmentRequest(), - parent="parent_value", + client.update_environment( + environments.UpdateEnvironmentRequest(), + name="name_value", environment=environments.Environment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_environment_rest_error(): +def test_update_environment_rest_error(): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -2814,11 +4458,11 @@ def test_create_environment_rest_error(): @pytest.mark.parametrize( "request_type", [ - environments.GetEnvironmentRequest, + environments.DeleteEnvironmentRequest, dict, ], ) -def test_get_environment_rest(request_type): +def test_delete_environment_rest(request_type): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2831,31 +4475,23 @@ def test_get_environment_rest(request_type): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = environments.Environment( - name="name_value", - uuid="uuid_value", - state=environments.Environment.State.CREATING, - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = environments.Environment.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_environment(request) + response = client.delete_environment(request) # Establish that the response is the type that we expect. - assert isinstance(response, environments.Environment) - assert response.name == "name_value" - assert response.uuid == "uuid_value" - assert response.state == environments.Environment.State.CREATING + assert response.operation.name == "operations/spam" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_environment_rest_interceptors(null_interceptor): +def test_delete_environment_rest_interceptors(null_interceptor): transport = transports.EnvironmentsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -2868,14 +4504,16 @@ def test_get_environment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.EnvironmentsRestInterceptor, "post_get_environment" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_delete_environment" ) as post, mock.patch.object( - transports.EnvironmentsRestInterceptor, "pre_get_environment" + transports.EnvironmentsRestInterceptor, "pre_delete_environment" ) as pre: 
pre.assert_not_called() post.assert_not_called() - pb_message = environments.GetEnvironmentRequest.pb( - environments.GetEnvironmentRequest() + pb_message = environments.DeleteEnvironmentRequest.pb( + environments.DeleteEnvironmentRequest() ) transcode.return_value = { "method": "post", @@ -2887,19 +4525,19 @@ def test_get_environment_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = environments.Environment.to_json( - environments.Environment() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = environments.GetEnvironmentRequest() + request = environments.DeleteEnvironmentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = environments.Environment() + post.return_value = operations_pb2.Operation() - client.get_environment( + client.delete_environment( request, metadata=[ ("key", "val"), @@ -2911,8 +4549,8 @@ def test_get_environment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_environment_rest_bad_request( - transport: str = "rest", request_type=environments.GetEnvironmentRequest +def test_delete_environment_rest_bad_request( + transport: str = "rest", request_type=environments.DeleteEnvironmentRequest ): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2932,10 +4570,10 @@ def test_get_environment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_environment(request) + client.delete_environment(request) -def test_get_environment_rest_flattened(): +def test_delete_environment_rest_flattened(): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2944,7 +4582,7 @@ def test_get_environment_rest_flattened(): # Mock the http 
request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = environments.Environment() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { @@ -2960,12 +4598,11 @@ def test_get_environment_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = environments.Environment.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_environment(**mock_args) + client.delete_environment(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -2978,7 +4615,7 @@ def test_get_environment_rest_flattened(): ) -def test_get_environment_rest_flattened_error(transport: str = "rest"): +def test_delete_environment_rest_flattened_error(transport: str = "rest"): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2987,13 +4624,13 @@ def test_get_environment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_environment( - environments.GetEnvironmentRequest(), + client.delete_environment( + environments.DeleteEnvironmentRequest(), name="name_value", ) -def test_get_environment_rest_error(): +def test_delete_environment_rest_error(): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -3002,44 +4639,52 @@ def test_get_environment_rest_error(): @pytest.mark.parametrize( "request_type", [ - environments.ListEnvironmentsRequest, + environments.ExecuteAirflowCommandRequest, dict, ], ) -def test_list_environments_rest(request_type): +def test_execute_airflow_command_rest(request_type): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "environment": "projects/sample1/locations/sample2/environments/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = environments.ListEnvironmentsResponse( - next_page_token="next_page_token_value", + return_value = environments.ExecuteAirflowCommandResponse( + execution_id="execution_id_value", + pod="pod_value", + pod_namespace="pod_namespace_value", + error="error_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = environments.ListEnvironmentsResponse.pb(return_value) + pb_return_value = environments.ExecuteAirflowCommandResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_environments(request) + response = client.execute_airflow_command(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEnvironmentsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, environments.ExecuteAirflowCommandResponse) + assert response.execution_id == "execution_id_value" + assert response.pod == "pod_value" + assert response.pod_namespace == "pod_namespace_value" + assert response.error == "error_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_environments_rest_interceptors(null_interceptor): +def test_execute_airflow_command_rest_interceptors(null_interceptor): transport = transports.EnvironmentsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3052,14 +4697,14 @@ def test_list_environments_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.EnvironmentsRestInterceptor, "post_list_environments" + transports.EnvironmentsRestInterceptor, "post_execute_airflow_command" ) as post, mock.patch.object( - transports.EnvironmentsRestInterceptor, "pre_list_environments" + 
transports.EnvironmentsRestInterceptor, "pre_execute_airflow_command" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = environments.ListEnvironmentsRequest.pb( - environments.ListEnvironmentsRequest() + pb_message = environments.ExecuteAirflowCommandRequest.pb( + environments.ExecuteAirflowCommandRequest() ) transcode.return_value = { "method": "post", @@ -3071,19 +4716,19 @@ def test_list_environments_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = environments.ListEnvironmentsResponse.to_json( - environments.ListEnvironmentsResponse() + req.return_value._content = environments.ExecuteAirflowCommandResponse.to_json( + environments.ExecuteAirflowCommandResponse() ) - request = environments.ListEnvironmentsRequest() + request = environments.ExecuteAirflowCommandRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = environments.ListEnvironmentsResponse() + post.return_value = environments.ExecuteAirflowCommandResponse() - client.list_environments( + client.execute_airflow_command( request, metadata=[ ("key", "val"), @@ -3095,8 +4740,8 @@ def test_list_environments_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_environments_rest_bad_request( - transport: str = "rest", request_type=environments.ListEnvironmentsRequest +def test_execute_airflow_command_rest_bad_request( + transport: str = "rest", request_type=environments.ExecuteAirflowCommandRequest ): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3104,281 +4749,74 @@ def test_list_environments_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "environment": "projects/sample1/locations/sample2/environments/sample3" + } request = 
request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. with mock.patch.object(Session, "request") as req, pytest.raises( core_exceptions.BadRequest ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_environments(request) - - -def test_list_environments_rest_flattened(): - client = EnvironmentsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = environments.ListEnvironmentsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = environments.ListEnvironmentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_environments(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/environments" - % client.transport._host, - args[1], - ) - - -def test_list_environments_rest_flattened_error(transport: str = "rest"): - client = EnvironmentsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_environments( - environments.ListEnvironmentsRequest(), - parent="parent_value", - ) - - -def test_list_environments_rest_pager(transport: str = "rest"): - client = EnvironmentsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - environments.ListEnvironmentsResponse( - environments=[ - environments.Environment(), - environments.Environment(), - environments.Environment(), - ], - next_page_token="abc", - ), - environments.ListEnvironmentsResponse( - environments=[], - next_page_token="def", - ), - environments.ListEnvironmentsResponse( - environments=[ - environments.Environment(), - ], - next_page_token="ghi", - ), - environments.ListEnvironmentsResponse( - environments=[ - environments.Environment(), - environments.Environment(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - environments.ListEnvironmentsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = 
response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_environments(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, environments.Environment) for i in results) - - pages = list(client.list_environments(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - environments.UpdateEnvironmentRequest, - dict, - ], -) -def test_update_environment_rest(request_type): - client = EnvironmentsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/environments/sample3"} - request_init["environment"] = { - "name": "name_value", - "config": { - "gke_cluster": "gke_cluster_value", - "dag_gcs_prefix": "dag_gcs_prefix_value", - "node_count": 1070, - "software_config": { - "image_version": "image_version_value", - "airflow_config_overrides": {}, - "pypi_packages": {}, - "env_variables": {}, - "python_version": "python_version_value", - "scheduler_count": 1607, - }, - "node_config": { - "location": "location_value", - "machine_type": "machine_type_value", - "network": "network_value", - "subnetwork": "subnetwork_value", - "disk_size_gb": 1261, - "oauth_scopes": ["oauth_scopes_value1", "oauth_scopes_value2"], - "service_account": "service_account_value", - "tags": ["tags_value1", "tags_value2"], - "ip_allocation_policy": { - "use_ip_aliases": True, - "cluster_secondary_range_name": "cluster_secondary_range_name_value", - "cluster_ipv4_cidr_block": "cluster_ipv4_cidr_block_value", - "services_secondary_range_name": "services_secondary_range_name_value", - "services_ipv4_cidr_block": "services_ipv4_cidr_block_value", - }, - 
"enable_ip_masq_agent": True, - }, - "private_environment_config": { - "enable_private_environment": True, - "private_cluster_config": { - "enable_private_endpoint": True, - "master_ipv4_cidr_block": "master_ipv4_cidr_block_value", - "master_ipv4_reserved_range": "master_ipv4_reserved_range_value", - }, - "web_server_ipv4_cidr_block": "web_server_ipv4_cidr_block_value", - "cloud_sql_ipv4_cidr_block": "cloud_sql_ipv4_cidr_block_value", - "web_server_ipv4_reserved_range": "web_server_ipv4_reserved_range_value", - "cloud_composer_network_ipv4_cidr_block": "cloud_composer_network_ipv4_cidr_block_value", - "cloud_composer_network_ipv4_reserved_range": "cloud_composer_network_ipv4_reserved_range_value", - "enable_privately_used_public_ips": True, - "cloud_composer_connection_subnetwork": "cloud_composer_connection_subnetwork_value", - "networking_config": {"connection_type": 1}, - }, - "web_server_network_access_control": { - "allowed_ip_ranges": [ - {"value": "value_value", "description": "description_value"} - ] - }, - "database_config": {"machine_type": "machine_type_value"}, - "web_server_config": {"machine_type": "machine_type_value"}, - "encryption_config": {"kms_key_name": "kms_key_name_value"}, - "maintenance_window": { - "start_time": {"seconds": 751, "nanos": 543}, - "end_time": {}, - "recurrence": "recurrence_value", - }, - "workloads_config": { - "scheduler": { - "cpu": 0.328, - "memory_gb": 0.961, - "storage_gb": 0.1053, - "count": 553, - }, - "web_server": {"cpu": 0.328, "memory_gb": 0.961, "storage_gb": 0.1053}, - "worker": { - "cpu": 0.328, - "memory_gb": 0.961, - "storage_gb": 0.1053, - "min_count": 972, - "max_count": 974, - }, - }, - "environment_size": 1, - "airflow_uri": "airflow_uri_value", - "airflow_byoid_uri": "airflow_byoid_uri_value", - "master_authorized_networks_config": { - "enabled": True, - "cidr_blocks": [ - { - "display_name": "display_name_value", - "cidr_block": "cidr_block_value", - } - ], - }, - "recovery_config": { - 
"scheduled_snapshots_config": { - "enabled": True, - "snapshot_location": "snapshot_location_value", - "snapshot_creation_schedule": "snapshot_creation_schedule_value", - "time_zone": "time_zone_value", - } - }, - }, - "uuid": "uuid_value", - "state": 1, - "create_time": {}, - "update_time": {}, - "labels": {}, + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.execute_airflow_command(request) + + +def test_execute_airflow_command_rest_error(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + environments.StopAirflowCommandRequest, + dict, + ], +) +def test_stop_airflow_command_rest(request_type): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "environment": "projects/sample1/locations/sample2/environments/sample3" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = environments.StopAirflowCommandResponse( + is_done=True, + output=["output_value"], + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = environments.StopAirflowCommandResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_environment(request) + response = client.stop_airflow_command(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, environments.StopAirflowCommandResponse) + assert response.is_done is True + assert response.output == ["output_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_environment_rest_interceptors(null_interceptor): +def test_stop_airflow_command_rest_interceptors(null_interceptor): transport = transports.EnvironmentsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3391,16 +4829,14 @@ def test_update_environment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.EnvironmentsRestInterceptor, "post_update_environment" + transports.EnvironmentsRestInterceptor, "post_stop_airflow_command" ) as post, mock.patch.object( - transports.EnvironmentsRestInterceptor, "pre_update_environment" + transports.EnvironmentsRestInterceptor, "pre_stop_airflow_command" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = environments.UpdateEnvironmentRequest.pb( - environments.UpdateEnvironmentRequest() + pb_message = 
environments.StopAirflowCommandRequest.pb( + environments.StopAirflowCommandRequest() ) transcode.return_value = { "method": "post", @@ -3412,19 +4848,19 @@ def test_update_environment_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = environments.StopAirflowCommandResponse.to_json( + environments.StopAirflowCommandResponse() ) - request = environments.UpdateEnvironmentRequest() + request = environments.StopAirflowCommandRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = environments.StopAirflowCommandResponse() - client.update_environment( + client.stop_airflow_command( request, metadata=[ ("key", "val"), @@ -3436,8 +4872,8 @@ def test_update_environment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_environment_rest_bad_request( - transport: str = "rest", request_type=environments.UpdateEnvironmentRequest +def test_stop_airflow_command_rest_bad_request( + transport: str = "rest", request_type=environments.StopAirflowCommandRequest ): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3445,110 +4881,8 @@ def test_update_environment_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/environments/sample3"} - request_init["environment"] = { - "name": "name_value", - "config": { - "gke_cluster": "gke_cluster_value", - "dag_gcs_prefix": "dag_gcs_prefix_value", - "node_count": 1070, - "software_config": { - "image_version": "image_version_value", - "airflow_config_overrides": {}, - "pypi_packages": {}, - "env_variables": {}, - "python_version": "python_version_value", - "scheduler_count": 1607, - }, 
- "node_config": { - "location": "location_value", - "machine_type": "machine_type_value", - "network": "network_value", - "subnetwork": "subnetwork_value", - "disk_size_gb": 1261, - "oauth_scopes": ["oauth_scopes_value1", "oauth_scopes_value2"], - "service_account": "service_account_value", - "tags": ["tags_value1", "tags_value2"], - "ip_allocation_policy": { - "use_ip_aliases": True, - "cluster_secondary_range_name": "cluster_secondary_range_name_value", - "cluster_ipv4_cidr_block": "cluster_ipv4_cidr_block_value", - "services_secondary_range_name": "services_secondary_range_name_value", - "services_ipv4_cidr_block": "services_ipv4_cidr_block_value", - }, - "enable_ip_masq_agent": True, - }, - "private_environment_config": { - "enable_private_environment": True, - "private_cluster_config": { - "enable_private_endpoint": True, - "master_ipv4_cidr_block": "master_ipv4_cidr_block_value", - "master_ipv4_reserved_range": "master_ipv4_reserved_range_value", - }, - "web_server_ipv4_cidr_block": "web_server_ipv4_cidr_block_value", - "cloud_sql_ipv4_cidr_block": "cloud_sql_ipv4_cidr_block_value", - "web_server_ipv4_reserved_range": "web_server_ipv4_reserved_range_value", - "cloud_composer_network_ipv4_cidr_block": "cloud_composer_network_ipv4_cidr_block_value", - "cloud_composer_network_ipv4_reserved_range": "cloud_composer_network_ipv4_reserved_range_value", - "enable_privately_used_public_ips": True, - "cloud_composer_connection_subnetwork": "cloud_composer_connection_subnetwork_value", - "networking_config": {"connection_type": 1}, - }, - "web_server_network_access_control": { - "allowed_ip_ranges": [ - {"value": "value_value", "description": "description_value"} - ] - }, - "database_config": {"machine_type": "machine_type_value"}, - "web_server_config": {"machine_type": "machine_type_value"}, - "encryption_config": {"kms_key_name": "kms_key_name_value"}, - "maintenance_window": { - "start_time": {"seconds": 751, "nanos": 543}, - "end_time": {}, - "recurrence": 
"recurrence_value", - }, - "workloads_config": { - "scheduler": { - "cpu": 0.328, - "memory_gb": 0.961, - "storage_gb": 0.1053, - "count": 553, - }, - "web_server": {"cpu": 0.328, "memory_gb": 0.961, "storage_gb": 0.1053}, - "worker": { - "cpu": 0.328, - "memory_gb": 0.961, - "storage_gb": 0.1053, - "min_count": 972, - "max_count": 974, - }, - }, - "environment_size": 1, - "airflow_uri": "airflow_uri_value", - "airflow_byoid_uri": "airflow_byoid_uri_value", - "master_authorized_networks_config": { - "enabled": True, - "cidr_blocks": [ - { - "display_name": "display_name_value", - "cidr_block": "cidr_block_value", - } - ], - }, - "recovery_config": { - "scheduled_snapshots_config": { - "enabled": True, - "snapshot_location": "snapshot_location_value", - "snapshot_creation_schedule": "snapshot_creation_schedule_value", - "time_zone": "time_zone_value", - } - }, - }, - "uuid": "uuid_value", - "state": 1, - "create_time": {}, - "update_time": {}, - "labels": {}, + request_init = { + "environment": "projects/sample1/locations/sample2/environments/sample3" } request = request_type(**request_init) @@ -3561,71 +4895,140 @@ def test_update_environment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_environment(request) + client.stop_airflow_command(request) -def test_update_environment_rest_flattened(): +def test_stop_airflow_command_rest_error(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + environments.PollAirflowCommandRequest, + dict, + ], +) +def test_poll_airflow_command_rest(request_type): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) + # send a request that will satisfy transcoding + request_init = { + "environment": "projects/sample1/locations/sample2/environments/sample3" + } + request = 
request_type(**request_init) + # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/environments/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - environment=environments.Environment(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + return_value = environments.PollAirflowCommandResponse( + output_end=True, ) - mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = environments.PollAirflowCommandResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + response = client.poll_airflow_command(request) - client.update_environment(**mock_args) + # Establish that the response is the type that we expect. + assert isinstance(response, environments.PollAirflowCommandResponse) + assert response.output_end is True - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/environments/*}" - % client.transport._host, - args[1], + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_poll_airflow_command_rest_interceptors(null_interceptor): + transport = transports.EnvironmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EnvironmentsRestInterceptor(), + ) + client = EnvironmentsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_poll_airflow_command" + ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "pre_poll_airflow_command" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = environments.PollAirflowCommandRequest.pb( + environments.PollAirflowCommandRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = environments.PollAirflowCommandResponse.to_json( + environments.PollAirflowCommandResponse() ) + request = environments.PollAirflowCommandRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = environments.PollAirflowCommandResponse() -def test_update_environment_rest_flattened_error(transport: str = "rest"): + client.poll_airflow_command( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_poll_airflow_command_rest_bad_request( + transport: str = "rest", 
request_type=environments.PollAirflowCommandRequest +): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_environment( - environments.UpdateEnvironmentRequest(), - name="name_value", - environment=environments.Environment(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + # send a request that will satisfy transcoding + request_init = { + "environment": "projects/sample1/locations/sample2/environments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.poll_airflow_command(request) -def test_update_environment_rest_error(): +def test_poll_airflow_command_rest_error(): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -3634,18 +5037,20 @@ def test_update_environment_rest_error(): @pytest.mark.parametrize( "request_type", [ - environments.DeleteEnvironmentRequest, + environments.SaveSnapshotRequest, dict, ], ) -def test_delete_environment_rest(request_type): +def test_save_snapshot_rest(request_type): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/environments/sample3"} + request_init = { + "environment": "projects/sample1/locations/sample2/environments/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and 
fake a response. @@ -3660,14 +5065,14 @@ def test_delete_environment_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_environment(request) + response = client.save_snapshot(request) # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_environment_rest_interceptors(null_interceptor): +def test_save_snapshot_rest_interceptors(null_interceptor): transport = transports.EnvironmentsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3682,14 +5087,14 @@ def test_delete_environment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.EnvironmentsRestInterceptor, "post_delete_environment" + transports.EnvironmentsRestInterceptor, "post_save_snapshot" ) as post, mock.patch.object( - transports.EnvironmentsRestInterceptor, "pre_delete_environment" + transports.EnvironmentsRestInterceptor, "pre_save_snapshot" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = environments.DeleteEnvironmentRequest.pb( - environments.DeleteEnvironmentRequest() + pb_message = environments.SaveSnapshotRequest.pb( + environments.SaveSnapshotRequest() ) transcode.return_value = { "method": "post", @@ -3705,7 +5110,7 @@ def test_delete_environment_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = environments.DeleteEnvironmentRequest() + request = environments.SaveSnapshotRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -3713,7 +5118,7 @@ def test_delete_environment_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.delete_environment( + client.save_snapshot( request, metadata=[ ("key", "val"), 
@@ -3725,8 +5130,8 @@ def test_delete_environment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_environment_rest_bad_request( - transport: str = "rest", request_type=environments.DeleteEnvironmentRequest +def test_save_snapshot_rest_bad_request( + transport: str = "rest", request_type=environments.SaveSnapshotRequest ): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3734,7 +5139,9 @@ def test_delete_environment_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/environments/sample3"} + request_init = { + "environment": "projects/sample1/locations/sample2/environments/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3746,67 +5153,138 @@ def test_delete_environment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_environment(request) + client.save_snapshot(request) -def test_delete_environment_rest_flattened(): +def test_save_snapshot_rest_error(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + environments.LoadSnapshotRequest, + dict, + ], +) +def test_load_snapshot_rest(request_type): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) + # send a request that will satisfy transcoding + request_init = { + "environment": "projects/sample1/locations/sample2/environments/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = operations_pb2.Operation(name="operations/spam") - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/environments/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + response = client.load_snapshot(request) - client.delete_environment(**mock_args) + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/environments/*}" - % client.transport._host, - args[1], + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_load_snapshot_rest_interceptors(null_interceptor): + transport = transports.EnvironmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EnvironmentsRestInterceptor(), + ) + client = EnvironmentsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_load_snapshot" + ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "pre_load_snapshot" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = environments.LoadSnapshotRequest.pb( + 
environments.LoadSnapshotRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) + request = environments.LoadSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() -def test_delete_environment_rest_flattened_error(transport: str = "rest"): + client.load_snapshot( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_load_snapshot_rest_bad_request( + transport: str = "rest", request_type=environments.LoadSnapshotRequest +): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_environment( - environments.DeleteEnvironmentRequest(), - name="name_value", - ) + # send a request that will satisfy transcoding + request_init = { + "environment": "projects/sample1/locations/sample2/environments/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.load_snapshot(request) -def test_delete_environment_rest_error(): + +def test_load_snapshot_rest_error(): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -3815,11 +5293,11 @@ def test_delete_environment_rest_error(): @pytest.mark.parametrize( "request_type", [ - environments.SaveSnapshotRequest, + environments.DatabaseFailoverRequest, dict, ], ) -def test_save_snapshot_rest(request_type): +def test_database_failover_rest(request_type): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3843,14 +5321,14 @@ def test_save_snapshot_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.save_snapshot(request) + response = client.database_failover(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_save_snapshot_rest_interceptors(null_interceptor): +def test_database_failover_rest_interceptors(null_interceptor): transport = transports.EnvironmentsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3865,14 +5343,14 @@ def test_save_snapshot_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.EnvironmentsRestInterceptor, "post_save_snapshot" + transports.EnvironmentsRestInterceptor, "post_database_failover" ) as post, mock.patch.object( - transports.EnvironmentsRestInterceptor, "pre_save_snapshot" + transports.EnvironmentsRestInterceptor, "pre_database_failover" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = environments.SaveSnapshotRequest.pb( - environments.SaveSnapshotRequest() + pb_message = environments.DatabaseFailoverRequest.pb( + environments.DatabaseFailoverRequest() ) transcode.return_value = { "method": "post", @@ -3888,7 +5366,7 @@ def test_save_snapshot_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = environments.SaveSnapshotRequest() + request = environments.DatabaseFailoverRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -3896,7 +5374,7 @@ def test_save_snapshot_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.save_snapshot( + client.database_failover( request, metadata=[ ("key", "val"), @@ -3908,8 +5386,8 @@ def test_save_snapshot_rest_interceptors(null_interceptor): post.assert_called_once() -def test_save_snapshot_rest_bad_request( - transport: str = "rest", request_type=environments.SaveSnapshotRequest +def test_database_failover_rest_bad_request( + transport: str = "rest", request_type=environments.DatabaseFailoverRequest ): client 
= EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3931,10 +5409,10 @@ def test_save_snapshot_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.save_snapshot(request) + client.database_failover(request) -def test_save_snapshot_rest_error(): +def test_database_failover_rest_error(): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -3943,11 +5421,11 @@ def test_save_snapshot_rest_error(): @pytest.mark.parametrize( "request_type", [ - environments.LoadSnapshotRequest, + environments.FetchDatabasePropertiesRequest, dict, ], ) -def test_load_snapshot_rest(request_type): +def test_fetch_database_properties_rest(request_type): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3962,23 +5440,119 @@ def test_load_snapshot_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = environments.FetchDatabasePropertiesResponse( + primary_gce_zone="primary_gce_zone_value", + secondary_gce_zone="secondary_gce_zone_value", + is_failover_replica_available=True, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = environments.FetchDatabasePropertiesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.load_snapshot(request) + response = client.fetch_database_properties(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, environments.FetchDatabasePropertiesResponse) + assert response.primary_gce_zone == "primary_gce_zone_value" + assert response.secondary_gce_zone == "secondary_gce_zone_value" + assert response.is_failover_replica_available is True + + +def test_fetch_database_properties_rest_required_fields( + request_type=environments.FetchDatabasePropertiesRequest, +): + transport_class = transports.EnvironmentsRestTransport + + request_init = {} + request_init["environment"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_database_properties._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["environment"] = "environment_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_database_properties._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "environment" in jsonified_request + assert jsonified_request["environment"] == "environment_value" + + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = environments.FetchDatabasePropertiesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = environments.FetchDatabasePropertiesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.fetch_database_properties(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_fetch_database_properties_rest_unset_required_fields(): + transport = transports.EnvironmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.fetch_database_properties._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("environment",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_load_snapshot_rest_interceptors(null_interceptor): +def test_fetch_database_properties_rest_interceptors(null_interceptor): transport = transports.EnvironmentsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3991,16 +5565,14 @@ def test_load_snapshot_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), 
mock.patch.object( - transports.EnvironmentsRestInterceptor, "post_load_snapshot" + transports.EnvironmentsRestInterceptor, "post_fetch_database_properties" ) as post, mock.patch.object( - transports.EnvironmentsRestInterceptor, "pre_load_snapshot" + transports.EnvironmentsRestInterceptor, "pre_fetch_database_properties" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = environments.LoadSnapshotRequest.pb( - environments.LoadSnapshotRequest() + pb_message = environments.FetchDatabasePropertiesRequest.pb( + environments.FetchDatabasePropertiesRequest() ) transcode.return_value = { "method": "post", @@ -4012,19 +5584,21 @@ def test_load_snapshot_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = ( + environments.FetchDatabasePropertiesResponse.to_json( + environments.FetchDatabasePropertiesResponse() + ) ) - request = environments.LoadSnapshotRequest() + request = environments.FetchDatabasePropertiesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = environments.FetchDatabasePropertiesResponse() - client.load_snapshot( + client.fetch_database_properties( request, metadata=[ ("key", "val"), @@ -4036,8 +5610,8 @@ def test_load_snapshot_rest_interceptors(null_interceptor): post.assert_called_once() -def test_load_snapshot_rest_bad_request( - transport: str = "rest", request_type=environments.LoadSnapshotRequest +def test_fetch_database_properties_rest_bad_request( + transport: str = "rest", request_type=environments.FetchDatabasePropertiesRequest ): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4059,10 +5633,10 @@ def test_load_snapshot_rest_bad_request( response_value.status_code 
= 400 response_value.request = Request() req.return_value = response_value - client.load_snapshot(request) + client.fetch_database_properties(request) -def test_load_snapshot_rest_error(): +def test_fetch_database_properties_rest_error(): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4212,8 +5786,13 @@ def test_environments_base_transport(): "list_environments", "update_environment", "delete_environment", + "execute_airflow_command", + "stop_airflow_command", + "poll_airflow_command", "save_snapshot", "load_snapshot", + "database_failover", + "fetch_database_properties", "get_operation", "delete_operation", "list_operations", @@ -4509,12 +6088,27 @@ def test_environments_client_transport_session_collision(transport_name): session1 = client1.transport.delete_environment._session session2 = client2.transport.delete_environment._session assert session1 != session2 + session1 = client1.transport.execute_airflow_command._session + session2 = client2.transport.execute_airflow_command._session + assert session1 != session2 + session1 = client1.transport.stop_airflow_command._session + session2 = client2.transport.stop_airflow_command._session + assert session1 != session2 + session1 = client1.transport.poll_airflow_command._session + session2 = client2.transport.poll_airflow_command._session + assert session1 != session2 session1 = client1.transport.save_snapshot._session session2 = client2.transport.save_snapshot._session assert session1 != session2 session1 = client1.transport.load_snapshot._session session2 = client2.transport.load_snapshot._session assert session1 != session2 + session1 = client1.transport.database_failover._session + session2 = client2.transport.database_failover._session + assert session1 != session2 + session1 = client1.transport.fetch_database_properties._session + session2 = client2.transport.fetch_database_properties._session + assert session1 != session2 def 
test_environments_grpc_transport_channel(): diff --git a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_environments.py b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_environments.py index 1d04aeb1d1e0..2e0991985c5d 100644 --- a/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_environments.py +++ b/packages/google-cloud-orchestration-airflow/tests/unit/gapic/service_v1beta1/test_environments.py @@ -2438,11 +2438,11 @@ async def test_check_upgrade_field_headers_async(): @pytest.mark.parametrize( "request_type", [ - environments.SaveSnapshotRequest, + environments.ExecuteAirflowCommandRequest, dict, ], ) -def test_save_snapshot(request_type, transport: str = "grpc"): +def test_execute_airflow_command(request_type, transport: str = "grpc"): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2453,21 +2453,32 @@ def test_save_snapshot(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + with mock.patch.object( + type(client.transport.execute_airflow_command), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.save_snapshot(request) + call.return_value = environments.ExecuteAirflowCommandResponse( + execution_id="execution_id_value", + pod="pod_value", + pod_namespace="pod_namespace_value", + error="error_value", + ) + response = client.execute_airflow_command(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == environments.SaveSnapshotRequest() + assert args[0] == environments.ExecuteAirflowCommandRequest() # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, environments.ExecuteAirflowCommandResponse) + assert response.execution_id == "execution_id_value" + assert response.pod == "pod_value" + assert response.pod_namespace == "pod_namespace_value" + assert response.error == "error_value" -def test_save_snapshot_empty_call(): +def test_execute_airflow_command_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = EnvironmentsClient( @@ -2476,16 +2487,19 @@ def test_save_snapshot_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: - client.save_snapshot() + with mock.patch.object( + type(client.transport.execute_airflow_command), "__call__" + ) as call: + client.execute_airflow_command() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == environments.SaveSnapshotRequest() + assert args[0] == environments.ExecuteAirflowCommandRequest() @pytest.mark.asyncio -async def test_save_snapshot_async( - transport: str = "grpc_asyncio", request_type=environments.SaveSnapshotRequest +async def test_execute_airflow_command_async( + transport: str = "grpc_asyncio", + request_type=environments.ExecuteAirflowCommandRequest, ): client = EnvironmentsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2497,42 +2511,55 @@ async def test_save_snapshot_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + with mock.patch.object( + type(client.transport.execute_airflow_command), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + environments.ExecuteAirflowCommandResponse( + execution_id="execution_id_value", + pod="pod_value", + pod_namespace="pod_namespace_value", + error="error_value", + ) ) - response = await client.save_snapshot(request) + response = await client.execute_airflow_command(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == environments.SaveSnapshotRequest() + assert args[0] == environments.ExecuteAirflowCommandRequest() # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, environments.ExecuteAirflowCommandResponse) + assert response.execution_id == "execution_id_value" + assert response.pod == "pod_value" + assert response.pod_namespace == "pod_namespace_value" + assert response.error == "error_value" @pytest.mark.asyncio -async def test_save_snapshot_async_from_dict(): - await test_save_snapshot_async(request_type=dict) +async def test_execute_airflow_command_async_from_dict(): + await test_execute_airflow_command_async(request_type=dict) -def test_save_snapshot_field_headers(): +def test_execute_airflow_command_field_headers(): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = environments.SaveSnapshotRequest() + request = environments.ExecuteAirflowCommandRequest() request.environment = "environment_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.save_snapshot(request) + with mock.patch.object( + type(client.transport.execute_airflow_command), "__call__" + ) as call: + call.return_value = environments.ExecuteAirflowCommandResponse() + client.execute_airflow_command(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2548,23 +2575,25 @@ def test_save_snapshot_field_headers(): @pytest.mark.asyncio -async def test_save_snapshot_field_headers_async(): +async def test_execute_airflow_command_field_headers_async(): client = EnvironmentsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = environments.SaveSnapshotRequest() + request = environments.ExecuteAirflowCommandRequest() request.environment = "environment_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + with mock.patch.object( + type(client.transport.execute_airflow_command), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + environments.ExecuteAirflowCommandResponse() ) - await client.save_snapshot(request) + await client.execute_airflow_command(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -2582,11 +2611,11 @@ async def test_save_snapshot_field_headers_async(): @pytest.mark.parametrize( "request_type", [ - environments.LoadSnapshotRequest, + environments.StopAirflowCommandRequest, dict, ], ) -def test_load_snapshot(request_type, transport: str = "grpc"): +def test_stop_airflow_command(request_type, transport: str = "grpc"): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2597,21 +2626,28 @@ def test_load_snapshot(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + with mock.patch.object( + type(client.transport.stop_airflow_command), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.load_snapshot(request) + call.return_value = environments.StopAirflowCommandResponse( + is_done=True, + output=["output_value"], + ) + response = client.stop_airflow_command(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == environments.LoadSnapshotRequest() + assert args[0] == environments.StopAirflowCommandRequest() # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, environments.StopAirflowCommandResponse) + assert response.is_done is True + assert response.output == ["output_value"] -def test_load_snapshot_empty_call(): +def test_stop_airflow_command_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EnvironmentsClient( @@ -2620,16 +2656,18 @@ def test_load_snapshot_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: - client.load_snapshot() + with mock.patch.object( + type(client.transport.stop_airflow_command), "__call__" + ) as call: + client.stop_airflow_command() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == environments.LoadSnapshotRequest() + assert args[0] == environments.StopAirflowCommandRequest() @pytest.mark.asyncio -async def test_load_snapshot_async( - transport: str = "grpc_asyncio", request_type=environments.LoadSnapshotRequest +async def test_stop_airflow_command_async( + transport: str = "grpc_asyncio", request_type=environments.StopAirflowCommandRequest ): client = EnvironmentsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2641,42 +2679,51 @@ async def test_load_snapshot_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + with mock.patch.object( + type(client.transport.stop_airflow_command), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + environments.StopAirflowCommandResponse( + is_done=True, + output=["output_value"], + ) ) - response = await client.load_snapshot(request) + response = await client.stop_airflow_command(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == environments.LoadSnapshotRequest() + assert args[0] == environments.StopAirflowCommandRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, environments.StopAirflowCommandResponse) + assert response.is_done is True + assert response.output == ["output_value"] @pytest.mark.asyncio -async def test_load_snapshot_async_from_dict(): - await test_load_snapshot_async(request_type=dict) +async def test_stop_airflow_command_async_from_dict(): + await test_stop_airflow_command_async(request_type=dict) -def test_load_snapshot_field_headers(): +def test_stop_airflow_command_field_headers(): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = environments.LoadSnapshotRequest() + request = environments.StopAirflowCommandRequest() request.environment = "environment_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.load_snapshot(request) + with mock.patch.object( + type(client.transport.stop_airflow_command), "__call__" + ) as call: + call.return_value = environments.StopAirflowCommandResponse() + client.stop_airflow_command(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2692,23 +2739,25 @@ def test_load_snapshot_field_headers(): @pytest.mark.asyncio -async def test_load_snapshot_field_headers_async(): +async def test_stop_airflow_command_field_headers_async(): client = EnvironmentsAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = environments.LoadSnapshotRequest() + request = environments.StopAirflowCommandRequest() request.environment = "environment_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + with mock.patch.object( + type(client.transport.stop_airflow_command), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + environments.StopAirflowCommandResponse() ) - await client.load_snapshot(request) + await client.stop_airflow_command(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -2726,185 +2775,1863 @@ async def test_load_snapshot_field_headers_async(): @pytest.mark.parametrize( "request_type", [ - environments.CreateEnvironmentRequest, + environments.PollAirflowCommandRequest, dict, ], ) -def test_create_environment_rest(request_type): +def test_poll_airflow_command(request_type, transport: str = "grpc"): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["environment"] = { - "name": "name_value", - "config": { - "gke_cluster": "gke_cluster_value", - "dag_gcs_prefix": "dag_gcs_prefix_value", - "node_count": 1070, - "software_config": { - "image_version": "image_version_value", - "airflow_config_overrides": {}, - "pypi_packages": {}, - "env_variables": {}, - "python_version": "python_version_value", - "scheduler_count": 1607, - "cloud_data_lineage_integration": {"enabled": True}, - }, - "node_config": { - "location": "location_value", - "machine_type": "machine_type_value", - "network": "network_value", - "subnetwork": "subnetwork_value", - "disk_size_gb": 1261, - "oauth_scopes": ["oauth_scopes_value1", "oauth_scopes_value2"], - 
"service_account": "service_account_value", - "tags": ["tags_value1", "tags_value2"], - "ip_allocation_policy": { - "use_ip_aliases": True, - "cluster_secondary_range_name": "cluster_secondary_range_name_value", - "services_secondary_range_name": "services_secondary_range_name_value", - "cluster_ipv4_cidr_block": "cluster_ipv4_cidr_block_value", - "services_ipv4_cidr_block": "services_ipv4_cidr_block_value", - }, - "max_pods_per_node": 1798, - "enable_ip_masq_agent": True, - }, - "private_environment_config": { - "enable_private_environment": True, - "private_cluster_config": { - "enable_private_endpoint": True, - "master_ipv4_cidr_block": "master_ipv4_cidr_block_value", - "master_ipv4_reserved_range": "master_ipv4_reserved_range_value", - }, - "web_server_ipv4_cidr_block": "web_server_ipv4_cidr_block_value", - "cloud_sql_ipv4_cidr_block": "cloud_sql_ipv4_cidr_block_value", - "web_server_ipv4_reserved_range": "web_server_ipv4_reserved_range_value", - "cloud_composer_network_ipv4_cidr_block": "cloud_composer_network_ipv4_cidr_block_value", - "cloud_composer_network_ipv4_reserved_range": "cloud_composer_network_ipv4_reserved_range_value", - "enable_privately_used_public_ips": True, - "cloud_composer_connection_subnetwork": "cloud_composer_connection_subnetwork_value", - "networking_config": {"connection_type": 1}, - }, - "web_server_network_access_control": { - "allowed_ip_ranges": [ - {"value": "value_value", "description": "description_value"} - ] - }, - "database_config": {"machine_type": "machine_type_value"}, - "web_server_config": {"machine_type": "machine_type_value"}, - "airflow_uri": "airflow_uri_value", - "airflow_byoid_uri": "airflow_byoid_uri_value", - "encryption_config": {"kms_key_name": "kms_key_name_value"}, - "maintenance_window": { - "start_time": {"seconds": 751, "nanos": 543}, - "end_time": {}, - "recurrence": "recurrence_value", - }, - "workloads_config": { - "scheduler": { - "cpu": 0.328, - "memory_gb": 0.961, - "storage_gb": 0.1053, - "count": 
553, - }, - "web_server": {"cpu": 0.328, "memory_gb": 0.961, "storage_gb": 0.1053}, - "worker": { - "cpu": 0.328, - "memory_gb": 0.961, - "storage_gb": 0.1053, - "min_count": 972, - "max_count": 974, - }, - "triggerer": {"count": 553, "cpu": 0.328, "memory_gb": 0.961}, - }, - "environment_size": 1, - "master_authorized_networks_config": { - "enabled": True, - "cidr_blocks": [ - { - "display_name": "display_name_value", - "cidr_block": "cidr_block_value", - } - ], - }, - "recovery_config": { - "scheduled_snapshots_config": { - "enabled": True, - "snapshot_location": "snapshot_location_value", - "snapshot_creation_schedule": "snapshot_creation_schedule_value", - "time_zone": "time_zone_value", - } - }, - }, - "uuid": "uuid_value", - "state": 1, - "create_time": {}, - "update_time": {}, - "labels": {}, - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.poll_airflow_command), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = environments.PollAirflowCommandResponse( + output_end=True, + ) + response = client.poll_airflow_command(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_environment(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == environments.PollAirflowCommandRequest() # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, environments.PollAirflowCommandResponse) + assert response.output_end is True -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_environment_rest_interceptors(null_interceptor): - transport = transports.EnvironmentsRestTransport( +def test_poll_airflow_command_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EnvironmentsRestInterceptor(), + transport="grpc", ) - client = EnvironmentsClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.EnvironmentsRestInterceptor, "post_create_environment" - ) as post, mock.patch.object( - transports.EnvironmentsRestInterceptor, "pre_create_environment" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = environments.CreateEnvironmentRequest.pb( - environments.CreateEnvironmentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.poll_airflow_command), "__call__" + ) as call: + client.poll_airflow_command() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == environments.PollAirflowCommandRequest() + + +@pytest.mark.asyncio +async def test_poll_airflow_command_async( + transport: str = "grpc_asyncio", request_type=environments.PollAirflowCommandRequest +): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.poll_airflow_command), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + environments.PollAirflowCommandResponse( + output_end=True, + ) + ) + response = await client.poll_airflow_command(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == environments.PollAirflowCommandRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, environments.PollAirflowCommandResponse) + assert response.output_end is True + + +@pytest.mark.asyncio +async def test_poll_airflow_command_async_from_dict(): + await test_poll_airflow_command_async(request_type=dict) + + +def test_poll_airflow_command_field_headers(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.PollAirflowCommandRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.poll_airflow_command), "__call__" + ) as call: + call.return_value = environments.PollAirflowCommandResponse() + client.poll_airflow_command(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_poll_airflow_command_field_headers_async(): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = environments.PollAirflowCommandRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.poll_airflow_command), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + environments.PollAirflowCommandResponse() + ) + await client.poll_airflow_command(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + environments.SaveSnapshotRequest, + dict, + ], +) +def test_save_snapshot(request_type, transport: str = "grpc"): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.save_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == environments.SaveSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_save_snapshot_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + client.save_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == environments.SaveSnapshotRequest() + + +@pytest.mark.asyncio +async def test_save_snapshot_async( + transport: str = "grpc_asyncio", request_type=environments.SaveSnapshotRequest +): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.save_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == environments.SaveSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_save_snapshot_async_from_dict(): + await test_save_snapshot_async(request_type=dict) + + +def test_save_snapshot_field_headers(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = environments.SaveSnapshotRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.save_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_save_snapshot_field_headers_async(): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.SaveSnapshotRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.save_snapshot), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.save_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + environments.LoadSnapshotRequest, + dict, + ], +) +def test_load_snapshot(request_type, transport: str = "grpc"): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.load_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == environments.LoadSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_load_snapshot_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + client.load_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == environments.LoadSnapshotRequest() + + +@pytest.mark.asyncio +async def test_load_snapshot_async( + transport: str = "grpc_asyncio", request_type=environments.LoadSnapshotRequest +): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.load_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == environments.LoadSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_load_snapshot_async_from_dict(): + await test_load_snapshot_async(request_type=dict) + + +def test_load_snapshot_field_headers(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.LoadSnapshotRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.load_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_load_snapshot_field_headers_async(): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.LoadSnapshotRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.load_snapshot), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.load_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + environments.DatabaseFailoverRequest, + dict, + ], +) +def test_database_failover(request_type, transport: str = "grpc"): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.database_failover), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.database_failover(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == environments.DatabaseFailoverRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_database_failover_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.database_failover), "__call__" + ) as call: + client.database_failover() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == environments.DatabaseFailoverRequest() + + +@pytest.mark.asyncio +async def test_database_failover_async( + transport: str = "grpc_asyncio", request_type=environments.DatabaseFailoverRequest +): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.database_failover), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.database_failover(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == environments.DatabaseFailoverRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_database_failover_async_from_dict(): + await test_database_failover_async(request_type=dict) + + +def test_database_failover_field_headers(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.DatabaseFailoverRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.database_failover), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.database_failover(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_database_failover_field_headers_async(): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = environments.DatabaseFailoverRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.database_failover), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.database_failover(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + environments.FetchDatabasePropertiesRequest, + dict, + ], +) +def test_fetch_database_properties(request_type, transport: str = "grpc"): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_database_properties), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = environments.FetchDatabasePropertiesResponse( + primary_gce_zone="primary_gce_zone_value", + secondary_gce_zone="secondary_gce_zone_value", + is_failover_replica_available=True, + ) + response = client.fetch_database_properties(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == environments.FetchDatabasePropertiesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, environments.FetchDatabasePropertiesResponse) + assert response.primary_gce_zone == "primary_gce_zone_value" + assert response.secondary_gce_zone == "secondary_gce_zone_value" + assert response.is_failover_replica_available is True + + +def test_fetch_database_properties_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_database_properties), "__call__" + ) as call: + client.fetch_database_properties() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == environments.FetchDatabasePropertiesRequest() + + +@pytest.mark.asyncio +async def test_fetch_database_properties_async( + transport: str = "grpc_asyncio", + request_type=environments.FetchDatabasePropertiesRequest, +): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_database_properties), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + environments.FetchDatabasePropertiesResponse( + primary_gce_zone="primary_gce_zone_value", + secondary_gce_zone="secondary_gce_zone_value", + is_failover_replica_available=True, + ) + ) + response = await client.fetch_database_properties(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == environments.FetchDatabasePropertiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, environments.FetchDatabasePropertiesResponse) + assert response.primary_gce_zone == "primary_gce_zone_value" + assert response.secondary_gce_zone == "secondary_gce_zone_value" + assert response.is_failover_replica_available is True + + +@pytest.mark.asyncio +async def test_fetch_database_properties_async_from_dict(): + await test_fetch_database_properties_async(request_type=dict) + + +def test_fetch_database_properties_field_headers(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = environments.FetchDatabasePropertiesRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_database_properties), "__call__" + ) as call: + call.return_value = environments.FetchDatabasePropertiesResponse() + client.fetch_database_properties(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_fetch_database_properties_field_headers_async(): + client = EnvironmentsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = environments.FetchDatabasePropertiesRequest() + + request.environment = "environment_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_database_properties), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + environments.FetchDatabasePropertiesResponse() + ) + await client.fetch_database_properties(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "environment=environment_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + environments.CreateEnvironmentRequest, + dict, + ], +) +def test_create_environment_rest(request_type): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["environment"] = { + "name": "name_value", + "config": { + "gke_cluster": "gke_cluster_value", + "dag_gcs_prefix": "dag_gcs_prefix_value", + "node_count": 1070, + "software_config": { + "image_version": "image_version_value", + "airflow_config_overrides": {}, + "pypi_packages": {}, + "env_variables": {}, + "python_version": "python_version_value", + "scheduler_count": 1607, + "cloud_data_lineage_integration": {"enabled": True}, + }, + "node_config": { + "location": "location_value", + "machine_type": "machine_type_value", + "network": "network_value", + "subnetwork": "subnetwork_value", + "disk_size_gb": 1261, + "oauth_scopes": ["oauth_scopes_value1", "oauth_scopes_value2"], + "service_account": "service_account_value", + "tags": ["tags_value1", "tags_value2"], + "ip_allocation_policy": { + "use_ip_aliases": True, + 
"cluster_secondary_range_name": "cluster_secondary_range_name_value", + "services_secondary_range_name": "services_secondary_range_name_value", + "cluster_ipv4_cidr_block": "cluster_ipv4_cidr_block_value", + "services_ipv4_cidr_block": "services_ipv4_cidr_block_value", + }, + "max_pods_per_node": 1798, + "enable_ip_masq_agent": True, + }, + "private_environment_config": { + "enable_private_environment": True, + "private_cluster_config": { + "enable_private_endpoint": True, + "master_ipv4_cidr_block": "master_ipv4_cidr_block_value", + "master_ipv4_reserved_range": "master_ipv4_reserved_range_value", + }, + "web_server_ipv4_cidr_block": "web_server_ipv4_cidr_block_value", + "cloud_sql_ipv4_cidr_block": "cloud_sql_ipv4_cidr_block_value", + "web_server_ipv4_reserved_range": "web_server_ipv4_reserved_range_value", + "cloud_composer_network_ipv4_cidr_block": "cloud_composer_network_ipv4_cidr_block_value", + "cloud_composer_network_ipv4_reserved_range": "cloud_composer_network_ipv4_reserved_range_value", + "enable_privately_used_public_ips": True, + "cloud_composer_connection_subnetwork": "cloud_composer_connection_subnetwork_value", + "networking_config": {"connection_type": 1}, + }, + "web_server_network_access_control": { + "allowed_ip_ranges": [ + {"value": "value_value", "description": "description_value"} + ] + }, + "database_config": {"machine_type": "machine_type_value"}, + "web_server_config": {"machine_type": "machine_type_value"}, + "airflow_uri": "airflow_uri_value", + "airflow_byoid_uri": "airflow_byoid_uri_value", + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "maintenance_window": { + "start_time": {"seconds": 751, "nanos": 543}, + "end_time": {}, + "recurrence": "recurrence_value", + }, + "workloads_config": { + "scheduler": { + "cpu": 0.328, + "memory_gb": 0.961, + "storage_gb": 0.1053, + "count": 553, + }, + "web_server": {"cpu": 0.328, "memory_gb": 0.961, "storage_gb": 0.1053}, + "worker": { + "cpu": 0.328, + "memory_gb": 0.961, + 
"storage_gb": 0.1053, + "min_count": 972, + "max_count": 974, + }, + "triggerer": {"count": 553, "cpu": 0.328, "memory_gb": 0.961}, + }, + "environment_size": 1, + "master_authorized_networks_config": { + "enabled": True, + "cidr_blocks": [ + { + "display_name": "display_name_value", + "cidr_block": "cidr_block_value", + } + ], + }, + "recovery_config": { + "scheduled_snapshots_config": { + "enabled": True, + "snapshot_location": "snapshot_location_value", + "snapshot_creation_schedule": "snapshot_creation_schedule_value", + "time_zone": "time_zone_value", + } + }, + "resilience_mode": 1, + }, + "uuid": "uuid_value", + "state": 1, + "create_time": {}, + "update_time": {}, + "labels": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_environment(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_environment_rest_interceptors(null_interceptor): + transport = transports.EnvironmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EnvironmentsRestInterceptor(), + ) + client = EnvironmentsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_create_environment" + ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "pre_create_environment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = environments.CreateEnvironmentRequest.pb( + environments.CreateEnvironmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = environments.CreateEnvironmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_environment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_environment_rest_bad_request( + transport: str = "rest", request_type=environments.CreateEnvironmentRequest +): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will 
satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["environment"] = { + "name": "name_value", + "config": { + "gke_cluster": "gke_cluster_value", + "dag_gcs_prefix": "dag_gcs_prefix_value", + "node_count": 1070, + "software_config": { + "image_version": "image_version_value", + "airflow_config_overrides": {}, + "pypi_packages": {}, + "env_variables": {}, + "python_version": "python_version_value", + "scheduler_count": 1607, + "cloud_data_lineage_integration": {"enabled": True}, + }, + "node_config": { + "location": "location_value", + "machine_type": "machine_type_value", + "network": "network_value", + "subnetwork": "subnetwork_value", + "disk_size_gb": 1261, + "oauth_scopes": ["oauth_scopes_value1", "oauth_scopes_value2"], + "service_account": "service_account_value", + "tags": ["tags_value1", "tags_value2"], + "ip_allocation_policy": { + "use_ip_aliases": True, + "cluster_secondary_range_name": "cluster_secondary_range_name_value", + "services_secondary_range_name": "services_secondary_range_name_value", + "cluster_ipv4_cidr_block": "cluster_ipv4_cidr_block_value", + "services_ipv4_cidr_block": "services_ipv4_cidr_block_value", + }, + "max_pods_per_node": 1798, + "enable_ip_masq_agent": True, + }, + "private_environment_config": { + "enable_private_environment": True, + "private_cluster_config": { + "enable_private_endpoint": True, + "master_ipv4_cidr_block": "master_ipv4_cidr_block_value", + "master_ipv4_reserved_range": "master_ipv4_reserved_range_value", + }, + "web_server_ipv4_cidr_block": "web_server_ipv4_cidr_block_value", + "cloud_sql_ipv4_cidr_block": "cloud_sql_ipv4_cidr_block_value", + "web_server_ipv4_reserved_range": "web_server_ipv4_reserved_range_value", + "cloud_composer_network_ipv4_cidr_block": "cloud_composer_network_ipv4_cidr_block_value", + "cloud_composer_network_ipv4_reserved_range": "cloud_composer_network_ipv4_reserved_range_value", + "enable_privately_used_public_ips": True, + 
"cloud_composer_connection_subnetwork": "cloud_composer_connection_subnetwork_value", + "networking_config": {"connection_type": 1}, + }, + "web_server_network_access_control": { + "allowed_ip_ranges": [ + {"value": "value_value", "description": "description_value"} + ] + }, + "database_config": {"machine_type": "machine_type_value"}, + "web_server_config": {"machine_type": "machine_type_value"}, + "airflow_uri": "airflow_uri_value", + "airflow_byoid_uri": "airflow_byoid_uri_value", + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "maintenance_window": { + "start_time": {"seconds": 751, "nanos": 543}, + "end_time": {}, + "recurrence": "recurrence_value", + }, + "workloads_config": { + "scheduler": { + "cpu": 0.328, + "memory_gb": 0.961, + "storage_gb": 0.1053, + "count": 553, + }, + "web_server": {"cpu": 0.328, "memory_gb": 0.961, "storage_gb": 0.1053}, + "worker": { + "cpu": 0.328, + "memory_gb": 0.961, + "storage_gb": 0.1053, + "min_count": 972, + "max_count": 974, + }, + "triggerer": {"count": 553, "cpu": 0.328, "memory_gb": 0.961}, + }, + "environment_size": 1, + "master_authorized_networks_config": { + "enabled": True, + "cidr_blocks": [ + { + "display_name": "display_name_value", + "cidr_block": "cidr_block_value", + } + ], + }, + "recovery_config": { + "scheduled_snapshots_config": { + "enabled": True, + "snapshot_location": "snapshot_location_value", + "snapshot_creation_schedule": "snapshot_creation_schedule_value", + "time_zone": "time_zone_value", + } + }, + "resilience_mode": 1, + }, + "uuid": "uuid_value", + "state": 1, + "create_time": {}, + "update_time": {}, + "labels": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_environment(request) + + +def test_create_environment_rest_flattened(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + environment=environments.Environment(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_environment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{parent=projects/*/locations/*}/environments" + % client.transport._host, + args[1], + ) + + +def test_create_environment_rest_flattened_error(transport: str = "rest"): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.create_environment(
+            environments.CreateEnvironmentRequest(),
+            parent="parent_value",
+            environment=environments.Environment(name="name_value"),
+        )
+
+
+# Coverage failsafe: only asserts that an EnvironmentsClient can be constructed
+# with the REST transport; no RPC is exercised.
+def test_create_environment_rest_error():
+    client = EnvironmentsClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        environments.GetEnvironmentRequest,
+        dict,
+    ],
+)
+def test_get_environment_rest(request_type):
+    client = EnvironmentsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"name": "projects/sample1/locations/sample2/environments/sample3"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = environments.Environment(
+            name="name_value",
+            uuid="uuid_value",
+            state=environments.Environment.State.CREATING,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Unwrap the proto-plus message to its raw protobuf so json_format
+        # can serialize it.
+        pb_return_value = environments.Environment.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.get_environment(request)
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, environments.Environment) + assert response.name == "name_value" + assert response.uuid == "uuid_value" + assert response.state == environments.Environment.State.CREATING + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_environment_rest_interceptors(null_interceptor): + transport = transports.EnvironmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EnvironmentsRestInterceptor(), + ) + client = EnvironmentsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_get_environment" + ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "pre_get_environment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = environments.GetEnvironmentRequest.pb( + environments.GetEnvironmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = environments.Environment.to_json( + environments.Environment() + ) + + request = environments.GetEnvironmentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = environments.Environment() + + client.get_environment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_environment_rest_bad_request( + transport: str = "rest", request_type=environments.GetEnvironmentRequest +): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/environments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_environment(request) + + +def test_get_environment_rest_flattened(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = environments.Environment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/environments/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = environments.Environment.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_environment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+    # Exactly one HTTP request should have been issued for the flattened call.
+    assert len(req.mock_calls) == 1
+    _, args, _ = req.mock_calls[0]
+    # NOTE(review): URI template is v1beta1 — verify this matches the API
+    # surface this test module targets.
+    assert path_template.validate(
+        "%s/v1beta1/{name=projects/*/locations/*/environments/*}"
+        % client.transport._host,
+        args[1],
+    )
+
+
+def test_get_environment_rest_flattened_error(transport: str = "rest"):
+    client = EnvironmentsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_environment(
+            environments.GetEnvironmentRequest(),
+            name="name_value",
+        )
+
+
+# Coverage failsafe: only asserts that an EnvironmentsClient can be constructed
+# with the REST transport; no RPC is exercised.
+def test_get_environment_rest_error():
+    client = EnvironmentsClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
+    )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        environments.ListEnvironmentsRequest,
+        dict,
+    ],
+)
+def test_list_environments_rest(request_type):
+    client = EnvironmentsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {"parent": "projects/sample1/locations/sample2"}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = environments.ListEnvironmentsResponse(
+            next_page_token="next_page_token_value",
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Unwrap the proto-plus message to its raw protobuf so json_format
+        # can serialize it.
+        pb_return_value = environments.ListEnvironmentsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        response = client.list_environments(request)
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListEnvironmentsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_environments_rest_interceptors(null_interceptor): + transport = transports.EnvironmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EnvironmentsRestInterceptor(), + ) + client = EnvironmentsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_list_environments" + ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "pre_list_environments" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = environments.ListEnvironmentsRequest.pb( + environments.ListEnvironmentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = environments.ListEnvironmentsResponse.to_json( + environments.ListEnvironmentsResponse() + ) + + request = environments.ListEnvironmentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = environments.ListEnvironmentsResponse() + + client.list_environments( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_environments_rest_bad_request( + transport: str = "rest", request_type=environments.ListEnvironmentsRequest +): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # 
send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_environments(request) + + +def test_list_environments_rest_flattened(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = environments.ListEnvironmentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = environments.ListEnvironmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_environments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+    # Exactly one HTTP request should have been issued for the flattened call.
+    assert len(req.mock_calls) == 1
+    _, args, _ = req.mock_calls[0]
+    # NOTE(review): URI template is v1beta1 — verify this matches the API
+    # surface this test module targets.
+    assert path_template.validate(
+        "%s/v1beta1/{parent=projects/*/locations/*}/environments"
+        % client.transport._host,
+        args[1],
+    )
+
+
+def test_list_environments_rest_flattened_error(transport: str = "rest"):
+    client = EnvironmentsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_environments(
+            environments.ListEnvironmentsRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_environments_rest_pager(transport: str = "rest"):
+    client = EnvironmentsClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    # Patches Session.request directly (not the client's own session) so the
+    # pager's successive page fetches all hit the same mock.
+    with mock.patch.object(Session, "request") as req:
+        # TODO(kbandes): remove this mock unless there's a good reason for it.
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + environments.ListEnvironmentsResponse( + environments=[ + environments.Environment(), + environments.Environment(), + environments.Environment(), + ], + next_page_token="abc", + ), + environments.ListEnvironmentsResponse( + environments=[], + next_page_token="def", + ), + environments.ListEnvironmentsResponse( + environments=[ + environments.Environment(), + ], + next_page_token="ghi", + ), + environments.ListEnvironmentsResponse( + environments=[ + environments.Environment(), + environments.Environment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + environments.ListEnvironmentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_environments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, environments.Environment) for i in results) + + pages = list(client.list_environments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + environments.UpdateEnvironmentRequest, + dict, + ], +) +def test_update_environment_rest(request_type): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/environments/sample3"} + request_init["environment"] = { + "name": "name_value", + 
"config": { + "gke_cluster": "gke_cluster_value", + "dag_gcs_prefix": "dag_gcs_prefix_value", + "node_count": 1070, + "software_config": { + "image_version": "image_version_value", + "airflow_config_overrides": {}, + "pypi_packages": {}, + "env_variables": {}, + "python_version": "python_version_value", + "scheduler_count": 1607, + "cloud_data_lineage_integration": {"enabled": True}, + }, + "node_config": { + "location": "location_value", + "machine_type": "machine_type_value", + "network": "network_value", + "subnetwork": "subnetwork_value", + "disk_size_gb": 1261, + "oauth_scopes": ["oauth_scopes_value1", "oauth_scopes_value2"], + "service_account": "service_account_value", + "tags": ["tags_value1", "tags_value2"], + "ip_allocation_policy": { + "use_ip_aliases": True, + "cluster_secondary_range_name": "cluster_secondary_range_name_value", + "services_secondary_range_name": "services_secondary_range_name_value", + "cluster_ipv4_cidr_block": "cluster_ipv4_cidr_block_value", + "services_ipv4_cidr_block": "services_ipv4_cidr_block_value", + }, + "max_pods_per_node": 1798, + "enable_ip_masq_agent": True, + }, + "private_environment_config": { + "enable_private_environment": True, + "private_cluster_config": { + "enable_private_endpoint": True, + "master_ipv4_cidr_block": "master_ipv4_cidr_block_value", + "master_ipv4_reserved_range": "master_ipv4_reserved_range_value", + }, + "web_server_ipv4_cidr_block": "web_server_ipv4_cidr_block_value", + "cloud_sql_ipv4_cidr_block": "cloud_sql_ipv4_cidr_block_value", + "web_server_ipv4_reserved_range": "web_server_ipv4_reserved_range_value", + "cloud_composer_network_ipv4_cidr_block": "cloud_composer_network_ipv4_cidr_block_value", + "cloud_composer_network_ipv4_reserved_range": "cloud_composer_network_ipv4_reserved_range_value", + "enable_privately_used_public_ips": True, + "cloud_composer_connection_subnetwork": "cloud_composer_connection_subnetwork_value", + "networking_config": {"connection_type": 1}, + }, + 
"web_server_network_access_control": { + "allowed_ip_ranges": [ + {"value": "value_value", "description": "description_value"} + ] + }, + "database_config": {"machine_type": "machine_type_value"}, + "web_server_config": {"machine_type": "machine_type_value"}, + "airflow_uri": "airflow_uri_value", + "airflow_byoid_uri": "airflow_byoid_uri_value", + "encryption_config": {"kms_key_name": "kms_key_name_value"}, + "maintenance_window": { + "start_time": {"seconds": 751, "nanos": 543}, + "end_time": {}, + "recurrence": "recurrence_value", + }, + "workloads_config": { + "scheduler": { + "cpu": 0.328, + "memory_gb": 0.961, + "storage_gb": 0.1053, + "count": 553, + }, + "web_server": {"cpu": 0.328, "memory_gb": 0.961, "storage_gb": 0.1053}, + "worker": { + "cpu": 0.328, + "memory_gb": 0.961, + "storage_gb": 0.1053, + "min_count": 972, + "max_count": 974, + }, + "triggerer": {"count": 553, "cpu": 0.328, "memory_gb": 0.961}, + }, + "environment_size": 1, + "master_authorized_networks_config": { + "enabled": True, + "cidr_blocks": [ + { + "display_name": "display_name_value", + "cidr_block": "cidr_block_value", + } + ], + }, + "recovery_config": { + "scheduled_snapshots_config": { + "enabled": True, + "snapshot_location": "snapshot_location_value", + "snapshot_creation_schedule": "snapshot_creation_schedule_value", + "time_zone": "time_zone_value", + } + }, + "resilience_mode": 1, + }, + "uuid": "uuid_value", + "state": 1, + "create_time": {}, + "update_time": {}, + "labels": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_environment(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_environment_rest_required_fields( + request_type=environments.UpdateEnvironmentRequest, +): + transport_class = transports.EnvironmentsRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_environment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_environment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_environment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_environment_rest_unset_required_fields(): + transport = transports.EnvironmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_environment._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("updateMask",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_environment_rest_interceptors(null_interceptor): + transport = transports.EnvironmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EnvironmentsRestInterceptor(), + ) + client = EnvironmentsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + 
), mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_update_environment" + ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "pre_update_environment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = environments.UpdateEnvironmentRequest.pb( + environments.UpdateEnvironmentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() req.return_value._content = json_format.MessageToJson( operations_pb2.Operation() ) - request = environments.CreateEnvironmentRequest() + request = environments.UpdateEnvironmentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -2912,7 +4639,7 @@ def test_create_environment_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_environment( + client.update_environment( request, metadata=[ ("key", "val"), @@ -2924,8 +4651,8 @@ def test_create_environment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_environment_rest_bad_request( - transport: str = "rest", request_type=environments.CreateEnvironmentRequest +def test_update_environment_rest_bad_request( + transport: str = "rest", request_type=environments.UpdateEnvironmentRequest ): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2933,7 +4660,7 @@ def test_create_environment_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/environments/sample3"} request_init["environment"] = { "name": "name_value", "config": { @@ -3034,6 +4761,7 @@ def test_create_environment_rest_bad_request( "time_zone": "time_zone_value", } }, + 
"resilience_mode": 1, }, "uuid": "uuid_value", "state": 1, @@ -3052,10 +4780,10 @@ def test_create_environment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_environment(request) + client.update_environment(request) -def test_create_environment_rest_flattened(): +def test_update_environment_rest_flattened(): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3066,193 +4794,6 @@ def test_create_environment_rest_flattened(): # Designate an appropriate value for the returned response. return_value = operations_pb2.Operation(name="operations/spam") - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - environment=environments.Environment(name="name_value"), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.create_environment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1beta1/{parent=projects/*/locations/*}/environments" - % client.transport._host, - args[1], - ) - - -def test_create_environment_rest_flattened_error(transport: str = "rest"): - client = EnvironmentsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_environment( - environments.CreateEnvironmentRequest(), - parent="parent_value", - environment=environments.Environment(name="name_value"), - ) - - -def test_create_environment_rest_error(): - client = EnvironmentsClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - environments.GetEnvironmentRequest, - dict, - ], -) -def test_get_environment_rest(request_type): - client = EnvironmentsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/environments/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = environments.Environment( - name="name_value", - uuid="uuid_value", - state=environments.Environment.State.CREATING, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = environments.Environment.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_environment(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, environments.Environment) - assert response.name == "name_value" - assert response.uuid == "uuid_value" - assert response.state == environments.Environment.State.CREATING - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_environment_rest_interceptors(null_interceptor): - transport = transports.EnvironmentsRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.EnvironmentsRestInterceptor(), - ) - client = EnvironmentsClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.EnvironmentsRestInterceptor, "post_get_environment" - ) as post, mock.patch.object( - transports.EnvironmentsRestInterceptor, "pre_get_environment" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = environments.GetEnvironmentRequest.pb( - environments.GetEnvironmentRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = environments.Environment.to_json( - environments.Environment() - ) - - request = environments.GetEnvironmentRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = environments.Environment() - - client.get_environment( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_environment_rest_bad_request( - transport: str = "rest", request_type=environments.GetEnvironmentRequest -): - client = EnvironmentsClient( - credentials=ga_credentials.AnonymousCredentials(), - 
transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/environments/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_environment(request) - - -def test_get_environment_rest_flattened(): - client = EnvironmentsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = environments.Environment() - # get arguments that satisfy an http rule for this method sample_request = { "name": "projects/sample1/locations/sample2/environments/sample3" @@ -3261,18 +4802,19 @@ def test_get_environment_rest_flattened(): # get truthy value for each flattened field mock_args = dict( name="name_value", + environment=environments.Environment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = environments.Environment.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_environment(**mock_args) + client.update_environment(**mock_args) # Establish that the underlying call was made with the expected # 
request object values. @@ -3285,7 +4827,7 @@ def test_get_environment_rest_flattened(): ) -def test_get_environment_rest_flattened_error(transport: str = "rest"): +def test_update_environment_rest_flattened_error(transport: str = "rest"): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3294,13 +4836,15 @@ def test_get_environment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_environment( - environments.GetEnvironmentRequest(), + client.update_environment( + environments.UpdateEnvironmentRequest(), name="name_value", + environment=environments.Environment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_get_environment_rest_error(): +def test_update_environment_rest_error(): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -3309,44 +4853,40 @@ def test_get_environment_rest_error(): @pytest.mark.parametrize( "request_type", [ - environments.ListEnvironmentsRequest, + environments.DeleteEnvironmentRequest, dict, ], ) -def test_list_environments_rest(request_type): +def test_delete_environment_rest(request_type): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/environments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = environments.ListEnvironmentsResponse( - next_page_token="next_page_token_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = environments.ListEnvironmentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_environments(request) + response = client.delete_environment(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEnvironmentsPager) - assert response.next_page_token == "next_page_token_value" + assert response.operation.name == "operations/spam" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_environments_rest_interceptors(null_interceptor): +def test_delete_environment_rest_interceptors(null_interceptor): transport = transports.EnvironmentsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3359,14 +4899,16 @@ def test_list_environments_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.EnvironmentsRestInterceptor, "post_list_environments" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_delete_environment" ) as post, mock.patch.object( - transports.EnvironmentsRestInterceptor, "pre_list_environments" + transports.EnvironmentsRestInterceptor, "pre_delete_environment" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = environments.ListEnvironmentsRequest.pb( - environments.ListEnvironmentsRequest() + pb_message = environments.DeleteEnvironmentRequest.pb( + 
environments.DeleteEnvironmentRequest() ) transcode.return_value = { "method": "post", @@ -3378,19 +4920,19 @@ def test_list_environments_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = environments.ListEnvironmentsResponse.to_json( - environments.ListEnvironmentsResponse() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() ) - request = environments.ListEnvironmentsRequest() + request = environments.DeleteEnvironmentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = environments.ListEnvironmentsResponse() + post.return_value = operations_pb2.Operation() - client.list_environments( + client.delete_environment( request, metadata=[ ("key", "val"), @@ -3402,8 +4944,8 @@ def test_list_environments_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_environments_rest_bad_request( - transport: str = "rest", request_type=environments.ListEnvironmentsRequest +def test_delete_environment_rest_bad_request( + transport: str = "rest", request_type=environments.DeleteEnvironmentRequest ): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3411,7 +4953,7 @@ def test_list_environments_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/environments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3423,10 +4965,10 @@ def test_list_environments_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_environments(request) + client.delete_environment(request) -def test_list_environments_rest_flattened(): +def test_delete_environment_rest_flattened(): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3435,39 +4977,40 @@ def test_list_environments_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = environments.ListEnvironmentsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/environments/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = environments.ListEnvironmentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_environments(**mock_args) + client.delete_environment(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta1/{parent=projects/*/locations/*}/environments" + "%s/v1beta1/{name=projects/*/locations/*/environments/*}" % client.transport._host, args[1], ) -def test_list_environments_rest_flattened_error(transport: str = "rest"): +def test_delete_environment_rest_flattened_error(transport: str = "rest"): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3476,83 +5019,26 @@ def test_list_environments_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_environments( - environments.ListEnvironmentsRequest(), - parent="parent_value", + client.delete_environment( + environments.DeleteEnvironmentRequest(), + name="name_value", ) -def test_list_environments_rest_pager(transport: str = "rest"): +def test_delete_environment_rest_error(): client = EnvironmentsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - environments.ListEnvironmentsResponse( - environments=[ - environments.Environment(), - environments.Environment(), - environments.Environment(), - ], - next_page_token="abc", - ), - environments.ListEnvironmentsResponse( - environments=[], - next_page_token="def", - ), - environments.ListEnvironmentsResponse( - environments=[ - environments.Environment(), - ], - next_page_token="ghi", - ), - environments.ListEnvironmentsResponse( - environments=[ - environments.Environment(), - environments.Environment(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - environments.ListEnvironmentsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_environments(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, environments.Environment) for i in results) - - pages = list(client.list_environments(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - environments.UpdateEnvironmentRequest, + environments.RestartWebServerRequest, dict, ], ) -def test_update_environment_rest(request_type): +def test_restart_web_server_rest(request_type): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3560,112 +5046,131 @@ def test_update_environment_rest(request_type): # send a request that will satisfy transcoding 
request_init = {"name": "projects/sample1/locations/sample2/environments/sample3"} - request_init["environment"] = { - "name": "name_value", - "config": { - "gke_cluster": "gke_cluster_value", - "dag_gcs_prefix": "dag_gcs_prefix_value", - "node_count": 1070, - "software_config": { - "image_version": "image_version_value", - "airflow_config_overrides": {}, - "pypi_packages": {}, - "env_variables": {}, - "python_version": "python_version_value", - "scheduler_count": 1607, - "cloud_data_lineage_integration": {"enabled": True}, - }, - "node_config": { - "location": "location_value", - "machine_type": "machine_type_value", - "network": "network_value", - "subnetwork": "subnetwork_value", - "disk_size_gb": 1261, - "oauth_scopes": ["oauth_scopes_value1", "oauth_scopes_value2"], - "service_account": "service_account_value", - "tags": ["tags_value1", "tags_value2"], - "ip_allocation_policy": { - "use_ip_aliases": True, - "cluster_secondary_range_name": "cluster_secondary_range_name_value", - "services_secondary_range_name": "services_secondary_range_name_value", - "cluster_ipv4_cidr_block": "cluster_ipv4_cidr_block_value", - "services_ipv4_cidr_block": "services_ipv4_cidr_block_value", - }, - "max_pods_per_node": 1798, - "enable_ip_masq_agent": True, - }, - "private_environment_config": { - "enable_private_environment": True, - "private_cluster_config": { - "enable_private_endpoint": True, - "master_ipv4_cidr_block": "master_ipv4_cidr_block_value", - "master_ipv4_reserved_range": "master_ipv4_reserved_range_value", - }, - "web_server_ipv4_cidr_block": "web_server_ipv4_cidr_block_value", - "cloud_sql_ipv4_cidr_block": "cloud_sql_ipv4_cidr_block_value", - "web_server_ipv4_reserved_range": "web_server_ipv4_reserved_range_value", - "cloud_composer_network_ipv4_cidr_block": "cloud_composer_network_ipv4_cidr_block_value", - "cloud_composer_network_ipv4_reserved_range": "cloud_composer_network_ipv4_reserved_range_value", - "enable_privately_used_public_ips": True, - 
"cloud_composer_connection_subnetwork": "cloud_composer_connection_subnetwork_value", - "networking_config": {"connection_type": 1}, - }, - "web_server_network_access_control": { - "allowed_ip_ranges": [ - {"value": "value_value", "description": "description_value"} - ] - }, - "database_config": {"machine_type": "machine_type_value"}, - "web_server_config": {"machine_type": "machine_type_value"}, - "airflow_uri": "airflow_uri_value", - "airflow_byoid_uri": "airflow_byoid_uri_value", - "encryption_config": {"kms_key_name": "kms_key_name_value"}, - "maintenance_window": { - "start_time": {"seconds": 751, "nanos": 543}, - "end_time": {}, - "recurrence": "recurrence_value", - }, - "workloads_config": { - "scheduler": { - "cpu": 0.328, - "memory_gb": 0.961, - "storage_gb": 0.1053, - "count": 553, - }, - "web_server": {"cpu": 0.328, "memory_gb": 0.961, "storage_gb": 0.1053}, - "worker": { - "cpu": 0.328, - "memory_gb": 0.961, - "storage_gb": 0.1053, - "min_count": 972, - "max_count": 974, - }, - "triggerer": {"count": 553, "cpu": 0.328, "memory_gb": 0.961}, - }, - "environment_size": 1, - "master_authorized_networks_config": { - "enabled": True, - "cidr_blocks": [ - { - "display_name": "display_name_value", - "cidr_block": "cidr_block_value", - } - ], - }, - "recovery_config": { - "scheduled_snapshots_config": { - "enabled": True, - "snapshot_location": "snapshot_location_value", - "snapshot_creation_schedule": "snapshot_creation_schedule_value", - "time_zone": "time_zone_value", - } - }, - }, - "uuid": "uuid_value", - "state": 1, - "create_time": {}, - "update_time": {}, - "labels": {}, + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.restart_web_server(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_restart_web_server_rest_interceptors(null_interceptor): + transport = transports.EnvironmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EnvironmentsRestInterceptor(), + ) + client = EnvironmentsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_restart_web_server" + ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "pre_restart_web_server" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = environments.RestartWebServerRequest.pb( + environments.RestartWebServerRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = environments.RestartWebServerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() 
+ + client.restart_web_server( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_restart_web_server_rest_bad_request( + transport: str = "rest", request_type=environments.RestartWebServerRequest +): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/environments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.restart_web_server(request) + + +def test_restart_web_server_rest_error(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + environments.CheckUpgradeRequest, + dict, + ], +) +def test_check_upgrade_rest(request_type): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "environment": "projects/sample1/locations/sample2/environments/sample3" } request = request_type(**request_init) @@ -3681,96 +5186,152 @@ def test_update_environment_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_environment(request) + response = client.check_upgrade(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_update_environment_rest_required_fields( - request_type=environments.UpdateEnvironmentRequest, -): - transport_class = transports.EnvironmentsRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_check_upgrade_rest_interceptors(null_interceptor): + transport = transports.EnvironmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EnvironmentsRestInterceptor(), ) + client = EnvironmentsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_check_upgrade" + ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "pre_check_upgrade" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = environments.CheckUpgradeRequest.pb( + environments.CheckUpgradeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # verify fields with default values are dropped + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_environment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + request = 
environments.CheckUpgradeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - # verify required fields with default values are now present + client.check_upgrade( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_environment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) + pre.assert_called_once() + post.assert_called_once() - # verify required fields with non-default values are left alone +def test_check_upgrade_rest_bad_request( + transport: str = "rest", request_type=environments.CheckUpgradeRequest +): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) + + # send a request that will satisfy transcoding + request_init = { + "environment": "projects/sample1/locations/sample2/environments/sample3" + } request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.check_upgrade(request) - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value +def test_check_upgrade_rest_error(): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - response = client.update_environment(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.parametrize( + "request_type", + [ + environments.ExecuteAirflowCommandRequest, + dict, + ], +) +def test_execute_airflow_command_rest(request_type): + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "environment": "projects/sample1/locations/sample2/environments/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = environments.ExecuteAirflowCommandResponse( + execution_id="execution_id_value", + pod="pod_value", + pod_namespace="pod_namespace_value", + error="error_value", + ) -def test_update_environment_rest_unset_required_fields(): - transport = transports.EnvironmentsRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = environments.ExecuteAirflowCommandResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.execute_airflow_command(request) - unset_fields = transport.update_environment._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("updateMask",))) + # Establish that the response is the type that we expect. + assert isinstance(response, environments.ExecuteAirflowCommandResponse) + assert response.execution_id == "execution_id_value" + assert response.pod == "pod_value" + assert response.pod_namespace == "pod_namespace_value" + assert response.error == "error_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_environment_rest_interceptors(null_interceptor): +def test_execute_airflow_command_rest_interceptors(null_interceptor): transport = transports.EnvironmentsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3783,16 +5344,14 @@ def test_update_environment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.EnvironmentsRestInterceptor, "post_update_environment" + transports.EnvironmentsRestInterceptor, "post_execute_airflow_command" ) as post, mock.patch.object( - 
transports.EnvironmentsRestInterceptor, "pre_update_environment" + transports.EnvironmentsRestInterceptor, "pre_execute_airflow_command" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = environments.UpdateEnvironmentRequest.pb( - environments.UpdateEnvironmentRequest() + pb_message = environments.ExecuteAirflowCommandRequest.pb( + environments.ExecuteAirflowCommandRequest() ) transcode.return_value = { "method": "post", @@ -3804,19 +5363,19 @@ def test_update_environment_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = environments.ExecuteAirflowCommandResponse.to_json( + environments.ExecuteAirflowCommandResponse() ) - request = environments.UpdateEnvironmentRequest() + request = environments.ExecuteAirflowCommandRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = environments.ExecuteAirflowCommandResponse() - client.update_environment( + client.execute_airflow_command( request, metadata=[ ("key", "val"), @@ -3828,8 +5387,8 @@ def test_update_environment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_update_environment_rest_bad_request( - transport: str = "rest", request_type=environments.UpdateEnvironmentRequest +def test_execute_airflow_command_rest_bad_request( + transport: str = "rest", request_type=environments.ExecuteAirflowCommandRequest ): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3837,113 +5396,8 @@ def test_update_environment_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/environments/sample3"} - request_init["environment"] = { - "name": "name_value", - 
"config": { - "gke_cluster": "gke_cluster_value", - "dag_gcs_prefix": "dag_gcs_prefix_value", - "node_count": 1070, - "software_config": { - "image_version": "image_version_value", - "airflow_config_overrides": {}, - "pypi_packages": {}, - "env_variables": {}, - "python_version": "python_version_value", - "scheduler_count": 1607, - "cloud_data_lineage_integration": {"enabled": True}, - }, - "node_config": { - "location": "location_value", - "machine_type": "machine_type_value", - "network": "network_value", - "subnetwork": "subnetwork_value", - "disk_size_gb": 1261, - "oauth_scopes": ["oauth_scopes_value1", "oauth_scopes_value2"], - "service_account": "service_account_value", - "tags": ["tags_value1", "tags_value2"], - "ip_allocation_policy": { - "use_ip_aliases": True, - "cluster_secondary_range_name": "cluster_secondary_range_name_value", - "services_secondary_range_name": "services_secondary_range_name_value", - "cluster_ipv4_cidr_block": "cluster_ipv4_cidr_block_value", - "services_ipv4_cidr_block": "services_ipv4_cidr_block_value", - }, - "max_pods_per_node": 1798, - "enable_ip_masq_agent": True, - }, - "private_environment_config": { - "enable_private_environment": True, - "private_cluster_config": { - "enable_private_endpoint": True, - "master_ipv4_cidr_block": "master_ipv4_cidr_block_value", - "master_ipv4_reserved_range": "master_ipv4_reserved_range_value", - }, - "web_server_ipv4_cidr_block": "web_server_ipv4_cidr_block_value", - "cloud_sql_ipv4_cidr_block": "cloud_sql_ipv4_cidr_block_value", - "web_server_ipv4_reserved_range": "web_server_ipv4_reserved_range_value", - "cloud_composer_network_ipv4_cidr_block": "cloud_composer_network_ipv4_cidr_block_value", - "cloud_composer_network_ipv4_reserved_range": "cloud_composer_network_ipv4_reserved_range_value", - "enable_privately_used_public_ips": True, - "cloud_composer_connection_subnetwork": "cloud_composer_connection_subnetwork_value", - "networking_config": {"connection_type": 1}, - }, - 
"web_server_network_access_control": { - "allowed_ip_ranges": [ - {"value": "value_value", "description": "description_value"} - ] - }, - "database_config": {"machine_type": "machine_type_value"}, - "web_server_config": {"machine_type": "machine_type_value"}, - "airflow_uri": "airflow_uri_value", - "airflow_byoid_uri": "airflow_byoid_uri_value", - "encryption_config": {"kms_key_name": "kms_key_name_value"}, - "maintenance_window": { - "start_time": {"seconds": 751, "nanos": 543}, - "end_time": {}, - "recurrence": "recurrence_value", - }, - "workloads_config": { - "scheduler": { - "cpu": 0.328, - "memory_gb": 0.961, - "storage_gb": 0.1053, - "count": 553, - }, - "web_server": {"cpu": 0.328, "memory_gb": 0.961, "storage_gb": 0.1053}, - "worker": { - "cpu": 0.328, - "memory_gb": 0.961, - "storage_gb": 0.1053, - "min_count": 972, - "max_count": 974, - }, - "triggerer": {"count": 553, "cpu": 0.328, "memory_gb": 0.961}, - }, - "environment_size": 1, - "master_authorized_networks_config": { - "enabled": True, - "cidr_blocks": [ - { - "display_name": "display_name_value", - "cidr_block": "cidr_block_value", - } - ], - }, - "recovery_config": { - "scheduled_snapshots_config": { - "enabled": True, - "snapshot_location": "snapshot_location_value", - "snapshot_creation_schedule": "snapshot_creation_schedule_value", - "time_zone": "time_zone_value", - } - }, - }, - "uuid": "uuid_value", - "state": 1, - "create_time": {}, - "update_time": {}, - "labels": {}, + request_init = { + "environment": "projects/sample1/locations/sample2/environments/sample3" } request = request_type(**request_init) @@ -3956,71 +5410,142 @@ def test_update_environment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_environment(request) + client.execute_airflow_command(request) -def test_update_environment_rest_flattened(): +def test_execute_airflow_command_rest_error(): + client = EnvironmentsClient( + 
credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + environments.StopAirflowCommandRequest, + dict, + ], +) +def test_stop_airflow_command_rest(request_type): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) + # send a request that will satisfy transcoding + request_init = { + "environment": "projects/sample1/locations/sample2/environments/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/environments/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - environment=environments.Environment(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + return_value = environments.StopAirflowCommandResponse( + is_done=True, + output=["output_value"], ) - mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = environments.StopAirflowCommandResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + response = client.stop_airflow_command(request) - client.update_environment(**mock_args) + # Establish that the response is the type that we expect. 
+ assert isinstance(response, environments.StopAirflowCommandResponse) + assert response.is_done is True + assert response.output == ["output_value"] - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1beta1/{name=projects/*/locations/*/environments/*}" - % client.transport._host, - args[1], + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_stop_airflow_command_rest_interceptors(null_interceptor): + transport = transports.EnvironmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EnvironmentsRestInterceptor(), + ) + client = EnvironmentsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_stop_airflow_command" + ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "pre_stop_airflow_command" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = environments.StopAirflowCommandRequest.pb( + environments.StopAirflowCommandRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = environments.StopAirflowCommandResponse.to_json( + environments.StopAirflowCommandResponse() ) + request = environments.StopAirflowCommandRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = environments.StopAirflowCommandResponse() -def test_update_environment_rest_flattened_error(transport: str = "rest"): + 
client.stop_airflow_command( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_stop_airflow_command_rest_bad_request( + transport: str = "rest", request_type=environments.StopAirflowCommandRequest +): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_environment( - environments.UpdateEnvironmentRequest(), - name="name_value", - environment=environments.Environment(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + # send a request that will satisfy transcoding + request_init = { + "environment": "projects/sample1/locations/sample2/environments/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.stop_airflow_command(request) -def test_update_environment_rest_error(): +def test_stop_airflow_command_rest_error(): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4029,40 +5554,46 @@ def test_update_environment_rest_error(): @pytest.mark.parametrize( "request_type", [ - environments.DeleteEnvironmentRequest, + environments.PollAirflowCommandRequest, dict, ], ) -def test_delete_environment_rest(request_type): +def test_poll_airflow_command_rest(request_type): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/environments/sample3"} + request_init = { + "environment": "projects/sample1/locations/sample2/environments/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = environments.PollAirflowCommandResponse( + output_end=True, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = environments.PollAirflowCommandResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_environment(request) + response = client.poll_airflow_command(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, environments.PollAirflowCommandResponse) + assert response.output_end is True @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_environment_rest_interceptors(null_interceptor): +def test_poll_airflow_command_rest_interceptors(null_interceptor): transport = transports.EnvironmentsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4075,16 +5606,14 @@ def test_delete_environment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.EnvironmentsRestInterceptor, "post_delete_environment" + transports.EnvironmentsRestInterceptor, "post_poll_airflow_command" ) as post, mock.patch.object( - transports.EnvironmentsRestInterceptor, "pre_delete_environment" + transports.EnvironmentsRestInterceptor, "pre_poll_airflow_command" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = environments.DeleteEnvironmentRequest.pb( - environments.DeleteEnvironmentRequest() + pb_message = environments.PollAirflowCommandRequest.pb( + environments.PollAirflowCommandRequest() 
) transcode.return_value = { "method": "post", @@ -4096,19 +5625,19 @@ def test_delete_environment_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = environments.PollAirflowCommandResponse.to_json( + environments.PollAirflowCommandResponse() ) - request = environments.DeleteEnvironmentRequest() + request = environments.PollAirflowCommandRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = environments.PollAirflowCommandResponse() - client.delete_environment( + client.poll_airflow_command( request, metadata=[ ("key", "val"), @@ -4120,8 +5649,8 @@ def test_delete_environment_rest_interceptors(null_interceptor): post.assert_called_once() -def test_delete_environment_rest_bad_request( - transport: str = "rest", request_type=environments.DeleteEnvironmentRequest +def test_poll_airflow_command_rest_bad_request( + transport: str = "rest", request_type=environments.PollAirflowCommandRequest ): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4129,7 +5658,9 @@ def test_delete_environment_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/environments/sample3"} + request_init = { + "environment": "projects/sample1/locations/sample2/environments/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -4141,67 +5672,10 @@ def test_delete_environment_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_environment(request) - - -def test_delete_environment_rest_flattened(): - client = EnvironmentsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/environments/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_environment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1beta1/{name=projects/*/locations/*/environments/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_environment_rest_flattened_error(transport: str = "rest"): - client = EnvironmentsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_environment( - environments.DeleteEnvironmentRequest(), - name="name_value", - ) + client.poll_airflow_command(request) -def test_delete_environment_rest_error(): +def test_poll_airflow_command_rest_error(): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4210,18 +5684,20 @@ def test_delete_environment_rest_error(): @pytest.mark.parametrize( "request_type", [ - environments.RestartWebServerRequest, + environments.SaveSnapshotRequest, dict, ], ) -def test_restart_web_server_rest(request_type): +def test_save_snapshot_rest(request_type): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/environments/sample3"} + request_init = { + "environment": "projects/sample1/locations/sample2/environments/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -4236,14 +5712,14 @@ def test_restart_web_server_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.restart_web_server(request) + response = client.save_snapshot(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_restart_web_server_rest_interceptors(null_interceptor): +def test_save_snapshot_rest_interceptors(null_interceptor): transport = transports.EnvironmentsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4258,14 +5734,14 @@ def test_restart_web_server_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.EnvironmentsRestInterceptor, "post_restart_web_server" + transports.EnvironmentsRestInterceptor, "post_save_snapshot" ) as post, mock.patch.object( - transports.EnvironmentsRestInterceptor, "pre_restart_web_server" + transports.EnvironmentsRestInterceptor, "pre_save_snapshot" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = environments.RestartWebServerRequest.pb( - environments.RestartWebServerRequest() + pb_message = environments.SaveSnapshotRequest.pb( + environments.SaveSnapshotRequest() ) transcode.return_value = { "method": "post", @@ -4281,7 +5757,7 @@ def test_restart_web_server_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = environments.RestartWebServerRequest() + request = environments.SaveSnapshotRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -4289,7 +5765,7 @@ def test_restart_web_server_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.restart_web_server( + client.save_snapshot( request, metadata=[ ("key", "val"), @@ -4301,8 +5777,8 @@ def test_restart_web_server_rest_interceptors(null_interceptor): post.assert_called_once() -def test_restart_web_server_rest_bad_request( - transport: str = "rest", request_type=environments.RestartWebServerRequest +def test_save_snapshot_rest_bad_request( + transport: str = "rest", 
request_type=environments.SaveSnapshotRequest ): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4310,7 +5786,9 @@ def test_restart_web_server_rest_bad_request( ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/environments/sample3"} + request_init = { + "environment": "projects/sample1/locations/sample2/environments/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4322,10 +5800,10 @@ def test_restart_web_server_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.restart_web_server(request) + client.save_snapshot(request) -def test_restart_web_server_rest_error(): +def test_save_snapshot_rest_error(): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4334,11 +5812,11 @@ def test_restart_web_server_rest_error(): @pytest.mark.parametrize( "request_type", [ - environments.CheckUpgradeRequest, + environments.LoadSnapshotRequest, dict, ], ) -def test_check_upgrade_rest(request_type): +def test_load_snapshot_rest(request_type): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4362,14 +5840,14 @@ def test_check_upgrade_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.check_upgrade(request) + response = client.load_snapshot(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_check_upgrade_rest_interceptors(null_interceptor): +def test_load_snapshot_rest_interceptors(null_interceptor): transport = transports.EnvironmentsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4384,14 +5862,14 @@ def test_check_upgrade_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.EnvironmentsRestInterceptor, "post_check_upgrade" + transports.EnvironmentsRestInterceptor, "post_load_snapshot" ) as post, mock.patch.object( - transports.EnvironmentsRestInterceptor, "pre_check_upgrade" + transports.EnvironmentsRestInterceptor, "pre_load_snapshot" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = environments.CheckUpgradeRequest.pb( - environments.CheckUpgradeRequest() + pb_message = environments.LoadSnapshotRequest.pb( + environments.LoadSnapshotRequest() ) transcode.return_value = { "method": "post", @@ -4407,7 +5885,7 @@ def test_check_upgrade_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = environments.CheckUpgradeRequest() + request = environments.LoadSnapshotRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -4415,7 +5893,7 @@ def test_check_upgrade_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.check_upgrade( + client.load_snapshot( request, metadata=[ ("key", "val"), @@ -4427,8 +5905,8 @@ def test_check_upgrade_rest_interceptors(null_interceptor): post.assert_called_once() -def test_check_upgrade_rest_bad_request( - transport: str = "rest", request_type=environments.CheckUpgradeRequest +def test_load_snapshot_rest_bad_request( + transport: str = "rest", request_type=environments.LoadSnapshotRequest ): client = EnvironmentsClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -4450,10 +5928,10 @@ def test_check_upgrade_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.check_upgrade(request) + client.load_snapshot(request) -def test_check_upgrade_rest_error(): +def test_load_snapshot_rest_error(): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4462,11 +5940,11 @@ def test_check_upgrade_rest_error(): @pytest.mark.parametrize( "request_type", [ - environments.SaveSnapshotRequest, + environments.DatabaseFailoverRequest, dict, ], ) -def test_save_snapshot_rest(request_type): +def test_database_failover_rest(request_type): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4490,14 +5968,14 @@ def test_save_snapshot_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.save_snapshot(request) + response = client.database_failover(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_save_snapshot_rest_interceptors(null_interceptor): +def test_database_failover_rest_interceptors(null_interceptor): transport = transports.EnvironmentsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4512,14 +5990,14 @@ def test_save_snapshot_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.EnvironmentsRestInterceptor, "post_save_snapshot" + transports.EnvironmentsRestInterceptor, "post_database_failover" ) as post, mock.patch.object( - transports.EnvironmentsRestInterceptor, "pre_save_snapshot" + transports.EnvironmentsRestInterceptor, "pre_database_failover" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = environments.SaveSnapshotRequest.pb( - environments.SaveSnapshotRequest() + pb_message = environments.DatabaseFailoverRequest.pb( + environments.DatabaseFailoverRequest() ) transcode.return_value = { "method": "post", @@ -4535,7 +6013,7 @@ def test_save_snapshot_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = environments.SaveSnapshotRequest() + request = environments.DatabaseFailoverRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -4543,7 +6021,7 @@ def test_save_snapshot_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.save_snapshot( + client.database_failover( request, metadata=[ ("key", "val"), @@ -4555,8 +6033,8 @@ def test_save_snapshot_rest_interceptors(null_interceptor): post.assert_called_once() -def test_save_snapshot_rest_bad_request( - transport: str = "rest", request_type=environments.SaveSnapshotRequest +def test_database_failover_rest_bad_request( + transport: str = "rest", request_type=environments.DatabaseFailoverRequest ): client 
= EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4578,10 +6056,10 @@ def test_save_snapshot_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.save_snapshot(request) + client.database_failover(request) -def test_save_snapshot_rest_error(): +def test_database_failover_rest_error(): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4590,11 +6068,11 @@ def test_save_snapshot_rest_error(): @pytest.mark.parametrize( "request_type", [ - environments.LoadSnapshotRequest, + environments.FetchDatabasePropertiesRequest, dict, ], ) -def test_load_snapshot_rest(request_type): +def test_fetch_database_properties_rest(request_type): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4609,23 +6087,119 @@ def test_load_snapshot_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = environments.FetchDatabasePropertiesResponse( + primary_gce_zone="primary_gce_zone_value", + secondary_gce_zone="secondary_gce_zone_value", + is_failover_replica_available=True, + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = environments.FetchDatabasePropertiesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.load_snapshot(request) + response = client.fetch_database_properties(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, environments.FetchDatabasePropertiesResponse) + assert response.primary_gce_zone == "primary_gce_zone_value" + assert response.secondary_gce_zone == "secondary_gce_zone_value" + assert response.is_failover_replica_available is True + + +def test_fetch_database_properties_rest_required_fields( + request_type=environments.FetchDatabasePropertiesRequest, +): + transport_class = transports.EnvironmentsRestTransport + + request_init = {} + request_init["environment"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_database_properties._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["environment"] = "environment_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_database_properties._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "environment" in jsonified_request + assert jsonified_request["environment"] == "environment_value" + + client = EnvironmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = environments.FetchDatabasePropertiesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = environments.FetchDatabasePropertiesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.fetch_database_properties(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_fetch_database_properties_rest_unset_required_fields(): + transport = transports.EnvironmentsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.fetch_database_properties._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("environment",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_load_snapshot_rest_interceptors(null_interceptor): +def test_fetch_database_properties_rest_interceptors(null_interceptor): transport = transports.EnvironmentsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4638,16 +6212,14 @@ def test_load_snapshot_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), 
mock.patch.object( - transports.EnvironmentsRestInterceptor, "post_load_snapshot" + transports.EnvironmentsRestInterceptor, "post_fetch_database_properties" ) as post, mock.patch.object( - transports.EnvironmentsRestInterceptor, "pre_load_snapshot" + transports.EnvironmentsRestInterceptor, "pre_fetch_database_properties" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = environments.LoadSnapshotRequest.pb( - environments.LoadSnapshotRequest() + pb_message = environments.FetchDatabasePropertiesRequest.pb( + environments.FetchDatabasePropertiesRequest() ) transcode.return_value = { "method": "post", @@ -4659,19 +6231,21 @@ def test_load_snapshot_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = ( + environments.FetchDatabasePropertiesResponse.to_json( + environments.FetchDatabasePropertiesResponse() + ) ) - request = environments.LoadSnapshotRequest() + request = environments.FetchDatabasePropertiesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = environments.FetchDatabasePropertiesResponse() - client.load_snapshot( + client.fetch_database_properties( request, metadata=[ ("key", "val"), @@ -4683,8 +6257,8 @@ def test_load_snapshot_rest_interceptors(null_interceptor): post.assert_called_once() -def test_load_snapshot_rest_bad_request( - transport: str = "rest", request_type=environments.LoadSnapshotRequest +def test_fetch_database_properties_rest_bad_request( + transport: str = "rest", request_type=environments.FetchDatabasePropertiesRequest ): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4706,10 +6280,10 @@ def test_load_snapshot_rest_bad_request( response_value.status_code 
= 400 response_value.request = Request() req.return_value = response_value - client.load_snapshot(request) + client.fetch_database_properties(request) -def test_load_snapshot_rest_error(): +def test_fetch_database_properties_rest_error(): client = EnvironmentsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -4861,8 +6435,13 @@ def test_environments_base_transport(): "delete_environment", "restart_web_server", "check_upgrade", + "execute_airflow_command", + "stop_airflow_command", + "poll_airflow_command", "save_snapshot", "load_snapshot", + "database_failover", + "fetch_database_properties", "get_operation", "delete_operation", "list_operations", @@ -5164,12 +6743,27 @@ def test_environments_client_transport_session_collision(transport_name): session1 = client1.transport.check_upgrade._session session2 = client2.transport.check_upgrade._session assert session1 != session2 + session1 = client1.transport.execute_airflow_command._session + session2 = client2.transport.execute_airflow_command._session + assert session1 != session2 + session1 = client1.transport.stop_airflow_command._session + session2 = client2.transport.stop_airflow_command._session + assert session1 != session2 + session1 = client1.transport.poll_airflow_command._session + session2 = client2.transport.poll_airflow_command._session + assert session1 != session2 session1 = client1.transport.save_snapshot._session session2 = client2.transport.save_snapshot._session assert session1 != session2 session1 = client1.transport.load_snapshot._session session2 = client2.transport.load_snapshot._session assert session1 != session2 + session1 = client1.transport.database_failover._session + session2 = client2.transport.database_failover._session + assert session1 != session2 + session1 = client1.transport.fetch_database_properties._session + session2 = client2.transport.fetch_database_properties._session + assert session1 != session2 def test_environments_grpc_transport_channel():